From e10e6dd0026998dc28d86872078d8ba7a13798d2 Mon Sep 17 00:00:00 2001 From: Dina Suehiro Jones Date: Mon, 21 Apr 2025 16:05:11 -0700 Subject: [PATCH 001/217] Fixes for MultimodalQnA with the Milvus vector db (#1859) Signed-off-by: Dina Suehiro Jones --- MultimodalQnA/docker_compose/intel/cpu/xeon/README.md | 4 +++- .../docker_compose/intel/cpu/xeon/compose_milvus.yaml | 2 ++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md b/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md index 1260786c7f..c251e07a8e 100644 --- a/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md +++ b/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md @@ -241,7 +241,7 @@ docker compose -f compose.yaml up -d export MILVUS_HOST=${host_ip} export MILVUS_PORT=19530 export MILVUS_RETRIEVER_PORT=7000 -export COLLECTION_NAME=mm_rag_milvus +export COLLECTION_NAME=LangChainCollection cd GenAIExamples/MultimodalQnA/docker_compose/intel/cpu/xeon/ docker compose -f compose_milvus.yaml up -d ``` @@ -385,6 +385,8 @@ curl --silent --write-out "HTTPSTATUS:%{http_code}" \ Now, test the microservice with posting a custom caption along with an image and a PDF containing images and text. The image caption can be provided as a text (`.txt`) or as spoken audio (`.wav` or `.mp3`). +> Note: Audio captions for images are currently only supported when using the Redis data prep backend. 
+ ```bash curl --silent --write-out "HTTPSTATUS:%{http_code}" \ ${DATAPREP_INGEST_SERVICE_ENDPOINT} \ diff --git a/MultimodalQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml b/MultimodalQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml index 6550a7e875..77a2e0bb01 100644 --- a/MultimodalQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml +++ b/MultimodalQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml @@ -226,6 +226,8 @@ services: - DATAPREP_INGEST_SERVICE_ENDPOINT=${DATAPREP_INGEST_SERVICE_ENDPOINT} - DATAPREP_GEN_TRANSCRIPT_SERVICE_ENDPOINT=${DATAPREP_GEN_TRANSCRIPT_SERVICE_ENDPOINT} - DATAPREP_GEN_CAPTION_SERVICE_ENDPOINT=${DATAPREP_GEN_CAPTION_SERVICE_ENDPOINT} + - DATAPREP_GET_FILE_ENDPOINT=${DATAPREP_GET_FILE_ENDPOINT} + - DATAPREP_DELETE_FILE_ENDPOINT=${DATAPREP_DELETE_FILE_ENDPOINT} - MEGA_SERVICE_PORT:=${MEGA_SERVICE_PORT} - UI_PORT=${UI_PORT} - DATAPREP_MMR_PORT=${DATAPREP_MMR_PORT} From a39824f1423c00bd25d8caaa0521865dc7e61723 Mon Sep 17 00:00:00 2001 From: Ervin Castelino <89144265+Ervin0307@users.noreply.github.com> Date: Wed, 23 Apr 2025 01:26:37 +0530 Subject: [PATCH 002/217] Update README.md of DBQnA (#1855) Co-authored-by: Ying Hu --- DBQnA/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DBQnA/README.md b/DBQnA/README.md index 063475c181..c13f2793b4 100644 --- a/DBQnA/README.md +++ b/DBQnA/README.md @@ -50,7 +50,7 @@ flowchart LR ### 💬 SQL Query Generation -The key feature of DBQnA app is that it converts a user's natural language query into an SQL query and automatically executes the generated SQL query on the database to return the relevant results. BAsically ask questions to database, receive corresponding SQL query and real-time query execution output, all without needing any SQL knowledge. +The key feature of DBQnA app is that it converts a user's natural language query into an SQL query and automatically executes the generated SQL query on the database to return the relevant results. 
Basically ask questions to database, receive corresponding SQL query and real-time query execution output, all without needing any SQL knowledge. --- From 48eaf9c1c9673b35f0152954679ca0d7ad7df3cd Mon Sep 17 00:00:00 2001 From: Omar Khleif Date: Tue, 22 Apr 2025 15:28:49 -0700 Subject: [PATCH 003/217] Added CodeGen Gradio README link to Docker Images List (#1864) Signed-off-by: okhleif-IL Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Abolfazl Shahbazi <12436063+ashahba@users.noreply.github.com> --- docker_images_list.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker_images_list.md b/docker_images_list.md index 83a4964934..62a2b1ea5f 100644 --- a/docker_images_list.md +++ b/docker_images_list.md @@ -16,7 +16,7 @@ Take ChatQnA for example. ChatQnA is a chatbot application service based on the | [opea/chatqna-conversation-ui](https://hub.docker.com/r/opea/chatqna-conversation-ui) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/ChatQnA/ui/docker/Dockerfile.react) | Chatqna React UI. Facilitates interaction with users, enabling chat-based Q&A with conversation history stored in the browser's local storage. | [Link](https://github.com/opea-project/GenAIExamples/blob/main/ChatQnA/ui/react/README.md) | | [opea/chatqna-ui](https://hub.docker.com/r/opea/chatqna-ui) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/ChatQnA/ui/docker/Dockerfile) | Chatqna UI entry. Facilitates interaction with users to answer questions | [Link](https://github.com/opea-project/GenAIExamples/blob/main/ChatQnA/ui/svelte/README.md) | | [opea/codegen](https://hub.docker.com/r/opea/codegen) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/CodeGen/Dockerfile) | Codegen gateway. 
Provides automatic creation of source code from high-level representations | [Link](https://github.com/opea-project/GenAIExamples/blob/main/CodeGen/README.md) | -| [opea/codegen-gradio-ui]() | [Link](https://github.com/opea-project/GenAIExamples/blob/main/CodeGen/ui/docker/Dockerfile.gradio) | Codegen Gradio UI entry. Interact with users to generate source code by providing high-level descriptions or inputs. | | +| [opea/codegen-gradio-ui](https://hub.docker.com/r/opea/codegen-gradio-ui) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/CodeGen/ui/docker/Dockerfile.gradio) | Codegen Gradio UI entry. Interact with users to generate source code by providing high-level descriptions or inputs. | [Link](https://github.com/opea-project/GenAIExamples/blob/main/CodeGen/ui/gradio/README.md) | | [opea/codegen-react-ui](https://hub.docker.com/r/opea/codegen-react-ui) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/CodeGen/ui/docker/Dockerfile.react) | Codegen React UI. Interact with users to generate appropriate code based on current user input. | [Link](https://github.com/opea-project/GenAIExamples/blob/main/CodeGen/ui/react/README.md) | | [opea/codegen-ui](https://hub.docker.com/r/opea/codegen-ui) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/CodeGen/ui/docker/Dockerfile) | Codegen UI entry. Facilitates interaction with users, automatically generate code based on user's descriptions | [Link](https://github.com/opea-project/GenAIExamples/blob/main/CodeGen/ui/svelte/README.md) | | [opea/codetrans](https://hub.docker.com/r/opea/codetrans) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/CodeTrans/Dockerfile) | Codetrans gateway. Provide services to convert source code written in one programming language to an equivalent version in another programming language. 
| [Link](https://github.com/opea-project/GenAIExamples/blob/main/CodeTrans/README.md) | From c2e9a259feff8865067eb9d0cd4d3ba93cfdcc17 Mon Sep 17 00:00:00 2001 From: Artem Astafev Date: Wed, 23 Apr 2025 12:55:01 +0700 Subject: [PATCH 004/217] Refine AuidoQnA README.MD for AMD ROCm docker compose deployment (#1862) Signed-off-by: Artem Astafev --- .../docker_compose/amd/gpu/rocm/README.md | 321 ++++++++---------- CodeGen/docker_compose/amd/gpu/rocm/README.md | 20 +- 2 files changed, 157 insertions(+), 184 deletions(-) diff --git a/AudioQnA/docker_compose/amd/gpu/rocm/README.md b/AudioQnA/docker_compose/amd/gpu/rocm/README.md index 824ddf1d3a..d26e52553c 100644 --- a/AudioQnA/docker_compose/amd/gpu/rocm/README.md +++ b/AudioQnA/docker_compose/amd/gpu/rocm/README.md @@ -1,120 +1,59 @@ -# Build Mega Service of AudioQnA on AMD ROCm GPU +# Deploying AudioQnA on AMD ROCm GPU -This document outlines the deployment process for a AudioQnA application utilizing the [GenAIComps](https://github.com/opea-project/GenAIComps.git) microservice -pipeline on server on AMD ROCm GPU platform. +This document outlines the single node deployment process for a AudioQnA application utilizing the [GenAIComps](https://github.com/opea-project/GenAIComps.git) microservices on server with AMD ROCm processing accelerators. The steps include pulling Docker images, container deployment via Docker Compose, and service execution using microservices `llm`. -## Build Docker Images +Note: The default LLM is `Intel/neural-chat-7b-v3-3`. Before deploying the application, please make sure either you've requested and been granted the access to it on [Huggingface](https://huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct) or you've downloaded the model locally from [ModelScope](https://www.modelscope.cn/models). -### 1. Build Docker Image +## Table of Contents -- #### Create application install directory and go to it: +1. [AudioQnA Quick Start Deployment](#audioqna-quick-start-deployment) +2. 
[AudioQnA Docker Compose Files](#audioqna-docker-compose-files) +3. [Validate Microservices](#validate-microservices) +4. [Conclusion](#conclusion) - ```bash - mkdir ~/audioqna-install && cd audioqna-install - ``` +## AudioQnA Quick Start Deployment -- #### Clone the repository GenAIExamples (the default repository branch "main" is used here): +This section describes how to quickly deploy and test the AudioQnA service manually on an AMD ROCm platform. The basic steps are: - ```bash - git clone https://github.com/opea-project/GenAIExamples.git - ``` +1. [Access the Code](#access-the-code) +2. [Configure the Deployment Environment](#configure-the-deployment-environment) +3. [Deploy the Services Using Docker Compose](#deploy-the-services-using-docker-compose) +4. [Check the Deployment Status](#check-the-deployment-status) +5. [Validate the Pipeline](#validate-the-pipeline) +6. [Cleanup the Deployment](#cleanup-the-deployment) - If you need to use a specific branch/tag of the GenAIExamples repository, then (v1.3 replace with its own value): +### Access the Code - ```bash - git clone https://github.com/opea-project/GenAIExamples.git && cd GenAIExamples && git checkout v1.3 - ``` - - We remind you that when using a specific version of the code, you need to use the README from this version: - -- #### Go to build directory: - - ```bash - cd ~/audioqna-install/GenAIExamples/AudioQnA/docker_image_build - ``` - -- Cleaning up the GenAIComps repository if it was previously cloned in this directory. 
- This is necessary if the build was performed earlier and the GenAIComps folder exists and is not empty: - - ```bash - echo Y | rm -R GenAIComps - ``` - -- #### Clone the repository GenAIComps (the default repository branch "main" is used here): +Clone the GenAIExample repository and access the AudioQnA AMD ROCm platform Docker Compose files and supporting scripts: ```bash -git clone https://github.com/opea-project/GenAIComps.git -cd GenAIComps +git clone https://github.com/opea-project/GenAIExamples.git +cd GenAIExamples/AudioQnA ``` -We remind you that when using a specific version of the code, you need to use the README from this version. - -- #### Setting the list of images for the build (from the build file.yaml) - - If you want to deploy a vLLM-based or TGI-based application, then the set of services is installed as follows: - - #### vLLM-based application - - ```bash - service_list="vllm-rocm whisper speecht5 audioqna audioqna-ui" - ``` - - #### TGI-based application - - ```bash - service_list="whisper speecht5 audioqna audioqna-ui" - ``` - -- #### Optional. 
Pull TGI Docker Image (Do this if you want to use TGI) - - ```bash - docker pull ghcr.io/huggingface/text-generation-inference:2.3.1-rocm - ``` - -- #### Build Docker Images - - ```bash - docker compose -f build.yaml build ${service_list} --no-cache - ``` - - After the build, we check the list of images with the command: - - ```bash - docker image ls - ``` - - The list of images should include: - - ##### vLLM-based application: - - - opea/vllm-rocm:latest - - opea/whisper:latest - - opea/speecht5:latest - - opea/audioqna:latest - - ##### TGI-based application: +Then checkout a released version, such as v1.3: - - ghcr.io/huggingface/text-generation-inference:2.3.1-rocm - - opea/whisper:latest - - opea/speecht5:latest - - opea/audioqna:latest - ---- +```bash +git checkout v1.3 +``` -## Deploy the AudioQnA Application +### Configure the Deployment Environment -### Docker Compose Configuration for AMD GPUs +#### Docker Compose GPU Configuration -To enable GPU support for AMD GPUs, the following configuration is added to the Docker Compose file: +Consult the section on [AudioQnA Service configuration](#audioqna-configuration) for information on how service specific configuration parameters affect deployments. -- compose_vllm.yaml - for vLLM-based application -- compose.yaml - for TGI-based +To enable GPU support for AMD GPUs, the following configuration is added to the Docker Compose files (`compose.yaml`, `compose_vllm.yaml`) for the LLM serving container: ```yaml +# Example for vLLM service in compose_vllm.yaml +# Note: Modern docker compose might use deploy.resources syntax instead. +# Check your docker version and compose file. shm_size: 1g devices: - /dev/kfd:/dev/kfd - /dev/dri/:/dev/dri/ +# - /dev/dri/render128:/dev/dri/render128 cap_add: - SYS_PTRACE group_add: @@ -123,131 +62,161 @@ security_opt: - seccomp:unconfined ``` -This configuration forwards all available GPUs to the container. To use a specific GPU, specify its `cardN` and `renderN` device IDs. 
For example: - -```yaml -shm_size: 1g -devices: - - /dev/kfd:/dev/kfd - - /dev/dri/card0:/dev/dri/card0 - - /dev/dri/render128:/dev/dri/render128 -cap_add: - - SYS_PTRACE -group_add: - - video -security_opt: - - seccomp:unconfined -``` - -**How to Identify GPU Device IDs:** -Use AMD GPU driver utilities to determine the correct `cardN` and `renderN` IDs for your GPU. +#### Environment Variables (`set_env*.sh`) -### Set deploy environment variables +These scripts (`set_env_vllm.sh` for vLLM, `set_env.sh` for TGI) configure crucial parameters passed to the containers. -#### Setting variables in the operating system environment: +To set up environment variables for deploying AudioQnA services, set up some parameters specific to the deployment environment and source the `set_env.sh` script in this directory: -##### Set variable HUGGINGFACEHUB_API_TOKEN: +For TGI inference usage: ```bash -### Replace the string 'your_huggingfacehub_token' with your HuggingFacehub repository access token. -export HUGGINGFACEHUB_API_TOKEN='your_huggingfacehub_token' +export host_ip="External_Public_IP" # ip address of the node +export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export http_proxy="Your_HTTP_Proxy" # http proxy if any +export https_proxy="Your_HTTPs_Proxy" # https proxy if any +export no_proxy=localhost,127.0.0.1,$host_ip,whisper-service,speecht5-service,vllm-service,tgi-service,audioqna-xeon-backend-server,audioqna-xeon-ui-server # additional no proxies if needed +export NGINX_PORT=${your_nginx_port} # your usable port for nginx, 80 for example +source ./set_env.sh ``` -#### Set variables value in set_env\*\*\*\*.sh file: - -Go to Docker Compose directory: +For vLLM inference usage ```bash -cd ~/audioqna-install/GenAIExamples/AudioQnA/docker_compose/amd/gpu/rocm +export host_ip="External_Public_IP" # ip address of the node +export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export http_proxy="Your_HTTP_Proxy" # http proxy if any +export 
https_proxy="Your_HTTPs_Proxy" # https proxy if any +export no_proxy=localhost,127.0.0.1,$host_ip,whisper-service,speecht5-service,vllm-service,tgi-service,audioqna-xeon-backend-server,audioqna-xeon-ui-server # additional no proxies if needed +export NGINX_PORT=${your_nginx_port} # your usable port for nginx, 80 for example +source ./set_env_vllm.sh ``` -The example uses the Nano text editor. You can use any convenient text editor: +### Deploy the Services Using Docker Compose -#### If you use vLLM +To deploy the AudioQnA services, execute the `docker compose up` command with the appropriate arguments. For a default deployment, execute the command below. It uses the 'compose.yaml' file. + +for TGI inference deployment ```bash -nano set_env_vllm.sh +cd docker_compose/amd/gpu/rocm +docker compose -f compose.yaml up -d ``` -#### If you use TGI +for vLLM inference deployment ```bash -nano set_env.sh +cd docker_compose/amd/gpu/rocm +docker compose -f compose_vllm.yaml up -d ``` -If you are in a proxy environment, also set the proxy-related environment variables: +> **Note**: developers should build docker image from source when: +> +> - Developing off the git main branch (as the container's ports in the repo may be different > from the published docker image). +> - Unable to download the docker image. +> - Use a specific version of Docker image. 
+ +Please refer to the table below to build different microservices from source: + +| Microservice | Deployment Guide | +| ------------ | --------------------------------------------------------------------------------------------------------------------------------- | +| vLLM | [vLLM build guide](https://github.com/opea-project/GenAIComps/tree/main/comps/third_parties/vllm#build-docker) | +| LLM | [LLM build guide](https://github.com/opea-project/GenAIComps/tree/main/comps/llms) | +| WHISPER | [Whisper build guide](https://github.com/opea-project/GenAIComps/tree/main/comps/asr/src#211-whisper-server-image) | +| SPEECHT5 | [SpeechT5 build guide](https://github.com/opea-project/GenAIComps/tree/main/comps/tts/src#211-speecht5-server-image) | +| GPT-SOVITS | [GPT-SOVITS build guide](https://github.com/opea-project/GenAIComps/tree/main/comps/third_parties/gpt-sovits/src#build-the-image) | +| MegaService | [MegaService build guide](../../../../README_miscellaneous.md#build-megaservice-docker-image) | +| UI | [Basic UI build guide](../../../../README_miscellaneous.md#build-ui-docker-image) | + +### Check the Deployment Status + +After running docker compose, check if all the containers launched via docker compose have started: + +#### For TGI inference deployment ```bash -export http_proxy="Your_HTTP_Proxy" -export https_proxy="Your_HTTPs_Proxy" +docker ps -a ``` -Set the values of the variables: +For the default deployment, the following 5 containers should have started: -- **HOST_IP, HOST_IP_EXTERNAL** - These variables are used to configure the name/address of the service in the operating system environment for the application services to interact with each other and with the outside world. 
+``` +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +d8007690868d opea/audioqna:latest "python audioqna.py" 21 seconds ago Up 19 seconds 0.0.0.0:3008->8888/tcp, [::]:3008->8888/tcp audioqna-rocm-backend-server +87ba9a1d56ae ghcr.io/huggingface/text-generation-inference:2.4.1-rocm "/tgi-entrypoint.sh …" 21 seconds ago Up 20 seconds 0.0.0.0:3006->80/tcp, [::]:3006->80/tcp tgi-service +59e869acd742 opea/speecht5:latest "python speecht5_ser…" 21 seconds ago Up 20 seconds 0.0.0.0:7055->7055/tcp, :::7055->7055/tcp speecht5-service +0143267a4327 opea/whisper:latest "python whisper_serv…" 21 seconds ago Up 20 seconds 0.0.0.0:7066->7066/tcp, :::7066->7066/tcp whisper-service +``` - If your server uses only an internal address and is not accessible from the Internet, then the values for these two variables will be the same and the value will be equal to the server's internal name/address. +### For vLLM inference deployment - If your server uses only an external, Internet-accessible address, then the values for these two variables will be the same and the value will be equal to the server's external name/address. +```bash +docker ps -a +``` - If your server is located on an internal network, has an internal address, but is accessible from the Internet via a proxy/firewall/load balancer, then the HOST_IP variable will have a value equal to the internal name/address of the server, and the EXTERNAL_HOST_IP variable will have a value equal to the external name/address of the proxy/firewall/load balancer behind which the server is located. 
+For the default deployment, the following 5 containers should have started: - We set these values in the file set_env\*\*\*\*.sh +``` +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +f3e6893a69fa opea/audioqna-ui:latest "docker-entrypoint.s…" 37 seconds ago Up 35 seconds 0.0.0.0:18039->5173/tcp, [::]:18039->5173/tcp audioqna-ui-server +f943e5cd21e9 opea/audioqna:latest "python audioqna.py" 37 seconds ago Up 35 seconds 0.0.0.0:18038->8888/tcp, [::]:18038->8888/tcp audioqna-backend-server +074e8c418f52 opea/speecht5:latest "python speecht5_ser…" 37 seconds ago Up 36 seconds 0.0.0.0:7055->7055/tcp, :::7055->7055/tcp speecht5-service +77abe498e427 opea/vllm-rocm:latest "python3 /workspace/…" 37 seconds ago Up 36 seconds 0.0.0.0:8081->8011/tcp, [::]:8081->8011/tcp audioqna-vllm-service +9074a95bb7a6 opea/whisper:latest "python whisper_serv…" 37 seconds ago Up 36 seconds 0.0.0.0:7066->7066/tcp, :::7066->7066/tcp whisper-service +``` -- **Variables with names like "**\*\*\*\*\*\*\_PORT"\*\* - These variables set the IP port numbers for establishing network connections to the application services. - The values shown in the file set_env.sh or set_env_vllm they are the values used for the development and testing of the application, as well as configured for the environment in which the development is performed. These values must be configured in accordance with the rules of network access to your environment's server, and must not overlap with the IP ports of other applications that are already in use. +If any issues are encountered during deployment, refer to the [Troubleshooting](../../../../README_miscellaneous.md#troubleshooting) section. -#### Set variables with script set_env\*\*\*\*.sh +### Validate the Pipeline -#### If you use vLLM +Once the AudioQnA services are running, test the pipeline using the following command: ```bash -. 
set_env_vllm.sh -``` +# Test the AudioQnA megaservice by recording a .wav file, encoding the file into the base64 format, and then sending the base64 string to the megaservice endpoint. +# The megaservice will return a spoken response as a base64 string. To listen to the response, decode the base64 string and save it as a .wav file. +wget https://github.com/intel/intel-extension-for-transformers/raw/refs/heads/main/intel_extension_for_transformers/neural_chat/assets/audio/sample_2.wav +base64_audio=$(base64 -w 0 sample_2.wav) -#### If you use TGI +# if you are using speecht5 as the tts service, voice can be "default" or "male" +# if you are using gpt-sovits for the tts service, you can set the reference audio following https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/gpt-sovits/src/README.md -```bash -. set_env.sh +curl http://${host_ip}:3008/v1/audioqna \ + -X POST \ + -H "Content-Type: application/json" \ + -d "{\"audio\": \"${base64_audio}\", \"max_tokens\": 64, \"voice\": \"default\"}" \ + | sed 's/^"//;s/"$//' | base64 -d > output.wav ``` -### Start the services: +**Note** : Access the AudioQnA UI by web browser through this URL: `http://${host_ip}:5173`. Please confirm the `5173` port is opened in the firewall. To validate each microservice used in the pipeline refer to the [Validate Microservices](#validate-microservices) section. 
+ +### Cleanup the Deployment + +To stop the containers associated with the deployment, execute the following command: #### If you use vLLM ```bash -docker compose -f compose_vllm.yaml up -d +cd ~/audioqna-install/GenAIExamples/AudioQnA/docker_compose/amd/gpu/rocm +docker compose -f compose_vllm.yaml down ``` #### If you use TGI ```bash -docker compose -f compose.yaml up -d +cd ~/audioqna-install/GenAIExamples/AudioQnA/docker_compose/amd/gpu/rocm +docker compose -f compose.yaml down ``` -All containers should be running and should not restart: - -##### If you use vLLM: - -- audioqna-vllm-service -- whisper-service -- speecht5-service -- audioqna-backend-server -- audioqna-ui-server +## AudioQnA Docker Compose Files -##### If you use TGI: +In the context of deploying an AudioQnA pipeline on an Intel® Xeon® platform, we can pick and choose different large language model serving frameworks, or single English TTS/multi-language TTS component. The table below outlines the various configurations that are available as part of the application. These configurations can be used as templates and can be extended to different components available in [GenAIComps](https://github.com/opea-project/GenAIComps.git). -- audioqna-tgi-service -- whisper-service -- speecht5-service -- audioqna-backend-server -- audioqna-ui-server +| File | Description | +| ---------------------------------------- | ----------------------------------------------------------------------------------------- | +| [compose_vllm.yaml](./compose_vllm.yaml) | Default compose file using vllm as serving framework and redis as vector database | +| [compose.yaml](./compose.yaml) | The LLM serving framework is TGI. All other configurations remain the same as the default | ---- - -## Validate the Services - -### 1. Validate the vLLM/TGI Service +### Validate the vLLM/TGI Service #### If you use vLLM: @@ -313,7 +282,7 @@ Checking the response from the service. 
The response should be similar to JSON: If the service response has a meaningful response in the value of the "generated_text" key, then we consider the TGI service to be successfully launched -### 2. Validate MegaServices +### Validate MegaServices Test the AudioQnA megaservice by recording a .wav file, encoding the file into the base64 format, and then sending the base64 string to the megaservice endpoint. The megaservice will return a spoken response as a base64 string. To listen @@ -327,7 +296,7 @@ curl http://${host_ip}:3008/v1/audioqna \ -H 'Content-Type: application/json' | sed 's/^"//;s/"$//' | base64 -d > output.wav ``` -### 3. Validate MicroServices +### Validate MicroServices ```bash # whisper service @@ -343,18 +312,6 @@ curl http://${host_ip}:7055/v1/tts \ -H 'Content-Type: application/json' ``` -### 4. Stop application +## Conclusion -#### If you use vLLM - -```bash -cd ~/audioqna-install/GenAIExamples/AudioQnA/docker_compose/amd/gpu/rocm -docker compose -f compose_vllm.yaml down -``` - -#### If you use TGI - -```bash -cd ~/audioqna-install/GenAIExamples/AudioQnA/docker_compose/amd/gpu/rocm -docker compose -f compose.yaml down -``` +This guide should enable developers to deploy the default configuration or any of the other compose yaml files for different configurations. It also highlights the configurable parameters that can be set before deployment. diff --git a/CodeGen/docker_compose/amd/gpu/rocm/README.md b/CodeGen/docker_compose/amd/gpu/rocm/README.md index 3635107f35..3d1c3e5190 100644 --- a/CodeGen/docker_compose/amd/gpu/rocm/README.md +++ b/CodeGen/docker_compose/amd/gpu/rocm/README.md @@ -103,7 +103,6 @@ shm_size: 1g devices: - /dev/kfd:/dev/kfd - /dev/dri/:/dev/dri/ -# - /dev/dri/render128:/dev/dri/render128 cap_add: - SYS_PTRACE group_add: @@ -112,7 +111,24 @@ security_opt: - seccomp:unconfined ``` -This configuration forwards all available GPUs to the container. 
To use a specific GPU, specify its `cardN` and `renderN` device IDs (e.g., `/dev/dri/card0:/dev/dri/card0`, `/dev/dri/render128:/dev/dri/render128`). Use AMD GPU driver utilities to identify device IDs. +This configuration forwards all available GPUs to the container. To use a specific GPU, specify its `cardN` and `renderN` device IDs (e.g., `/dev/dri/card0:/dev/dri/card0`, `/dev/dri/render128:/dev/dri/render128`). For example: + +```yaml +shm_size: 1g +devices: + - /dev/kfd:/dev/kfd + - /dev/dri/card0:/dev/dri/card0 + - /dev/dri/render128:/dev/dri/render128 +cap_add: + - SYS_PTRACE +group_add: + - video +security_opt: + - seccomp:unconfined +``` + +**How to Identify GPU Device IDs:** +Use AMD GPU driver utilities to determine the correct `cardN` and `renderN` IDs for your GPU. ### Environment Variables (`set_env*.sh`) From c39c8752119673936e3406edc7342960642d6a6e Mon Sep 17 00:00:00 2001 From: Artem Astafev Date: Wed, 23 Apr 2025 21:58:25 +0700 Subject: [PATCH 005/217] Fix compose file and functional tests for Avatarchatbot on AMD ROCm platform (#1872) Signed-off-by: Artem Astafev --- .../docker_compose/amd/gpu/rocm/compose.yaml | 22 ++----------------- .../docker_compose/amd/gpu/rocm/set_env.sh | 2 +- AvatarChatbot/tests/test_compose_on_rocm.sh | 4 ++-- 3 files changed, 5 insertions(+), 23 deletions(-) diff --git a/AvatarChatbot/docker_compose/amd/gpu/rocm/compose.yaml b/AvatarChatbot/docker_compose/amd/gpu/rocm/compose.yaml index 3bee9ab662..884e1fcf79 100644 --- a/AvatarChatbot/docker_compose/amd/gpu/rocm/compose.yaml +++ b/AvatarChatbot/docker_compose/amd/gpu/rocm/compose.yaml @@ -42,7 +42,7 @@ services: environment: TTS_ENDPOINT: ${TTS_ENDPOINT} tgi-service: - image: ghcr.io/huggingface/text-generation-inference:2.3.1-rocm + image: ghcr.io/huggingface/text-generation-inference:2.4.1-rocm container_name: tgi-service ports: - "${TGI_SERVICE_PORT:-3006}:80" @@ -66,24 +66,6 @@ services: - seccomp:unconfined ipc: host command: --model-id ${LLM_MODEL_ID} 
--max-input-length 4096 --max-total-tokens 8192 - llm: - image: ${REGISTRY:-opea}/llm-textgen:${TAG:-latest} - container_name: llm-tgi-server - depends_on: - - tgi-service - ports: - - "3007:9000" - ipc: host - environment: - no_proxy: ${no_proxy} - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} - TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT} - LLM_ENDPOINT: ${TGI_LLM_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - OPENAI_API_KEY: ${OPENAI_API_KEY} - restart: unless-stopped wav2lip-service: image: ${REGISTRY:-opea}/wav2lip:${TAG:-latest} container_name: wav2lip-service @@ -125,7 +107,7 @@ services: container_name: avatarchatbot-backend-server depends_on: - asr - - llm + - tgi-service - tts - animation ports: diff --git a/AvatarChatbot/docker_compose/amd/gpu/rocm/set_env.sh b/AvatarChatbot/docker_compose/amd/gpu/rocm/set_env.sh index b84139d906..e6a2af0984 100644 --- a/AvatarChatbot/docker_compose/amd/gpu/rocm/set_env.sh +++ b/AvatarChatbot/docker_compose/amd/gpu/rocm/set_env.sh @@ -30,7 +30,7 @@ export ANIMATION_SERVICE_HOST_IP=${host_ip} export MEGA_SERVICE_PORT=8888 export ASR_SERVICE_PORT=3001 export TTS_SERVICE_PORT=3002 -export LLM_SERVICE_PORT=3007 +export LLM_SERVICE_PORT=3006 export ANIMATION_SERVICE_PORT=3008 export DEVICE="cpu" diff --git a/AvatarChatbot/tests/test_compose_on_rocm.sh b/AvatarChatbot/tests/test_compose_on_rocm.sh index 14cc33a891..f0069ef913 100644 --- a/AvatarChatbot/tests/test_compose_on_rocm.sh +++ b/AvatarChatbot/tests/test_compose_on_rocm.sh @@ -27,7 +27,7 @@ function build_docker_images() { git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." 
- service_list="avatarchatbot whisper asr llm-textgen speecht5 tts wav2lip animation" + service_list="avatarchatbot whisper asr speecht5 tts wav2lip animation" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log docker pull ghcr.io/huggingface/text-generation-inference:2.3.1-rocm @@ -65,7 +65,7 @@ function start_services() { export MEGA_SERVICE_PORT=8888 export ASR_SERVICE_PORT=3001 export TTS_SERVICE_PORT=3002 - export LLM_SERVICE_PORT=3007 + export LLM_SERVICE_PORT=3006 export ANIMATION_SERVICE_PORT=3008 export DEVICE="cpu" From f7002fcb70bca2fc50abdb343ec9fc03170ce009 Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Thu, 24 Apr 2025 09:49:20 +0800 Subject: [PATCH 006/217] Set opea_branch for CD test (#1870) Signed-off-by: chensuyue --- .github/workflows/_example-workflow.yml | 1 + .github/workflows/_run-docker-compose.yml | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/.github/workflows/_example-workflow.yml b/.github/workflows/_example-workflow.yml index f0511e882c..bc54f6e63b 100644 --- a/.github/workflows/_example-workflow.yml +++ b/.github/workflows/_example-workflow.yml @@ -76,6 +76,7 @@ jobs: example: ${{ inputs.example }} hardware: ${{ inputs.node }} use_model_cache: ${{ inputs.use_model_cache }} + opea_branch: ${{ inputs.opea_branch }} secrets: inherit diff --git a/.github/workflows/_run-docker-compose.yml b/.github/workflows/_run-docker-compose.yml index 0b5ed02bac..24879a9759 100644 --- a/.github/workflows/_run-docker-compose.yml +++ b/.github/workflows/_run-docker-compose.yml @@ -32,6 +32,10 @@ on: required: false type: boolean default: false + opea_branch: + default: "main" + required: false + type: string jobs: get-test-case: runs-on: ubuntu-latest @@ -169,6 +173,7 @@ jobs: FINANCIAL_DATASETS_API_KEY: ${{ secrets.FINANCIAL_DATASETS_API_KEY }} IMAGE_REPO: ${{ inputs.registry }} IMAGE_TAG: ${{ inputs.tag }} + opea_branch: ${{ inputs.opea_branch }} example: ${{ inputs.example }} hardware: ${{ 
inputs.hardware }} test_case: ${{ matrix.test_case }} From db4bf1a4c345140baf5381a1d5bcd7fef717d0ce Mon Sep 17 00:00:00 2001 From: Artem Astafev Date: Thu, 24 Apr 2025 10:00:51 +0700 Subject: [PATCH 007/217] Refine README.MD for AMD ROCm docker compose deployment (#1856) Signed-off-by: Artem Astafev --- CodeGen/docker_compose/amd/gpu/rocm/README.md | 632 ++++++++++-------- 1 file changed, 348 insertions(+), 284 deletions(-) diff --git a/CodeGen/docker_compose/amd/gpu/rocm/README.md b/CodeGen/docker_compose/amd/gpu/rocm/README.md index 3d1c3e5190..f3119a2091 100644 --- a/CodeGen/docker_compose/amd/gpu/rocm/README.md +++ b/CodeGen/docker_compose/amd/gpu/rocm/README.md @@ -2,78 +2,69 @@ This README provides instructions for deploying the CodeGen application using Docker Compose on a system equipped with AMD GPUs supporting ROCm, detailing the steps to configure, run, and validate the services. This guide defaults to using the **vLLM** backend for LLM serving. +If the service response has a meaningful response in the value of the "choices.text" key, +then we consider the vLLM service to be successfully launched + ## Table of Contents -- [Steps to Run with Docker Compose (Default vLLM)](#steps-to-run-with-docker-compose-default-vllm) -- [Service Overview](#service-overview) +- [Overview](#overview) +- [Prerequisites](#prerequisites) +- [Quick Start](#quick-start) - [Available Deployment Options](#available-deployment-options) - - [compose_vllm.yaml (vLLM - Default)](#compose_vllyaml-vllm---default) - - [compose.yaml (TGI)](#composeyaml-tgi) -- [Configuration Parameters and Usage](#configuration-parameters-and-usage) - - [Docker Compose GPU Configuration](#docker-compose-gpu-configuration) - - [Environment Variables (`set_env*.sh`)](#environment-variables-set_envsh) -- [Building Docker Images Locally (Optional)](#building-docker-images-locally-optional) - - [1. Setup Build Environment](#1-setup-build-environment) - - [2. Clone Repositories](#2-clone-repositories) - - [3. 
Select Services and Build](#3-select-services-and-build) -- [Validate Service Health](#validate-service-health) - - [1. Validate the vLLM/TGI Service](#1-validate-the-vllmtgi-service) - - [2. Validate the LLM Service](#2-validate-the-llm-service) - - [3. Validate the MegaService (Backend)](#3-validate-the-megaservice-backend) - - [4. Validate the Frontend (UI)](#4-validate-the-frontend-ui) -- [How to Open the UI](#how-to-open-the-ui) + - [Default: vLLM-based Deployment (`--profile codegen-xeon-vllm`)](#default-vllm-based-deployment---profile-codegen-xeon-vllm) + - [TGI-based Deployment (`--profile codegen-xeon-tgi`)](#tgi-based-deployment---profile-codegen-xeon-tgi) +- [Configuration Parameters](#configuration-parameters) + - [Environment Variables](#environment-variables) + - [Compose Profiles](#compose-profiles) +- [Building Custom Images (Optional)](#building-custom-images-optional) +- [Validate Services](#validate-services) + - [Check Container Status](#check-container-status) + - [Run Validation Script/Commands](#run-validation-scriptcommands) +- [Accessing the User Interface (UI)](#accessing-the-user-interface-ui) + - [Gradio UI (Default)](#gradio-ui-default) + - [Svelte UI (Optional)](#svelte-ui-optional) + - [React UI (Optional)](#react-ui-optional) + - [VS Code Extension (Optional)](#vs-code-extension-optional) - [Troubleshooting](#troubleshooting) - [Stopping the Application](#stopping-the-application) - [Next Steps](#next-steps) -## Steps to Run with Docker Compose (Default vLLM) +## Overview -_This section assumes you are using pre-built images and targets the default vLLM deployment._ +This guide focuses on running the pre-configured CodeGen service using Docker Compose on AMD ROCm processing acelarating platform. It leverages containers optimized for Intel architecture for the CodeGen gateway, LLM serving (vLLM or TGI), and UI. -1. 
**Set Deploy Environment Variables:** +## CodeGen Quick Start Deployment - - Go to the Docker Compose directory: - ```bash - # Adjust path if your GenAIExamples clone is located elsewhere - cd GenAIExamples/CodeGen/docker_compose/amd/gpu/rocm - ``` - - Setting variables in the operating system environment: - - Set variable `HUGGINGFACEHUB_API_TOKEN`: - ```bash - ### Replace the string 'your_huggingfacehub_token' with your HuggingFacehub repository access token. - export HUGGINGFACEHUB_API_TOKEN='your_huggingfacehub_token' - ``` - - Edit the environment script for the **vLLM** deployment (`set_env_vllm.sh`): - ```bash - nano set_env_vllm.sh - ``` - - Configure `HOST_IP`, `EXTERNAL_HOST_IP`, `*_PORT` variables, and proxies (`http_proxy`, `https_proxy`, `no_proxy`) as described in the Configuration section below. - - Source the environment variables: - ```bash - . set_env_vllm.sh - ``` +This section describes how to quickly deploy and test the CodeGen service manually on an AMD GPU (ROCm) platform. The basic steps are: -2. **Start the Services (vLLM):** +1. [Prerequisites](#prerequisites) +2. [Generate a HuggingFace Access Token](#generate-a-huggingface-access-token) +3. [Configure the Deployment Environment](#configure-the-deployment-environment) +4. [Deploy the Services Using Docker Compose](#deploy-the-services-using-docker-compose) +5. [Check the Deployment Status](#check-the-deployment-status) +6. [Test the Pipeline](#test-the-pipeline) +7. [Cleanup the Deployment](#cleanup-the-deployment) - ```bash - docker compose -f compose_vllm.yaml up -d - ``` +## Prerequisites -3. **Verify:** Proceed to the [Validate Service Health](#validate-service-health) section after allowing time for services to start. +- Docker and Docker Compose installed. +- x86 Intel or AMD CPU. +- 4x AMD Instinct MI300X Accelerators. +- Git installed (for cloning repository). +- Hugging Face Hub API Token (for downloading models). +- Access to the internet (or a private model cache). 
+- Clone the `GenAIExamples` repository: -## Service Overview +```bash + git clone https://github.com/opea-project/GenAIExamples.git + cd GenAIExamples/CodeGen/docker_compose/amd/gpu/rocm/ +``` -When using the default `compose_vllm.yaml` (vLLM-based), the following services are deployed: +Checkout a released version, such as v1.3: -| Service Name | Default Port (Host) | Internal Port | Purpose | -| :--------------------- | :--------------------------------------------- | :------------ | :-------------------------- | -| codegen-vllm-service | `${CODEGEN_VLLM_SERVICE_PORT}` (e.g., 8028) | 8000 | LLM Serving (vLLM on ROCm) | -| codegen-llm-server | `${CODEGEN_LLM_SERVICE_PORT}` (e.g., 9000) | 80 | LLM Microservice Wrapper | -| codegen-backend-server | `${CODEGEN_BACKEND_SERVICE_PORT}` (e.g., 7778) | 80 | CodeGen MegaService/Gateway | -| codegen-ui-server | `${CODEGEN_UI_SERVICE_PORT}` (e.g., 5173) | 80 | Frontend User Interface | - -_(Note: Ports are configurable via `set_env_vllm.sh`. Check the script for actual defaults used.)_ -_(Note: The TGI deployment (`compose.yaml`) uses `codegen-tgi-service` instead of `codegen-vllm-service`)_ +```bash +git checkout v1.3 +``` ## Available Deployment Options @@ -91,6 +82,69 @@ This directory provides different Docker Compose files: ## Configuration Parameters and Usage +### Environment Variables (`set_env*.sh`) + +These scripts (`set_env_vllm.sh` for vLLM, `set_env.sh` for TGI) configure crucial parameters passed to the containers. + +This example covers the single-node on-premises deployment of the CodeGen example using OPEA components. There are various ways to enable CodeGen, but this example will focus on four options available for deploying the CodeGen pipeline to AMD ROCm AI Accelerators. This example begins with a Quick Start section and then documents how to modify deployments, leverage new models and configure the number of allocated devices. 
+ +This example includes the following sections: + +- [CodeGen Quick Start Deployment](#CodeGen-quick-start-deployment): Demonstrates how to quickly deploy a CodeGen application/pipeline on AMD GPU (ROCm) platform. +- [CodeGen Docker Compose Files](#CodeGen-docker-compose-files): Describes some example deployments and their docker compose files. +- [CodeGen Service Configuration](#CodeGen-service-configuration): Describes the services and possible configuration changes. + +**Note** This example requires access to a properly installed AMD ROCm platform with a functional Docker service configured + +## Generate a HuggingFace Access Token + +Some HuggingFace resources, such as some models, are only accessible if you have an access token. If you do not already have a HuggingFace access token, you can create one by first creating an account by following the steps provided at [HuggingFace](https://huggingface.co/) and then generating a [user access token](https://huggingface.co/docs/transformers.js/en/guides/private#step-1-generating-a-user-access-token). + +## Configure the Deployment Environment + +### Environment Variables + +Key parameters are configured via environment variables set before running `docker compose up`. + +| Environment Variable | Description | Default (Set Externally) | +| :-------------------------------------- | :------------------------------------------------------------------------------------------------------------------ | :----------------------------------------------------------------------------------------------- | +| `HOST_IP` | External IP address of the host machine. **Required.** | `your_external_ip_address` | +| `HUGGINGFACEHUB_API_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | +| `LLM_MODEL_ID` | Hugging Face model ID for the CodeGen LLM (used by TGI/vLLM service). Configured within `compose.yaml` environment. 
| `Qwen/Qwen2.5-Coder-7B-Instruct` | +| `EMBEDDING_MODEL_ID` | Hugging Face model ID for the embedding model (used by TEI service). Configured within `compose.yaml` environment. | `BAAI/bge-base-en-v1.5` | +| `LLM_ENDPOINT` | Internal URL for the LLM serving endpoint (used by `codegen-llm-server`). Configured in `compose.yaml`. | `http://codegen-tgi-server:80/generate` or `http://codegen-vllm-server:8000/v1/chat/completions` | +| `TEI_EMBEDDING_ENDPOINT` | Internal URL for the Embedding service. Configured in `compose.yaml`. | `http://codegen-tei-embedding-server:80/embed` | +| `DATAPREP_ENDPOINT` | Internal URL for the Data Preparation service. Configured in `compose.yaml`. | `http://codegen-dataprep-server:80/dataprep` | +| `BACKEND_SERVICE_ENDPOINT` | External URL for the CodeGen Gateway (MegaService). Derived from `HOST_IP` and port `7778`. | `http://${HOST_IP}:7778/v1/codegen` | +| `*_PORT` (Internal) | Internal container ports (e.g., `80`, `6379`). Defined in `compose.yaml`. | N/A | +| `http_proxy` / `https_proxy`/`no_proxy` | Network proxy settings (if required). 
| `""` | + +To set up environment variables for deploying CodeGen services, source the _setup_env.sh_ script in this directory: + +For TGI + +```bash +export host_ip="External_Public_IP" #ip address of the node +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export http_proxy="Your_HTTP_Proxy" #http proxy if any +export https_proxy="Your_HTTPs_Proxy" #https proxy if any +export no_proxy=localhost,127.0.0.1,$host_ip #additional no proxies if needed +export no_proxy=$no_proxy +source ./set_env.sh +``` + +For vLLM + +```bash +export host_ip="External_Public_IP" #ip address of the node +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export http_proxy="Your_HTTP_Proxy" #http proxy if any +export https_proxy="Your_HTTPs_Proxy" #https proxy if any +export no_proxy=localhost,127.0.0.1,$host_ip #additional no proxies if needed +export no_proxy=$no_proxy +source ./set_env_vllm.sh +``` + ### Docker Compose GPU Configuration To enable GPU support for AMD GPUs, the following configuration is added to the Docker Compose files (`compose.yaml`, `compose_vllm.yaml`) for the LLM serving container: @@ -130,300 +184,310 @@ security_opt: **How to Identify GPU Device IDs:** Use AMD GPU driver utilities to determine the correct `cardN` and `renderN` IDs for your GPU. -### Environment Variables (`set_env*.sh`) - -These scripts (`set_env_vllm.sh` for vLLM, `set_env.sh` for TGI) configure crucial parameters passed to the containers. - -| Environment Variable | Description | Example Value (Edit in Script) | -| :----------------------------- | :------------------------------------------------------------------------------------------------------- | :------------------------------- | -| `HUGGINGFACEHUB_API_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingfacehub_token` | -| `HOST_IP` | Internal/Primary IP address of the host machine. Used for inter-service communication. 
**Required.** | `192.168.1.100` | -| `EXTERNAL_HOST_IP` | External IP/hostname used to access the UI from outside. Same as `HOST_IP` if no proxy/LB. **Required.** | `192.168.1.100` | -| `CODEGEN_LLM_MODEL_ID` | Hugging Face model ID for the CodeGen LLM. | `Qwen/Qwen2.5-Coder-7B-Instruct` | -| `CODEGEN_VLLM_SERVICE_PORT` | Host port mapping for the vLLM serving endpoint (in `set_env_vllm.sh`). | `8028` | -| `CODEGEN_TGI_SERVICE_PORT` | Host port mapping for the TGI serving endpoint (in `set_env.sh`). | `8028` | -| `CODEGEN_LLM_SERVICE_PORT` | Host port mapping for the LLM Microservice wrapper. | `9000` | -| `CODEGEN_BACKEND_SERVICE_PORT` | Host port mapping for the CodeGen MegaService/Gateway. | `7778` | -| `CODEGEN_UI_SERVICE_PORT` | Host port mapping for the UI service. | `5173` | -| `http_proxy` | Network HTTP Proxy URL (if required). | `Your_HTTP_Proxy` | -| `https_proxy` | Network HTTPS Proxy URL (if required). | `Your_HTTPs_Proxy` | -| `no_proxy` | Comma-separated list of hosts to bypass proxy. Should include `localhost,127.0.0.1,$HOST_IP`. | `localhost,127.0.0.1` | - -**How to Use:** Edit the relevant `set_env*.sh` file (`set_env_vllm.sh` for the default) with your values, then source it (`. ./set_env*.sh`) before running `docker compose`. - -## Building Docker Images Locally (Optional) - -Follow these steps if you need to build the Docker images from source instead of using pre-built ones. - -### 1. Setup Build Environment - -- #### Create application install directory and go to it: - - ```bash - mkdir ~/codegen-install && cd codegen-install - ``` - -### 2. 
Clone Repositories - -- #### Clone the repository GenAIExamples (the default repository branch "main" is used here): - - ```bash - git clone https://github.com/opea-project/GenAIExamples.git - ``` - - If you need to use a specific branch/tag of the GenAIExamples repository, then (v1.3 replace with its own value): - - ```bash - git clone https://github.com/opea-project/GenAIExamples.git && cd GenAIExamples && git checkout v1.3 - ``` - - We remind you that when using a specific version of the code, you need to use the README from this version. +### Deploy the Services Using Docker Compose -- #### Go to build directory: +Please refer to the table below to build different microservices from source: - ```bash - cd ~/codegen-install/GenAIExamples/CodeGen/docker_image_build - ``` +When using the default `compose_vllm.yaml` (vLLM-based), the following services are deployed: -- Cleaning up the GenAIComps repository if it was previously cloned in this directory. - This is necessary if the build was performed earlier and the GenAIComps folder exists and is not empty: +| Service Name | Default Port (Host) | Internal Port | Purpose | +| :--------------------- | :--------------------------------------------- | :------------ | :-------------------------- | +| codegen-vllm-service | `${CODEGEN_VLLM_SERVICE_PORT}` (e.g., 8028) | 8000 | LLM Serving (vLLM on ROCm) | +| codegen-llm-server | `${CODEGEN_LLM_SERVICE_PORT}` (e.g., 9000) | 80 | LLM Microservice Wrapper | +| codegen-backend-server | `${CODEGEN_BACKEND_SERVICE_PORT}` (e.g., 7778) | 80 | CodeGen MegaService/Gateway | +| codegen-ui-server | `${CODEGEN_UI_SERVICE_PORT}` (e.g., 5173) | 80 | Frontend User Interface | - ```bash - echo Y | rm -R GenAIComps - ``` +To deploy the CodeGen services, execute the `docker compose up` command with the appropriate arguments. 
For a vLLM deployment, execute: -- #### Clone the repository GenAIComps (the default repository branch "main" is used here): +```bash +docker compose -f compose_vllm.sh up -d +``` - ```bash - git clone https://github.com/opea-project/GenAIComps.git - ``` +The CodeGen docker images should automatically be downloaded from the `OPEA registry` and deployed on the AMD GPU (ROCM) Platform: - If you use a specific tag of the GenAIExamples repository, - then you should also use the corresponding tag for GenAIComps. (v1.3 replace with its own value): +```bash +[+] Running 5/5_default Created 0.3s + ✔ Network rocm_default Created 0.3s + ✔ Container codegen-vllm-service Healthy 100.9s + ✔ Container codegen-llm-server Started 101.2s + ✔ Container codegen-backend-server Started 101.5s + ✔ Container codegen-ui-server Started 101.9s +``` - ```bash - git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout v1.3 - ``` +# To deploy the CodeGen services, execute the `docker compose up` command with the appropriate arguments. For a TGI deployment, execute: - We remind you that when using a specific version of the code, you need to use the README from this version. +``` +docker compose up -d +``` -### 3. 
Select Services and Build +The CodeGen docker images should automatically be downloaded from the `OPEA registry` and deployed on the AMD GPU (ROCM) Platform: -- #### Setting the list of images for the build (from the build file.yaml) +```bash +[+] Running 5/5_default Created 0.4s + ✔ Network rocm_default Created 0.4s + ✔ Container codegen-tgi-service Healthy 102.6s + ✔ Container codegen-llm-server Started 100.2s + ✔ Container codegen-backend-server Started 103.7s + ✔ Container codegen-ui-server Started 102.9s +``` - Select the services corresponding to your desired deployment (vLLM is the default): +## Building Custom Images (Optional) - ##### vLLM-based application (Default) +If you need to modify the microservices: - ```bash - service_list="vllm-rocm llm-textgen codegen codegen-ui" - ``` +1. Clone the [OPEA GenAIComps](https://github.com/opea-project/GenAIComps) repository. +2. Follow build instructions in the respective component directories (e.g., `comps/llms/text-generation`, `comps/codegen`, `comps/ui/gradio`, etc.). Use the provided Dockerfiles (e.g., `CodeGen/Dockerfile`, `CodeGen/ui/docker/Dockerfile.gradio`). +3. Tag your custom images appropriately (e.g., `my-custom-codegen:latest`). +4. Update the `image:` fields in the `compose.yaml` file to use your custom image tags. - ##### TGI-based application +_Refer to the main [CodeGen README](../../../../README.md) for links to relevant GenAIComps components._ - ```bash - service_list="llm-textgen codegen codegen-ui" - ``` +## Validate Services -- #### Optional. 
Pull TGI Docker Image (Do this if you plan to build/use the TGI variant) +### Check the Deployment Status for TGI base deployment - ```bash - docker pull ghcr.io/huggingface/text-generation-inference:2.3.1-rocm - ``` +After running docker compose, check if all the containers launched via docker compose have started: -- #### Build Docker Images +```bash +docker ps -a +``` - _Ensure you are in the `~/codegen-install/GenAIExamples/CodeGen/docker_image_build` directory._ +For the default deployment, the following 10 containers should have started: - ```bash - docker compose -f build.yaml build ${service_list} --no-cache - ``` +```bash +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +1d08caeae2ed opea/codegen-ui:latest "docker-entrypoint.s…" 2 minutes ago Up About a minute 0.0.0.0:18151->5173/tcp, [::]:18151->5173/tcp codegen-ui-server +f52adc66c116 opea/codegen:latest "python codegen.py" 2 minutes ago Up About a minute 0.0.0.0:18150->7778/tcp, [::]:18150->7778/tcp codegen-backend-server +4b1cb8f5d4ff opea/llm-textgen:latest "bash entrypoint.sh" 2 minutes ago Up About a minute 0.0.0.0:9000->9000/tcp, :::9000->9000/tcp codegen-llm-server +3bb4ee0abf15 ghcr.io/huggingface/text-generation-inference:2.4.1-rocm "/tgi-entrypoint.sh …" 2 minutes ago Up 2 minutes (healthy) 0.0.0.0:8028->80/tcp, [::]:8028->80/tcp codegen-tgi-service +``` - After the build, check the list of images with the command: +### Check the Deployment Status for vLLM base deployment - ```bash - docker image ls - ``` +After running docker compose, check if all the containers launched via docker compose have started: - The list of images should include (depending on `service_list`): +```bash +docker ps -a +``` - ###### vLLM-based application: +For the default deployment, the following 10 containers should have started: - - opea/vllm-rocm:latest - - opea/llm-textgen:latest - - opea/codegen:latest - - opea/codegen-ui:latest +```bash +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +f100cc326343 
opea/codegen-ui:latest "docker-entrypoint.s…" 16 minutes ago Up 14 minutes 0.0.0.0:18151->5173/tcp, [::]:18151->5173/tcp codegen-ui-server +c59de0b2da5b opea/codegen:latest "python codegen.py" 16 minutes ago Up 14 minutes 0.0.0.0:18150->7778/tcp, [::]:18150->7778/tcp codegen-backend-server +dcd83e0e4c0f opea/llm-textgen:latest "bash entrypoint.sh" 16 minutes ago Up 14 minutes 0.0.0.0:9000->9000/tcp, :::9000->9000/tcp codegen-llm-server +d091d8f2fab6 opea/vllm-rocm:latest "python3 /workspace/…" 16 minutes ago Up 16 minutes (healthy) 0.0.0.0:8028->8011/tcp, [::]:8028->8011/tcp codegen-vllm-service +``` - ###### TGI-based application: +### Test the Pipeline - - ghcr.io/huggingface/text-generation-inference:2.3.1-rocm (if pulled) - - opea/llm-textgen:latest - - opea/codegen:latest - - opea/codegen-ui:latest +### If you use vLLM: - _After building, ensure the `image:` tags in the main `compose_vllm.yaml` or `compose.yaml` (in the `amd/gpu/rocm` directory) match these built images (e.g., `opea/vllm-rocm:latest`)._ +```bash +DATA='{"model": "Qwen/Qwen2.5-Coder-7B-Instruct", '\ +'"messages": [{"role": "user", "content": "Implement a high-level API for a TODO list application. '\ +'The API takes as input an operation request and updates the TODO list in place. '\ +'If the request is invalid, raise an exception."}], "max_tokens": 256}' + +curl http://${HOST_IP}:${CODEGEN_VLLM_SERVICE_PORT}/v1/chat/completions \ + -X POST \ + -d "$DATA" \ + -H 'Content-Type: application/json' +``` -## Validate Service Health +Checking the response from the service. 
The response should be similar to JSON: + +````json +{ + "id": "chatcmpl-142f34ef35b64a8db3deedd170fed951", + "object": "chat.completion", + "created": 1742270316, + "model": "Qwen/Qwen2.5-Coder-7B-Instruct", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "```python\nfrom typing import Optional, List, Dict, Union\nfrom pydantic import BaseModel, validator\n\nclass OperationRequest(BaseModel):\n # Assuming OperationRequest is already defined as per the given text\n pass\n\nclass UpdateOperation(OperationRequest):\n new_items: List[str]\n\n def apply_and_maybe_raise(self, updatable_item: \"Updatable todo list\") -> None:\n # Assuming updatable_item is an instance of Updatable todo list\n self.validate()\n updatable_item.add_items(self.new_items)\n\nclass Updatable:\n # Abstract class for items that can be updated\n pass\n\nclass TodoList(Updatable):\n # Class that represents a todo list\n items: List[str]\n\n def add_items(self, new_items: List[str]) -> None:\n self.items.extend(new_items)\n\ndef handle_request(operation_request: OperationRequest) -> None:\n # Function to handle an operation request\n if isinstance(operation_request, UpdateOperation):\n operation_request.apply_and_maybe_raise(get_todo_list_for_update())\n else:\n raise ValueError(\"Invalid operation request\")\n\ndef get_todo_list_for_update() -> TodoList:\n # Function to get the todo list for update\n # Assuming this function returns the", + "tool_calls": [] + }, + "logprobs": null, + "finish_reason": "length", + "stop_reason": null + } + ], + "usage": { "prompt_tokens": 66, "total_tokens": 322, "completion_tokens": 256, "prompt_tokens_details": null }, + "prompt_logprobs": null +} +```` + +If the service response has a meaningful response in the value of the "choices.message.content" key, +then we consider the vLLM service to be successfully launched + +### If you use TGI: -Run these checks after starting the services to ensure they are operational. 
Focus on the vLLM checks first as it's the default. +```bash +DATA='{"inputs":"Implement a high-level API for a TODO list application. '\ +'The API takes as input an operation request and updates the TODO list in place. '\ +'If the request is invalid, raise an exception.",'\ +'"parameters":{"max_new_tokens":256,"do_sample": true}}' + +curl http://${HOST_IP}:${CODEGEN_TGI_SERVICE_PORT}/generate \ + -X POST \ + -d "$DATA" \ + -H 'Content-Type: application/json' +``` -### 1. Validate the vLLM/TGI Service +Checking the response from the service. The response should be similar to JSON: -#### If you use vLLM (Default - using `compose_vllm.yaml` and `set_env_vllm.sh`) +````json +{ + "generated_text": " The supported operations are \"add_task\", \"complete_task\", and \"remove_task\". Each operation can be defined with a corresponding function in the API.\n\nAdd your API in the following format:\n\n```\nTODO App API\n\nsupported operations:\n\noperation name description\n----------------------- ------------------------------------------------\n \n```\n\nUse type hints for function parameters and return values. Specify a text description of the API's supported operations.\n\nUse the following code snippet as a starting point for your high-level API function:\n\n```\nclass TodoAPI:\n def __init__(self, tasks: List[str]):\n self.tasks = tasks # List of tasks to manage\n\n def add_task(self, task: str) -> None:\n self.tasks.append(task)\n\n def complete_task(self, task: str) -> None:\n self.tasks = [t for t in self.tasks if t != task]\n\n def remove_task(self, task: str) -> None:\n self.tasks = [t for t in self.tasks if t != task]\n\n def handle_request(self, request: Dict[str, str]) -> None:\n operation = request.get('operation')\n if operation == 'add_task':\n self.add_task(request.get('task'))\n elif" +} +```` -- **How Tested:** Send a POST request with a sample prompt to the vLLM endpoint. 
-- **CURL Command:** +If the service response has a meaningful response in the value of the "generated_text" key, +then we consider the TGI service to be successfully launched - ```bash - DATA='{"model": "Qwen/Qwen2.5-Coder-7B-Instruct", '\ - '"messages": [{"role": "user", "content": "Implement a high-level API for a TODO list application. '\ - 'The API takes as input an operation request and updates the TODO list in place. '\ - 'If the request is invalid, raise an exception."}], "max_tokens": 256}' +### 2. Validate the LLM Service - curl http://${HOST_IP}:${CODEGEN_VLLM_SERVICE_PORT}/v1/chat/completions \ - -X POST \ - -d "$DATA" \ - -H 'Content-Type: application/json' - ``` +```bash +DATA='{"query":"Implement a high-level API for a TODO list application. '\ +'The API takes as input an operation request and updates the TODO list in place. '\ +'If the request is invalid, raise an exception.",'\ +'"max_tokens":256,"top_k":10,"top_p":0.95,"typical_p":0.95,"temperature":0.01,'\ +'"repetition_penalty":1.03,"stream":false}' + +curl http://${HOST_IP}:${CODEGEN_LLM_SERVICE_PORT}/v1/chat/completions \ + -X POST \ + -d "$DATA" \ + -H 'Content-Type: application/json' +``` -- **Sample Output:** - ```json - { - "id": "chatcmpl-142f34ef35b64a8db3deedd170fed951", - "object": "chat.completion" - // ... (rest of output) ... +Checking the response from the service. The response should be similar to JSON: + +````json +{ + "id": "cmpl-4e89a590b1af46bfb37ce8f12b2996f8", + "choices": [ + { + "finish_reason": "length", + "index": 0, + "logprobs": null, + "text": " The API should support the following operations:\n\n1. Add a new task to the TODO list.\n2. Remove a task from the TODO list.\n3. Mark a task as completed.\n4. Retrieve the list of all tasks.\n\nThe API should also support the following features:\n\n1. The ability to filter tasks based on their completion status.\n2. The ability to sort tasks based on their priority.\n3. 
The ability to search for tasks based on their description.\n\nHere is an example of how the API can be used:\n\n```python\ntodo_list = []\napi = TodoListAPI(todo_list)\n\n# Add tasks\napi.add_task(\"Buy groceries\")\napi.add_task(\"Finish homework\")\n\n# Mark a task as completed\napi.mark_task_completed(\"Buy groceries\")\n\n# Retrieve the list of all tasks\nprint(api.get_all_tasks())\n\n# Filter tasks based on completion status\nprint(api.filter_tasks(completed=True))\n\n# Sort tasks based on priority\napi.sort_tasks(priority=\"high\")\n\n# Search for tasks based on description\nprint(api.search_tasks(description=\"homework\"))\n```\n\nIn this example, the `TodoListAPI` class is used to manage the TODO list. The `add_task` method adds a new task to the list, the `mark_task_completed` method", + "stop_reason": null, + "prompt_logprobs": null + } + ], + "created": 1742270567, + "model": "Qwen/Qwen2.5-Coder-7B-Instruct", + "object": "text_completion", + "system_fingerprint": null, + "usage": { + "completion_tokens": 256, + "prompt_tokens": 37, + "total_tokens": 293, + "completion_tokens_details": null, + "prompt_tokens_details": null } - ``` -- **Expected Result:** A JSON response with a `choices[0].message.content` field containing meaningful generated code. - -#### If you use TGI (using `compose.yaml` and `set_env.sh`) +} +```` -- **How Tested:** Send a POST request with a sample prompt to the TGI endpoint. -- **CURL Command:** +## Accessing the User Interface (UI) - ```bash - DATA='{"inputs":"Implement a high-level API for a TODO list application. '\ - # ... (data payload as before) ... - '"parameters":{"max_new_tokens":256,"do_sample": true}}' +Multiple UI options can be configured via the `compose.yaml`. 
- curl http://${HOST_IP}:${CODEGEN_TGI_SERVICE_PORT}/generate \ - -X POST \ - -d "$DATA" \ - -H 'Content-Type: application/json' - ``` +### Svelte UI (Optional) -- **Sample Output:** - ```json - { - "generated_text": " The supported operations are \"add_task\", \"complete_task\", and \"remove_task\". # ... (generated code) ..." - } - ``` -- **Expected Result:** A JSON response with a `generated_text` field containing meaningful generated code. +1. Modify `compose.yaml`: Comment out the `codegen-gradio-ui-server` service and uncomment/add the `codegen-xeon-ui-server` (Svelte) service definition, ensuring the port mapping is correct (e.g., `"- 5173:5173"`). +2. Restart Docker Compose: `docker compose --profile up -d` +3. Access: `http://{HOST_IP}:5173` (or the host port you mapped). -### 2. Validate the LLM Service +![Svelte UI Init](../../../../assets/img/codeGen_ui_init.jpg) -- **Service Name:** `codegen-llm-server` -- **How Tested:** Send a POST request to the LLM microservice wrapper endpoint. -- **CURL Command:** - - ```bash - DATA='{"query":"Implement a high-level API for a TODO list application. '\ - # ... (data payload as before) ... - '"repetition_penalty":1.03,"stream":false}' - - curl http://${HOST_IP}:${CODEGEN_LLM_SERVICE_PORT}/v1/chat/completions \ - -X POST \ - -d "$DATA" \ - -H 'Content-Type: application/json' - ``` - -- **Sample Output:** (Structure may vary slightly depending on whether vLLM or TGI is backend) - ```json - { - "id": "cmpl-4e89a590b1af46bfb37ce8f12b2996f8" // Example ID - // ... (output structure depends on backend, check original validation) ... - } - ``` -- **Expected Result:** A JSON response containing meaningful generated code within the `choices` array. +### VS Code Extension (Optional) -### 3. Validate the MegaService (Backend) +Users can interact with the backend service using the `Neural Copilot` VS Code extension. 
-- **Service Name:** `codegen-backend-server` -- **How Tested:** Send a POST request to the main CodeGen gateway endpoint. -- **CURL Command:** +1. **Install:** Find and install `Neural Copilot` from the VS Code Marketplace. + ![Install Copilot](../../../../assets/img/codegen_copilot.png) +2. **Configure:** Set the "Service URL" in the extension settings to your CodeGen backend endpoint: `http://${HOST_IP}:7778/v1/codegen` (use the correct port if changed). + ![Configure Endpoint](../../../../assets/img/codegen_endpoint.png) +3. **Usage:** + - **Inline Suggestion:** Type a comment describing the code you want (e.g., `# Python function to read a file`) and wait for suggestions. + ![Code Suggestion](../../../../assets/img/codegen_suggestion.png) + - **Chat:** Use the Neural Copilot panel to chat with the AI assistant about code. + ![Chat Dialog](../../../../assets/img/codegen_dialog.png) - ```bash - DATA='{"messages": "Implement a high-level API for a TODO list application. '\ - # ... (data payload as before) ... - 'If the request is invalid, raise an exception."}' +## Troubleshooting - curl http://${HOST_IP}:${CODEGEN_BACKEND_SERVICE_PORT}/v1/codegen \ - -H "Content-Type: application/json" \ - -d "$DATA" - ``` +- **Model Download Issues:** Check `HUGGINGFACEHUB_API_TOKEN`. Ensure internet connectivity or correct proxy settings. Check logs of `tgi-service`/`vllm-service` and `tei-embedding-server`. Gated models need prior Hugging Face access. +- **Connection Errors:** Verify `HOST_IP` is correct and accessible. Check `docker ps` for port mappings. Ensure `no_proxy` includes `HOST_IP` if using a proxy. Check logs of the service failing to connect (e.g., `codegen-backend-server` logs if it can't reach `codegen-llm-server`). +- **"Container name is in use"**: Stop existing containers (`docker compose down`) or change `container_name` in `compose.yaml`. +- **Resource Issues:** CodeGen models can be memory-intensive. Monitor host RAM usage. 
Increase Docker resources if needed. -- **Sample Output:** - ```textmate - data: {"id":"cmpl-...", ...} - # ... more data chunks ... - data: [DONE] - ``` -- **Expected Result:** A stream of server-sent events (SSE) containing JSON data with generated code tokens, ending with `data: [DONE]`. +### Cleanup the Deployment -### 4. Validate the Frontend (UI) +To stop the containers associated with the deployment, execute the following command: -- **Service Name:** `codegen-ui-server` -- **How Tested:** Access the UI URL in a web browser and perform a test query. -- **Steps:** See [How to Open the UI](#how-to-open-the-ui). -- **Expected Result:** The UI loads correctly, and submitting a prompt results in generated code displayed on the page. +```bash +docker compose -f compose.yaml down +``` -## How to Open the UI +```bash +[+] Running 0/1 +[+] Running 1/2degen-ui-server Stopping 0.4s +[+] Running 2/3degen-ui-server Removed 10.5s +[+] Running 2/3degen-ui-server Removed 10.5s +[+] Running 3/4degen-ui-server Removed 10.5s +[+] Running 5/5degen-ui-server Removed 10.5s + ✔ Container codegen-ui-server Removed 10.5s + ✔ Container codegen-backend-server Removed 10.4s + ✔ Container codegen-llm-server Removed 10.4s + ✔ Container codegen-tgi-service Removed 8.0s + ✔ Network rocm_default Removed 0.6s +``` -1. Determine the UI access URL using the `EXTERNAL_HOST_IP` and `CODEGEN_UI_SERVICE_PORT` variables defined in your sourced `set_env*.sh` file (use `set_env_vllm.sh` for the default vLLM deployment). The default URL format is: - `http://${EXTERNAL_HOST_IP}:${CODEGEN_UI_SERVICE_PORT}` - (e.g., `http://192.168.1.100:5173`) +### compose.yaml - TGI Deployment -2. Open this URL in your web browser. +The TGI (Text Generation Inference) deployment and the default deployment differ primarily in their service configurations and specific focus on handling large language models (LLMs). 
The TGI deployment includes a unique `codegen-tgi-service`, which utilizes the `ghcr.io/huggingface/text-generation-inference:2.4.1-rocm` image and is specifically configured to run on AMD hardware. -3. You should see the CodeGen starting page: - ![UI start page](../../../../assets/img/ui-starting-page.png) +| Service Name | Image Name | AMD Use | +| ---------------------- | -------------------------------------------------------- | ------- | +| codegen-backend-server | opea/codegen:latest | no | +| codegen-llm-server | opea/codegen:latest | no | +| codegen-tgi-service | ghcr.io/huggingface/text-generation-inference:2.4.1-rocm | yes | +| codegen-ui-server | opea/codegen-ui:latest | no | -4. Enter a prompt in the input field (e.g., "Write a Python code that returns the current time and date") and press Enter or click the submit button. +### compose_vllm.yaml - vLLM Deployment -5. Verify that the generated code appears correctly: - ![UI result page](../../../../assets/img/ui-result-page.png) +The vLLM deployment utilizes AMD devices primarily for the `vllm-service`, which handles large language model (LLM) tasks. This service is configured to maximize the use of AMD's capabilities, potentially allocating multiple devices to enhance parallel processing and throughput. -## Troubleshooting +| Service Name | Image Name | AMD Use | +| ---------------------- | ---------------------- | ------- | +| codegen-backend-server | opea/codegen:latest | no | +| codegen-llm-server | opea/codegen:latest | no | +| codegen-vllm-service | opea/vllm-rocm:latest | yes | +| codegen-ui-server | opea/codegen-ui:latest | no | -_(No specific troubleshooting steps provided in the original content for this file. Add common issues if known.)_ +## CodeGen Service Configuration -- Check container logs (`docker compose -f logs `), especially for `codegen-vllm-service` or `codegen-tgi-service`. -- Ensure `HUGGINGFACEHUB_API_TOKEN` is correct. -- Verify ROCm drivers and Docker setup for GPU access. 
-- Confirm network connectivity and proxy settings. -- Ensure `HOST_IP` and `EXTERNAL_HOST_IP` are correctly set and accessible. -- If building locally, ensure build steps completed without error and image tags match compose file. +The table provides a comprehensive overview of the CodeGen services utilized across various deployments as illustrated in the example Docker Compose files. Each row in the table represents a distinct service, detailing its possible images used to enable it and a concise description of its function within the deployment architecture. These services collectively enable functionalities such as data storage and management, text embedding, retrieval, reranking, and large language model processing. -## Stopping the Application +ex.: (From ChatQna) +| Service Name | Possible Image Names | Optional | Description +| redis-vector-db | redis/redis-stack:7.2.0-v9 | No | Acts as a Redis database for storing and managing -### If you use vLLM (Default) +## Conclusion -```bash -# Ensure you are in the correct directory -# cd GenAIExamples/CodeGen/docker_compose/amd/gpu/rocm -docker compose -f compose_vllm.yaml down -``` +In the configuration of the `vllm-service` and the `tgi-service`, two variables play a primary role in determining the service's performance and functionality. The `LLM_MODEL_ID` parameter specifies the particular large language model (LLM) that the service will utilize, effectively determining the capabilities and characteristics of the language processing tasks it can perform. This model identifier ensures that the service is aligned with the specific requirements of the application, whether it involves text generation, comprehension, or other language-related tasks. -### If you use TGI +However, developers need to be aware of the models that have been tested with the respective service image supporting the `vllm-service` and `tgi-service`. 
For example, documentation for the OPEA GenAIComps v1.0 release specify the list of [validated LLM models](https://github.com/opea-project/GenAIComps/blob/v1.0/comps/llms/text-generation/README.md#validated-llm-models) for each AMD ROCm enabled service image. Specific models may have stringent requirements on the number of AMD ROCm devices required to support them. -```bash -# Ensure you are in the correct directory -# cd GenAIExamples/CodeGen/docker_compose/amd/gpu/rocm -docker compose -f compose.yaml down -``` +This guide should enable developer to deploy the default configuration or any of the other compose yaml files for different configurations. It also highlights the configurable parameters that can be set before deployment. ## Next Steps -- Explore the alternative TGI deployment option if needed. -- Refer to the main [CodeGen README](../../../../README.md) for architecture details and links to other deployment methods (Kubernetes, Xeon). - Consult the [OPEA GenAIComps](https://github.com/opea-project/GenAIComps) repository for details on individual microservices. +- Refer to the main [CodeGen README](../../../../README.md) for links to benchmarking and Kubernetes deployment options. From 1787d1ee9812b9bff786d88173e75e822deea2b9 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Thu, 24 Apr 2025 13:34:41 +0800 Subject: [PATCH 008/217] Update image links. (#1866) Signed-off-by: ZePan110 --- docker_images_list.md | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/docker_images_list.md b/docker_images_list.md index 62a2b1ea5f..401cdc5872 100644 --- a/docker_images_list.md +++ b/docker_images_list.md @@ -29,7 +29,7 @@ Take ChatQnA for example. ChatQnA is a chatbot application service based on the | [opea/edgecraftrag](https://hub.docker.com/r/opea/edgecraftrag) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/EdgeCraftRAG/Dockerfile) | Edge Craft RAG (EC-RAG) gateway. 
Provides a customizable, production-ready retrieval-enhanced generation system that is optimized for edge solutions. | [Link](https://github.com/opea-project/GenAIExamples/blob/main/EdgeCraftRAG/README.md) | | [opea/edgecraftrag-server](https://hub.docker.com/r/opea/edgecraftrag-server) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/EdgeCraftRAG/Dockerfile.server) | Edge Craft RAG (EC-RAG) server, Provides a customizable, production-ready retrieval-enhanced generation system that is optimized for edge solutions. | [Link](https://github.com/opea-project/GenAIExamples/blob/main/EdgeCraftRAG/README.md) | | [opea/edgecraftrag-ui](https://hub.docker.com/r/opea/edgecraftrag-ui) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/EdgeCraftRAG/ui/docker/Dockerfile.ui) | Edge Craft RAG (EC-RAG) UI entry. Ensuring high-quality, performant interactions tailored for edge environments. | | -| [opea/edgecraftrag-ui-gradio]() | [Link](https://github.com/opea-project/GenAIExamples/blob/main/EdgeCraftRAG/ui/docker/Dockerfile.gradio) | Edge Craft RAG (EC-RAG) Gradio UI entry. Interact with users to provide a customizable, production-ready retrieval-enhanced generation system optimized for edge solutions. | | +| [opea/edgecraftrag-ui-gradio](https://hub.docker.com/r/opea/edgecraftrag-ui-gradio) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/EdgeCraftRAG/ui/docker/Dockerfile.gradio) | Edge Craft RAG (EC-RAG) Gradio UI entry. Interact with users to provide a customizable, production-ready retrieval-enhanced generation system optimized for edge solutions. | | | [opea/graphrag](https://hub.docker.com/r/opea/graphrag) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/GraphRAG/Dockerfile) | GraphRAG gateway, Local and global queries are processed using knowledge graphs extracted from source documents. 
| [Link](https://github.com/opea-project/GenAIExamples/blob/main/GraphRAG/README.md) | | [opea/graphrag-react-ui](https://hub.docker.com/r/opea/graphrag-react-ui) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/GraphRAG/ui/docker/Dockerfile.react) | Graphrag React UI entry. Facilitates interaction with users, enabling queries and providing relevant answers using knowledge graphs. | [Link](https://github.com/opea-project/GenAIExamples/blob/main/GraphRAG/ui/react/README.md) | | [opea/graphrag-ui](https://hub.docker.com/r/opea/graphrag-ui) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/GraphRAG/ui/docker/Dockerfile) | Graphrag UI entry. Interact with users to facilitate queries and provide relevant answers using knowledge graphs. | [Link](https://github.com/opea-project/GenAIExamples/blob/main/GraphRAG/ui/svelte/README.md) | @@ -54,7 +54,7 @@ Take ChatQnA for example. ChatQnA is a chatbot application service based on the | [opea/animation](https://hub.docker.com/r/opea/animation) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/animation/src/Dockerfile) | OPEA Avatar Animation microservice for GenAI applications | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/animation/src/README.md) | | [opea/asr](https://hub.docker.com/r/opea/asr) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/asr/src/Dockerfile) | OPEA Audio-Speech-Recognition microservice for GenAI applications | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/asr/src/README.md) | | [opea/chathistory-mongo](https://hub.docker.com/r/opea/chathistory-mongo) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/chathistory/src/Dockerfile) | OPEA Chat History microservice is based on a MongoDB database and is designed to allow users to store, retrieve and manage chat conversations. 
| [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/chathistory/src/README.md) | -| [opea/comps-base]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/Dockerfile) | OPEA Microservice base image. | [Link](https://github.com/opea-project/GenAIComps/blob/main/README.md) | +| [opea/comps-base](https://hub.docker.com/r/opea/comps-base) | [Link](https://github.com/opea-project/GenAIComps/blob/main/Dockerfile) | OPEA Microservice base image. | [Link](https://github.com/opea-project/GenAIComps/blob/main/README.md) | | [opea/dataprep](https://hub.docker.com/r/opea/dataprep) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/src/Dockerfile) | OPEA data preparation microservices for GenAI applications | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/README.md) | | [opea/embedding](https://hub.docker.com/r/opea/embedding) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/embeddings/src/Dockerfile) | OPEA mosec embedding microservice for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/embeddings/src/README.md) | | [opea/embedding-multimodal-bridgetower](https://hub.docker.com/r/opea/embedding-multimodal-bridgetower) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/bridgetower/src/Dockerfile) | OPEA multimodal embedded microservices based on bridgetower for use by GenAI applications | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/bridgetower/src/README.md) | @@ -63,7 +63,7 @@ Take ChatQnA for example. ChatQnA is a chatbot application service based on the | [opea/feedbackmanagement-mongo](https://hub.docker.com/r/opea/feedbackmanagement-mongo) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/feedback_management/src/Dockerfile) | OPEA feedback management microservice uses MongoDB database for GenAI applications. 
| [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/feedback_management/src/README.md) | | [opea/finetuning](https://hub.docker.com/r/opea/finetuning) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/finetuning/src/Dockerfile) | OPEA Fine-tuning microservice for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/finetuning/src/README.md) | | [opea/finetuning-gaudi](https://hub.docker.com/r/opea/finetuning-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/finetuning/src/Dockerfile.intel_hpu) | OPEA Fine-tuning microservice for GenAI application use on the Gaudi | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/finetuning/src/README.md) | -| [opea/finetuning-xtune]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/finetuning/src/Dockerfile.xtune) | OPEA Fine-tuning microservice base on Xtune for GenAI application use on the Arc A770 | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/finetuning/src/README.md) | +| [opea/finetuning-xtune](https://hub.docker.com/r/opea/finetuning-xtune) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/finetuning/src/Dockerfile.xtune) | OPEA Fine-tuning microservice base on Xtune for GenAI application use on the Arc A770 | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/finetuning/src/README.md) | | [opea/gpt-sovits](https://hub.docker.com/r/opea/gpt-sovits) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/gpt-sovits/src/Dockerfile) | OPEA GPT-SoVITS service for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/gpt-sovits/src/README.md) | | [opea/guardrails](https://hub.docker.com/r/opea/guardrails) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/guardrails/src/guardrails/Dockerfile) | OPEA guardrail microservice for GenAI application | 
[Link](https://github.com/opea-project/GenAIComps/blob/main/comps/guardrails/src/guardrails/README.md) | | [opea/guardrails-bias-detection](https://hub.docker.com/r/opea/guardrails-bias-detection) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/guardrails/src/bias_detection/Dockerfile) | OPEA guardrail microservice to provide bias detection for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/guardrails/src/bias_detection/README.md) | @@ -76,19 +76,19 @@ Take ChatQnA for example. ChatQnA is a chatbot application service based on the | [opea/image2image-gaudi](https://hub.docker.com/r/opea/image2image-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/image2image/src/Dockerfile.intel_hpu) | OPEA Image-to-Image microservice for GenAI application use on the Gaudi. | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/image2image/src/README.md) | | [opea/image2video](https://hub.docker.com/r/opea/image2video) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/image2video/src/Dockerfile) | OPEA image-to-video microservice for GenAI application. | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/image2video/src/README.md) | | [opea/image2video-gaudi](https://hub.docker.com/r/opea/image2video-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/image2video/src/Dockerfile.intel_hpu) | OPEA image-to-video microservice for GenAI application use on the Gaudi. | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/image2video/src/README.md) | -| [opea/ipex-llm]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/ipex/src/Dockerfile) | OPEA is a Large Language Model (LLM) service based on intel-extension-for-pytorch. It provides specialized optimizations, including technical points like paged attention, ROPE fusion, etc. 
| [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/ipex/src/README.md) | +| [opea/ipex-llm](https://hub.docker.com/r/opea/ipex-llm) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/ipex/src/Dockerfile) | OPEA is a Large Language Model (LLM) service based on intel-extension-for-pytorch. It provides specialized optimizations, including technical points like paged attention, ROPE fusion, etc. | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/ipex/src/README.md) | | [opea/llm-docsum](https://hub.docker.com/r/opea/llm-docsum) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/src/doc-summarization/Dockerfile) | OPEA LLM microservice upon docsum docker image for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/src/doc-summarization/README.md) | | [opea/llm-eval](https://hub.docker.com/r/opea/llm-eval) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/utils/lm-eval/Dockerfile) | OPEA LLM microservice upon eval docker image for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/utils/lm-eval/README.md) | | [opea/llm-faqgen](https://hub.docker.com/r/opea/llm-faqgen) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/src/faq-generation/Dockerfile) | OPEA FAQ Generation Microservice is designed to generate frequently asked questions from document input using the HuggingFace Text Generation Inference (TGI) framework. 
| [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/src/faq-generation/README.md) | | [opea/llm-textgen](https://hub.docker.com/r/opea/llm-textgen) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/src/text-generation/Dockerfile) | OPEA LLM microservice upon textgen docker image for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/src/text-generation/README.md) | | [opea/llm-textgen-gaudi](https://hub.docker.com/r/opea/llm-textgen-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/src/text-generation/Dockerfile.intel_hpu) | OPEA LLM microservice upon textgen docker image for GenAI application use on the Gaudi2 | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/src/text-generation/README.md) | -| [opea/llm-textgen-phi4-gaudi]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/src/text-generation/Dockerfile.intel_hpu_phi4) | OPEA LLM microservice upon textgen docker image for GenAI application use on the Gaudi2 with Phi4 optimization. | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/src/text-generation/README_native.md) | +| [opea/llm-textgen-phi4-gaudi](https://hub.docker.com/r/opea/llm-textgen-phi4-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/src/text-generation/Dockerfile.intel_hpu_phi4) | OPEA LLM microservice upon textgen docker image for GenAI application use on the Gaudi2 with Phi4 optimization. 
| [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/llms/src/text-generation/README_native.md) | | [opea/lvm](https://hub.docker.com/r/opea/lvm) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/lvms/src/Dockerfile) | OPEA large visual model (LVM) microservice for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/lvms/src/README.md) | | [opea/lvm-llama-vision](https://hub.docker.com/r/opea/lvm-llama-vision) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/llama-vision/src/Dockerfile) | OPEA microservice running Llama Vision as a large visualization model (LVM) server for GenAI applications | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/llama-vision/src/README.md) | | [opea/lvm-llama-vision-guard](https://hub.docker.com/r/opea/lvm-llama-vision-guard) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/llama-vision/src/Dockerfile.guard) | OPEA microservice running Llama Vision Guard as a large visualization model (LVM) server for GenAI applications | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/llama-vision/src/README.md) | | [opea/lvm-llama-vision-tp](https://hub.docker.com/r/opea/lvm-llama-vision-tp) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/llama-vision/src/Dockerfile.tp) | OPEA microservice running Llama Vision with DeepSpeed as a large visualization model (LVM) server for GenAI applications | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/llama-vision/src/README.md) | | [opea/lvm-llava](https://hub.docker.com/r/opea/lvm-llava) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/llava/src/Dockerfile) | OPEA microservice running LLaVA as a large visualization model (LVM) server for GenAI applications | 
[Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/llava/src/README.md) | -| [opea/lvm-llava-gaudi]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/llava/src/Dockerfile.intel_hpu) | OPEA microservice running LLaVA as a large visualization model (LVM) server for GenAI applications on the Gaudi2 | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/llava/src/README.md) | +| [opea/lvm-llava-gaudi](https://hub.docker.com/r/opea/lvm-llava-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/llava/src/Dockerfile.intel_hpu) | OPEA microservice running LLaVA as a large visualization model (LVM) server for GenAI applications on the Gaudi2 | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/llava/src/README.md) | | [opea/lvm-predictionguard](https://hub.docker.com/r/opea/lvm-predictionguard) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/predictionguard/src/Dockerfile) | OPEA microservice running PredictionGuard as a large visualization model (LVM) server for GenAI applications | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/predictionguard/src/README.md) | | [opea/lvm-video-llama](https://hub.docker.com/r/opea/lvm-video-llama) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/video-llama/src/Dockerfile) | OPEA microservice running Video-Llama as a large visualization model (LVM) server for GenAI applications | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/video-llama/src/README.md) | | [opea/nginx](https://hub.docker.com/r/opea/nginx) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/nginx/src/Dockerfile) | OPEA nginx microservice for GenAI application | 
[Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/nginx/deployment/kubernetes/README.md) | @@ -98,20 +98,20 @@ Take ChatQnA for example. ChatQnA is a chatbot application service based on the | [opea/retriever](https://hub.docker.com/r/opea/retriever) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/src/Dockerfile) | OPEA retrieval microservice for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/README.md) | | [opea/speecht5](https://hub.docker.com/r/opea/speecht5) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/speecht5/src/Dockerfile) | OPEA SpeechT5 service for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/tts/src/README.md) | | [opea/speecht5-gaudi](https://hub.docker.com/r/opea/speecht5-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/speecht5/src/Dockerfile.intel_hpu) | OPEA SpeechT5 service on the Gaudi2 for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/tts/src/README.md) | -| [opea/struct2graph]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/struct2graph/src/Dockerfile) | OPEA struct-to-graph service for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/struct2graph/src/README.md) | +| [opea/struct2graph](https://hub.docker.com/r/opea/struct2graph) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/struct2graph/src/Dockerfile) | OPEA struct-to-graph service for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/struct2graph/src/README.md) | | [opea/text2cypher-gaudi]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/text2cypher/src/Dockerfile.intel_hpu) | OPEA Text-to-Cypher microservice for GenAI application use on the Gaudi2. 
| [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/text2cypher/src/README.md) | -| [opea/text2graph]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/text2graph/src/Dockerfile) | OPEA Text-to-Graph microservice for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/text2graph/src/README.md) | +| [opea/text2graph](https://hub.docker.com/r/opea/text2graph) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/text2graph/src/Dockerfile) | OPEA Text-to-Graph microservice for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/text2graph/src/README.md) | | [opea/text2image](https://hub.docker.com/r/opea/text2image) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/text2image/src/Dockerfile) | OPEA text-to-image microservice for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/text2image/src/README.md) | | [opea/text2image-gaudi](https://hub.docker.com/r/opea/text2image-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/text2image/src/Dockerfile.intel_hpu) | OPEA text-to-image microservice for GenAI application use on the Gaudi | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/text2image/src/README.md) | | [opea/text2image-ui](https://hub.docker.com/r/opea/text2image-ui) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/Text2Image/ui/docker/Dockerfile) | OPEA text-to-image microservice UI entry for GenAI application | [Link](https://github.com/opea-project/GenAIExamples/blob/main/Text2Image/README.md) | | [opea/text2sql](https://hub.docker.com/r/opea/text2sql) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/text2sql/src/Dockerfile) | OPEA text to Structured Query Language microservice for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/text2sql/src/README.md) | | 
[opea/text2sql-react-ui](https://hub.docker.com/r/opea/text2sql-react-ui) | [Link](https://github.com/opea-project/GenAIExamples/blob/main/DBQnA/ui/docker/Dockerfile.react) | OPEA text to Structured Query Language microservice react UI entry for GenAI application | [Link](https://github.com/opea-project/GenAIExamples/blob/main/DBQnA/README.md) | | [opea/tts](https://hub.docker.com/r/opea/tts) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/tts/src/Dockerfile) | OPEA Text-To-Speech microservice for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/tts/src/README.md) | -| [opea/vllm](https://hub.docker.com/r/opea/vllm) | [Link](https://github.com/HabanaAI/vllm-fork/blob/habana_main/Dockerfile.cpu) | Deploying and servicing VLLM models based on VLLM projects | [Link](https://github.com/HabanaAI/vllm-fork/blob/habana_main/README.md) | +| [opea/vllm](https://hub.docker.com/r/opea/vllm) | [Link](https://github.com/vllm-project/vllm/blob/v0.8.3/docker/Dockerfile.cpu) | Deploying and servicing VLLM models based on VLLM projects | [Link](https://github.com/vllm-project/vllm/blob/v0.8.3/README.md) | | [opea/vllm-arc](https://hub.docker.com/r/opea/vllm-arc) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/vllm/src/Dockerfile.intel_gpu) | Deploying and servicing VLLM models on Arc based on VLLM projects | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/vllm/README.md) | | [opea/vllm-gaudi](https://hub.docker.com/r/opea/vllm-gaudi) | [Link](https://github.com/HabanaAI/vllm-fork/blob/v0.6.6.post1%2BGaudi-1.20.0/Dockerfile.hpu) | Deploying and servicing VLLM models on Gaudi2 based on VLLM project | [Link](https://github.com/HabanaAI/vllm-fork/blob/habana_main/README.md) | | [opea/vllm-openvino](https://hub.docker.com/r/opea/vllm-openvino) | [Link](https://github.com/vllm-project/vllm/blob/v0.6.1/Dockerfile.openvino) | VLLM Model for Deploying and Serving 
Openvino Framework Based on VLLM Project | [Link](https://github.com/vllm-project/vllm/blob/main/README.md) | -| [opea/vllm-rocm]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/vllm/src/Dockerfile.amd_gpu) | Deploying and servicing VLLM models on AMD Rocm based on VLLM project | | +| [opea/vllm-rocm](https://hub.docker.com/r/opea/vllm-rocm) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/vllm/src/Dockerfile.amd_gpu) | Deploying and servicing VLLM models on AMD Rocm based on VLLM project | | | [opea/wav2lip](https://hub.docker.com/r/opea/wav2lip) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/wav2lip/src/Dockerfile) | OPEA Generate lip movements from audio files microservice with Pathway for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/wav2lip/deployment/kubernetes/README.md) | | [opea/wav2lip-gaudi](https://hub.docker.com/r/opea/wav2lip-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/wav2lip/src/Dockerfile.intel_hpu) | OPEA Generate lip movements from audio files microservice with Pathway for GenAI application use on the Gaudi2 | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/wav2lip/deployment/kubernetes/README.md) | | [opea/web-retriever](https://hub.docker.com/r/opea/web-retriever)
| [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/web_retrievers/src/Dockerfile) | OPEA retrieval microservice based on chroma vectordb for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/web_retrievers/src/README.md) | From 13ea13862ac9ecd5b0efd997bb14174ff03da246 Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Thu, 24 Apr 2025 13:47:56 +0800 Subject: [PATCH 009/217] Remove proxy in CodeTrans test (#1874) Signed-off-by: chensuyue --- ChatQnA/tests/test_compose_milvus_on_xeon.sh | 1 - CodeTrans/tests/test_compose_on_gaudi.sh | 3 --- CodeTrans/tests/test_compose_on_rocm.sh | 2 -- CodeTrans/tests/test_compose_on_xeon.sh | 2 -- CodeTrans/tests/test_compose_tgi_on_gaudi.sh | 2 -- CodeTrans/tests/test_compose_tgi_on_xeon.sh | 2 -- CodeTrans/tests/test_compose_vllm_on_rocm.sh | 2 -- 7 files changed, 14 deletions(-) diff --git a/ChatQnA/tests/test_compose_milvus_on_xeon.sh b/ChatQnA/tests/test_compose_milvus_on_xeon.sh index 06e0fe1db3..19beb81752 100644 --- a/ChatQnA/tests/test_compose_milvus_on_xeon.sh +++ b/ChatQnA/tests/test_compose_milvus_on_xeon.sh @@ -41,7 +41,6 @@ function build_docker_images() { } function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon/ - export no_proxy=${no_proxy},${ip_address} export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" export RERANK_MODEL_ID="BAAI/bge-reranker-base" export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" diff --git a/CodeTrans/tests/test_compose_on_gaudi.sh b/CodeTrans/tests/test_compose_on_gaudi.sh index 8d4691f849..a9bf0c0f79 100644 --- a/CodeTrans/tests/test_compose_on_gaudi.sh +++ b/CodeTrans/tests/test_compose_on_gaudi.sh @@ -43,9 +43,6 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - - export http_proxy=${http_proxy} - export https_proxy=${http_proxy} export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" export LLM_ENDPOINT="http://${ip_address}:8008" export 
LLM_COMPONENT_NAME="OpeaTextGenService" diff --git a/CodeTrans/tests/test_compose_on_rocm.sh b/CodeTrans/tests/test_compose_on_rocm.sh index b0cbbd62a1..16b25c78d0 100644 --- a/CodeTrans/tests/test_compose_on_rocm.sh +++ b/CodeTrans/tests/test_compose_on_rocm.sh @@ -42,8 +42,6 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/amd/gpu/rocm/ - export http_proxy=${http_proxy} - export https_proxy=${http_proxy} export CODETRANS_TGI_SERVICE_PORT=8008 export CODETRANS_LLM_SERVICE_PORT=9000 export CODETRANS_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" diff --git a/CodeTrans/tests/test_compose_on_xeon.sh b/CodeTrans/tests/test_compose_on_xeon.sh index 8b279b2f2e..7b27375682 100644 --- a/CodeTrans/tests/test_compose_on_xeon.sh +++ b/CodeTrans/tests/test_compose_on_xeon.sh @@ -45,8 +45,6 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon/ - export http_proxy=${http_proxy} - export https_proxy=${http_proxy} export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" export LLM_ENDPOINT="http://${ip_address}:8008" export LLM_COMPONENT_NAME="OpeaTextGenService" diff --git a/CodeTrans/tests/test_compose_tgi_on_gaudi.sh b/CodeTrans/tests/test_compose_tgi_on_gaudi.sh index 1c0404d397..c0f5e1e714 100644 --- a/CodeTrans/tests/test_compose_tgi_on_gaudi.sh +++ b/CodeTrans/tests/test_compose_tgi_on_gaudi.sh @@ -41,8 +41,6 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi/ - export http_proxy=${http_proxy} - export https_proxy=${http_proxy} export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" export LLM_ENDPOINT="http://${ip_address}:8008" export LLM_COMPONENT_NAME="OpeaTextGenService" diff --git a/CodeTrans/tests/test_compose_tgi_on_xeon.sh b/CodeTrans/tests/test_compose_tgi_on_xeon.sh index 95154c7c9d..be7aec935d 100644 --- a/CodeTrans/tests/test_compose_tgi_on_xeon.sh +++ b/CodeTrans/tests/test_compose_tgi_on_xeon.sh @@ -41,8 
+41,6 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon/ - export http_proxy=${http_proxy} - export https_proxy=${http_proxy} export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" export LLM_ENDPOINT="http://${ip_address}:8008" export LLM_COMPONENT_NAME="OpeaTextGenService" diff --git a/CodeTrans/tests/test_compose_vllm_on_rocm.sh b/CodeTrans/tests/test_compose_vllm_on_rocm.sh index 4574da774b..5279336ba4 100644 --- a/CodeTrans/tests/test_compose_vllm_on_rocm.sh +++ b/CodeTrans/tests/test_compose_vllm_on_rocm.sh @@ -40,8 +40,6 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/amd/gpu/rocm/ - export http_proxy=${http_proxy} - export https_proxy=${http_proxy} export HOST_IP=${ip_address} export CODETRANS_VLLM_SERVICE_PORT=8008 export CODETRANS_LLM_SERVICE_PORT=9000 From 1fdab591d9cc42e1b82c589715f0e537eb79259a Mon Sep 17 00:00:00 2001 From: chyundunovDatamonsters Date: Thu, 24 Apr 2025 14:28:57 +0700 Subject: [PATCH 010/217] CodeTrans - refactoring README.md for deploy application on ROCm with Docker Compose (#1875) Signed-off-by: Chingis Yundunov --- .../docker_compose/amd/gpu/rocm/README.md | 187 ++++++++++++------ 1 file changed, 130 insertions(+), 57 deletions(-) diff --git a/CodeTrans/docker_compose/amd/gpu/rocm/README.md b/CodeTrans/docker_compose/amd/gpu/rocm/README.md index 676eb4bd83..9ea891b496 100644 --- a/CodeTrans/docker_compose/amd/gpu/rocm/README.md +++ b/CodeTrans/docker_compose/amd/gpu/rocm/README.md @@ -1,8 +1,10 @@ -# Deploy CodeTrans on AMD GPU (ROCm) +# Deploying CodeTrans on AMD ROCm GPU -This document outlines the single node deployment process for a CodeTrans application utilizing the [GenAIComps](https://github.com/opea-project/GenAIComps.git) microservices on AMD GPU (ROCm) server. The steps include pulling Docker images, container deployment via Docker Compose, and service execution using microservices `llm`. 
+This document outlines the single node deployment process for a CodeTrans application utilizing the [GenAIComps](https://github.com/opea-project/GenAIComps.git) microservices on Intel Xeon server and AMD GPU. The steps include pulling Docker images, container deployment via Docker Compose, and service execution using microservices `llm`. -# Table of Contents +Note: The default LLM is `Qwen/Qwen2.5-Coder-7B-Instruct`. Before deploying the application, please make sure either you've requested and been granted the access to it on [Huggingface](https://huggingface.co/Qwen/Qwen2.5-Coder-7B-Instruct) or you've downloaded the model locally from [ModelScope](https://www.modelscope.cn/models). + +## Table of Contents 1. [CodeTrans Quick Start Deployment](#codetrans-quick-start-deployment) 2. [CodeTrans Docker Compose Files](#codetrans-docker-compose-files) @@ -11,7 +13,7 @@ This document outlines the single node deployment process for a CodeTrans applic ## CodeTrans Quick Start Deployment -This section describes how to quickly deploy and test the CodeTrans service manually on an AMD GPU (ROCm) processor. The basic steps are: +This section describes how to quickly deploy and test the CodeTrans service manually on an AMD ROCm GPU. The basic steps are: 1. [Access the Code](#access-the-code) 2. 
[Configure the Deployment Environment](#configure-the-deployment-environment) @@ -22,7 +24,7 @@ This section describes how to quickly deploy and test the CodeTrans service manu ### Access the Code -Clone the GenAIExample repository and access the CodeTrans AMD GPU (ROCm) platform Docker Compose files and supporting scripts: +Clone the GenAIExample repository and access the CodeTrans AMD ROCm GPU platform Docker Compose files and supporting scripts: ```bash git clone https://github.com/opea-project/GenAIExamples.git @@ -37,29 +39,84 @@ git checkout v1.2 ### Configure the Deployment Environment -To set up environment variables for deploying CodeTrans services, set up some parameters specific to the deployment environment and source the `set_env.sh` script in this directory: +To set up environment variables for deploying CodeTrans services, set up some parameters specific to the deployment environment and source the `set_env_*.sh` script in this directory: + +- if used vLLM - set_env_vllm.sh +- if used TGI - set_env.sh + +Set the values of the variables: + +- **HOST_IP, HOST_IP_EXTERNAL** - These variables are used to configure the name/address of the service in the operating system environment for the application services to interact with each other and with the outside world. + + If your server uses only an internal address and is not accessible from the Internet, then the values for these two variables will be the same and the value will be equal to the server's internal name/address. + + If your server uses only an external, Internet-accessible address, then the values for these two variables will be the same and the value will be equal to the server's external name/address. 
+ + If your server is located on an internal network, has an internal address, but is accessible from the Internet via a proxy/firewall/load balancer, then the HOST_IP variable will have a value equal to the internal name/address of the server, and the EXTERNAL_HOST_IP variable will have a value equal to the external name/address of the proxy/firewall/load balancer behind which the server is located. + + We set these values in the file set_env\*\*\*\*.sh + +- **Variables with names like "**\*\*\*\*\*\*\_PORT"\*\* - These variables set the IP port numbers for establishing network connections to the application services. + The values shown in the file set_env.sh or set_env_vllm they are the values used for the development and testing of the application, as well as configured for the environment in which the development is performed. These values must be configured in accordance with the rules of network access to your environment's server, and must not overlap with the IP ports of other applications that are already in use. + +Setting variables in the operating system environment: ```bash -export host_ip="External_Public_IP" # ip address of the node export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" -export http_proxy="Your_HTTP_Proxy" # http proxy if any -export https_proxy="Your_HTTPs_Proxy" # https proxy if any -export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed -export NGINX_PORT=${your_nginx_port} # your usable port for nginx, 80 for example -source ./set_env.sh +source ./set_env_*.sh # replace the script name with the appropriate one ``` Consult the section on [CodeTrans Service configuration](#codetrans-configuration) for information on how service specific configuration parameters affect deployments. ### Deploy the Services Using Docker Compose -To deploy the CodeTrans services, execute the `docker compose up` command with the appropriate arguments. For a default deployment, execute the command below. 
It uses the 'compose.yaml' file. +To deploy the CodeTrans services, execute the `docker compose up` command with the appropriate arguments. For a default deployment with TGI, execute the command below. It uses the 'compose.yaml' file. ```bash cd docker_compose/amd/gpu/rocm +# if used TGI docker compose -f compose.yaml up -d +# if used vLLM +# docker compose -f compose_vllm.yaml up -d +``` + +To enable GPU support for AMD GPUs, the following configuration is added to the Docker Compose file: + +- compose_vllm.yaml - for vLLM-based application +- compose.yaml - for TGI-based + +```yaml +shm_size: 1g +devices: + - /dev/kfd:/dev/kfd + - /dev/dri:/dev/dri +cap_add: + - SYS_PTRACE +group_add: + - video +security_opt: + - seccomp:unconfined +``` + +This configuration forwards all available GPUs to the container. To use a specific GPU, specify its `cardN` and `renderN` device IDs. For example: + +```yaml +shm_size: 1g +devices: + - /dev/kfd:/dev/kfd + - /dev/dri/card0:/dev/dri/card0 + - /dev/dri/render128:/dev/dri/render128 +cap_add: + - SYS_PTRACE +group_add: + - video +security_opt: + - seccomp:unconfined ``` +**How to Identify GPU Device IDs:** +Use AMD GPU driver utilities to determine the correct `cardN` and `renderN` IDs for your GPU. + > **Note**: developers should build docker image from source when: > > - Developing off the git main branch (as the container's ports in the repo may be different > from the published docker image). 
@@ -71,9 +128,11 @@ Please refer to the table below to build different microservices from source: | Microservice | Deployment Guide | | ------------ | -------------------------------------------------------------------------------------------------------------- | | vLLM | [vLLM build guide](https://github.com/opea-project/GenAIComps/tree/main/comps/third_parties/vllm#build-docker) | +| TGI | [TGI project](https://github.com/huggingface/text-generation-inference.git) | | LLM | [LLM build guide](https://github.com/opea-project/GenAIComps/tree/main/comps/llms) | -| MegaService | [MegaService build guide](../../../../README_miscellaneous.md#build-megaservice-docker-image) | -| UI | [Basic UI build guide](../../../../README_miscellaneous.md#build-ui-docker-image) | +| MegaService | [MegaService guide](../../../../README.md) | +| UI | [UI guide](../../../../ui/svelte/README.md) | +| Nginx | [Nginx guide](https://github.com/opea-project/GenAIComps/tree/main/comps/third_parties/nginx) | ### Check the Deployment Status @@ -83,15 +142,26 @@ After running docker compose, check if all the containers launched via docker co docker ps -a ``` -For the default deployment, the following 5 containers should have started: +For the default deployment with TGI, the following 9 containers should have started: ``` -CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES -b3e1388fa2ca opea/nginx:${RELEASE_VERSION} "/usr/local/bin/star…" 32 hours ago Up 2 hours 0.0.0.0:80->80/tcp, :::80->80/tcp codetrans-nginx-server -3b5fa9a722da opea/codetrans-ui:${RELEASE_VERSION} "docker-entrypoint.s…" 32 hours ago Up 2 hours 0.0.0.0:5173->5173/tcp, :::5173->5173/tcp codetrans-ui-server -d3b37f3d1faa opea/codetrans:${RELEASE_VERSION} "python codetrans.py" 32 hours ago Up 2 hours 0.0.0.0:7777->7777/tcp, :::7777->7777/tcp codetrans-backend-server -24cae0db1a70 opea/llm-textgen:${RELEASE_VERSION} "bash entrypoint.sh" 32 hours ago Up 2 hours 0.0.0.0:9000->9000/tcp, :::9000->9000/tcp codetrans-llm-server 
-b98fa07a4f5c opea/vllm:${RELEASE_VERSION} "python3 -m vllm.ent…" 32 hours ago Up 2 hours 0.0.0.0:9009->80/tcp, :::9009->80/tcp codetrans-tgi-service +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +eaf24161aca8 opea/nginx:latest "/docker-entrypoint.…" 37 seconds ago Up 5 seconds 0.0.0.0:18104->80/tcp, [::]:18104->80/tcp chaqna-nginx-server +2fce48a4c0f4 opea/codetrans-ui:latest "docker-entrypoint.s…" 37 seconds ago Up 5 seconds 0.0.0.0:18101->5173/tcp, [::]:18101->5173/tcp codetrans-ui-server +613c384979f4 opea/codetrans:latest "bash entrypoint.sh" 37 seconds ago Up 5 seconds 0.0.0.0:18102->8888/tcp, [::]:18102->8888/tcp codetrans-backend-server +e0ef1ea67640 opea/llm-textgen:latest "bash entrypoint.sh" 37 seconds ago Up 36 seconds 0.0.0.0:18011->9000/tcp, [::]:18011->9000/tcp codetrans-llm-server +342f01bfdbb2 ghcr.io/huggingface/text-generation-inference:2.3.1-rocm"python3 /workspace/…" 37 seconds ago Up 36 seconds 0.0.0.0:18008->8011/tcp, [::]:18008->8011/tcp codetrans-tgi-service +``` + +if used vLLM: + +``` +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +eaf24161aca8 opea/nginx:latest "/docker-entrypoint.…" 37 seconds ago Up 5 seconds 0.0.0.0:18104->80/tcp, [::]:18104->80/tcp chaqna-nginx-server +2fce48a4c0f4 opea/codetrans-ui:latest "docker-entrypoint.s…" 37 seconds ago Up 5 seconds 0.0.0.0:18101->5173/tcp, [::]:18101->5173/tcp codetrans-ui-server +613c384979f4 opea/codetrans:latest "bash entrypoint.sh" 37 seconds ago Up 5 seconds 0.0.0.0:18102->8888/tcp, [::]:18102->8888/tcp codetrans-backend-server +e0ef1ea67640 opea/llm-textgen:latest "bash entrypoint.sh" 37 seconds ago Up 36 seconds 0.0.0.0:18011->9000/tcp, [::]:18011->9000/tcp codetrans-llm-server +342f01bfdbb2 opea/vllm-rocm:latest "python3 /workspace/…" 37 seconds ago Up 36 seconds 0.0.0.0:18008->8011/tcp, [::]:18008->8011/tcp codetrans-vllm-service ``` If any issues are encountered during deployment, refer to the [Troubleshooting](../../../../README_miscellaneous.md#troubleshooting) 
section. @@ -109,65 +179,68 @@ curl http://${HOST_IP}:${CODETRANS_BACKEND_SERVICE_PORT}/v1/codetrans \ -d "$DATA" ``` -**Note** : Access the CodeTrans UI by web browser through this URL: `http://${host_ip}:80`. Please confirm the `80` port is opened in the firewall. To validate each microservie used in the pipeline refer to the [Validate Microservices](#validate-microservices) section. +**Note** : Access the CodeTrans UI by web browser through this URL: `http://${HOST_IP_EXTERNAL}:${CODETRANS_NGINX_PORT}` ### Cleanup the Deployment To stop the containers associated with the deployment, execute the following command: ```bash +# if used TGI docker compose -f compose.yaml down +# if used vLLM +# docker compose -f compose_vllm.yaml down ``` ## CodeTrans Docker Compose Files -In the context of deploying a CodeTrans pipeline on an AMD GPU (ROCm) platform, we can pick and choose different large language model serving frameworks. The table below outlines the various configurations that are available as part of the application. These configurations can be used as templates and can be extended to different components available in [GenAIComps](https://github.com/opea-project/GenAIComps.git). +In the context of deploying an ChatQnA pipeline on an Intel® Xeon® platform, we can pick and choose different large language model serving frameworks, or single English TTS/multi-language TTS component. The table below outlines the various configurations that are available as part of the application. These configurations can be used as templates and can be extended to different components available in [GenAIComps](https://github.com/opea-project/GenAIComps.git). 
-| File | Description | -| ---------------------------------------- | ------------------------------------------------------------------------------------------ | -| [compose.yaml](./compose.yaml) | Default compose file using TGI as serving framework | -| [compose_vllm.yaml](./compose_vllm.yaml) | The LLM serving framework is vLLM. All other configurations remain the same as the default | +| File | Description | +| ---------------------------------------- | ------------------------------------------------------------------------------------- | +| [compose.yaml](./compose.yaml) | The LLM serving framework is TGI. Default compose file using TGI as serving framework | +| [compose_vllm.yaml](./compose_vllm.yaml) | The LLM serving framework is vLLM. Compose file using vllm as serving framework | -## Validate Microservices +## Validate MicroServices -1. LLM backend Service +LLM backend Service - In the first startup, this service will take more time to download, load and warm up the model. After it's finished, the service will be ready. +In the first startup, this service will take more time to download, load and warm up the model. After it's finished, the service will be ready. - Try the command below to check whether the LLM serving is ready. +Try the command below to check whether the LLM serving is ready. - ```bash - # vLLM service - docker logs codetrans-vllm-service 2>&1 | grep complete - # If the service is ready, you will get the response like below. - INFO: Application startup complete. - ``` +```bash +# vLLM service +docker logs codetrans-vllm-service 2>&1 | grep complete +# If the service is ready, you will get the response like below. +INFO: Application startup complete. +``` - ```bash - # TGI service - docker logs codetrans-tgi-service | grep Connected - # If the service is ready, you will get the response like below. 
- 2024-09-03T02:47:53.402023Z INFO text_generation_router::server: router/src/server.rs:2311: Connected - ``` +```bash +# TGI service +docker logs codetrans-tgi-service | grep Connected +# If the service is ready, you will get the response like below. +2024-09-03T02:47:53.402023Z INFO text_generation_router::server: router/src/server.rs:2311: Connected +``` - Then try the `cURL` command below to validate services. +Then try the `cURL` command below to validate services. - ```bash - # either vLLM or TGI service - # for vllm service - export port=${CODETRANS_VLLM_SERVICE_PORT} - # for tgi service - export port=${CODETRANS_TGI_SERVICE_PORT} - curl http://${HOST_IP}:${port}/v1/chat/completions \ - -X POST \ - -d '{"inputs":" ### System: Please translate the following Golang codes into Python codes. ### Original codes: '\'''\'''\''Golang \npackage main\n\nimport \"fmt\"\nfunc main() {\n fmt.Println(\"Hello, World!\");\n '\'''\'''\'' ### Translated codes:","parameters":{"max_new_tokens":17, "do_sample": true}}' \ - -H 'Content-Type: application/json' - ``` +```bash +# either vLLM or TGI service +# for vllm service +export port=${CODETRANS_VLLM_SERVICE_PORT} +# for tgi service +export port=${CODETRANS_TGI_SERVICE_PORT} +curl http://${HOST_IP}:${port}/v1/chat/completions \ + -X POST \ + -d '{"inputs":" ### System: Please translate the following Golang codes into Python codes. ### Original codes: '\'''\'''\''Golang \npackage main\n\nimport \"fmt\"\nfunc main() {\n fmt.Println(\"Hello, World!\");\n '\'''\'''\'' ### Translated codes:","parameters":{"max_new_tokens":17, "do_sample": true}}' \ + -H 'Content-Type: application/json' +``` 2. LLM Microservice ```bash - curl http://${HOST_IP}:${CODETRANS_LLM_SERVICE_PORT}/v1/chat/completions\ + curl http://${HOST_IP}:${CODETRANS_LLM_SERVICE_PORT}/v1/chat/completions \ -X POST \ -d '{"query":" ### System: Please translate the following Golang codes into Python codes. 
### Original codes: '\'''\'''\''Golang \npackage main\n\nimport \"fmt\"\nfunc main() {\n fmt.Println(\"Hello, World!\");\n '\'''\'''\'' ### Translated codes:"}' \ -H 'Content-Type: application/json' From f90a6d2a8e225a8ae1aac00d7373df20cc2d43f9 Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Thu, 24 Apr 2025 16:18:44 +0800 Subject: [PATCH 011/217] [CICD enhance] EdgeCraftRAG run CI with latest base image, group logs in GHA outputs. (#1877) Signed-off-by: chensuyue --- AudioQnA/Dockerfile | 1 - AudioQnA/Dockerfile.multilang | 1 - EdgeCraftRAG/Dockerfile | 3 +- .../intel/gpu/arc/compose_vllm.yaml | 2 +- EdgeCraftRAG/docker_image_build/build.yaml | 30 +++++++++---------- EdgeCraftRAG/tests/test_compose_on_arc.sh | 24 ++++++++++++++- .../tests/test_compose_vllm_on_arc.sh | 26 +++++++++++++--- 7 files changed, 62 insertions(+), 25 deletions(-) diff --git a/AudioQnA/Dockerfile b/AudioQnA/Dockerfile index 82d8d3540c..3dd8228c72 100644 --- a/AudioQnA/Dockerfile +++ b/AudioQnA/Dockerfile @@ -3,7 +3,6 @@ ARG IMAGE_REPO=opea ARG BASE_TAG=latest -FROM opea/comps-base:$BASE_TAG FROM $IMAGE_REPO/comps-base:$BASE_TAG COPY ./audioqna.py $HOME/audioqna.py diff --git a/AudioQnA/Dockerfile.multilang b/AudioQnA/Dockerfile.multilang index 8b95507842..6c2d013e4e 100644 --- a/AudioQnA/Dockerfile.multilang +++ b/AudioQnA/Dockerfile.multilang @@ -3,7 +3,6 @@ ARG IMAGE_REPO=opea ARG BASE_TAG=latest -FROM opea/comps-base:$BASE_TAG FROM $IMAGE_REPO/comps-base:$BASE_TAG COPY ./audioqna_multilang.py $HOME/audioqna_multilang.py diff --git a/EdgeCraftRAG/Dockerfile b/EdgeCraftRAG/Dockerfile index c7100d1600..1f770020a5 100644 --- a/EdgeCraftRAG/Dockerfile +++ b/EdgeCraftRAG/Dockerfile @@ -1,8 +1,9 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 +ARG IMAGE_REPO=opea ARG BASE_TAG=latest -FROM opea/comps-base:$BASE_TAG +FROM $IMAGE_REPO/comps-base:$BASE_TAG COPY ./chatqna.py $HOME/chatqna.py diff --git a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm.yaml 
b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm.yaml index cfa14d968c..1204e5f0b2 100644 --- a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm.yaml +++ b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm.yaml @@ -63,7 +63,7 @@ services: - ecrag vllm-openvino-server: container_name: vllm-openvino-server - image: opea/vllm-arc:latest + image: ${REGISTRY:-opea}/vllm-arc:${TAG:-latest} ports: - ${VLLM_SERVICE_PORT:-8008}:80 environment: diff --git a/EdgeCraftRAG/docker_image_build/build.yaml b/EdgeCraftRAG/docker_image_build/build.yaml index 9c80955d9e..de4a781d49 100644 --- a/EdgeCraftRAG/docker_image_build/build.yaml +++ b/EdgeCraftRAG/docker_image_build/build.yaml @@ -2,35 +2,33 @@ # SPDX-License-Identifier: Apache-2.0 services: - edgecraftrag-server: + edgecraftrag: build: context: ../ args: + IMAGE_REPO: ${REGISTRY} + BASE_TAG: ${TAG} http_proxy: ${http_proxy} https_proxy: ${https_proxy} + dockerfile: ./Dockerfile + image: ${REGISTRY:-opea}/edgecraftrag:${TAG:-latest} + edgecraftrag-server: + build: dockerfile: ./Dockerfile.server + extends: edgecraftrag image: ${REGISTRY:-opea}/edgecraftrag-server:${TAG:-latest} edgecraftrag-ui: build: - context: ../ - args: - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} dockerfile: ./ui/docker/Dockerfile.ui + extends: edgecraftrag image: ${REGISTRY:-opea}/edgecraftrag-ui:${TAG:-latest} edgecraftrag-ui-gradio: build: - context: ../ - args: - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} dockerfile: ./ui/docker/Dockerfile.gradio + extends: edgecraftrag image: ${REGISTRY:-opea}/edgecraftrag-ui-gradio:${TAG:-latest} - edgecraftrag: + vllm-arc: build: - context: ../ - args: - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} - dockerfile: ./Dockerfile - image: ${REGISTRY:-opea}/edgecraftrag:${TAG:-latest} + context: GenAIComps + dockerfile: comps/third_parties/vllm/src/Dockerfile.intel_gpu + image: ${REGISTRY:-opea}/vllm-arc:${TAG:-latest} diff --git 
a/EdgeCraftRAG/tests/test_compose_on_arc.sh b/EdgeCraftRAG/tests/test_compose_on_arc.sh index 4d41cdac1a..cdf445b1f4 100755 --- a/EdgeCraftRAG/tests/test_compose_on_arc.sh +++ b/EdgeCraftRAG/tests/test_compose_on_arc.sh @@ -30,8 +30,16 @@ HF_ENDPOINT=https://hf-mirror.com function build_docker_images() { + opea_branch=${opea_branch:-"main"} cd $WORKPATH/docker_image_build + git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s + echo "Build all the images with --no-cache, check docker_image_build.log for details..." + service_list="edgecraftrag edgecraftrag-server edgecraftrag-ui" docker compose -f build.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log docker images && sleep 1s @@ -102,16 +110,30 @@ function stop_docker() { function main() { mkdir -p $LOG_PATH + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services - echo "EC_RAG service started" && sleep 1s + echo "::endgroup::" + echo "::group::validate_rag" validate_rag + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + echo "::group::stop_docker" stop_docker echo y | docker system prune + echo "::endgroup::" } diff --git a/EdgeCraftRAG/tests/test_compose_vllm_on_arc.sh b/EdgeCraftRAG/tests/test_compose_vllm_on_arc.sh index feeba14fb5..4c471c2450 100755 --- a/EdgeCraftRAG/tests/test_compose_vllm_on_arc.sh +++ b/EdgeCraftRAG/tests/test_compose_vllm_on_arc.sh @@ -33,7 +33,14 @@ vLLM_ENDPOINT="http://${HOST_IP}:${VLLM_SERVICE_PORT}" function build_docker_images() { + 
opea_branch=${opea_branch:-"main"} cd $WORKPATH/docker_image_build + git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s + echo "Build all the images with --no-cache, check docker_image_build.log for details..." docker compose -f build.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log @@ -152,19 +159,30 @@ function stop_docker() { function main() { mkdir -p "$LOG_PATH" + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi - start_time=$(date +%s) + echo "::endgroup::" + + echo "::group::start_services" start_services - end_time=$(date +%s) - duration=$((end_time-start_time)) - echo "EC_RAG service start duration is $duration s" && sleep 1s + echo "::endgroup::" + echo "::group::validate_rag" validate_rag + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + echo "::group::stop_docker" stop_docker echo y | docker system prune + echo "::endgroup::" } From bb7a675665e7d4f4f14bb5f0589846efafa11897 Mon Sep 17 00:00:00 2001 From: chyundunovDatamonsters Date: Fri, 25 Apr 2025 07:52:24 +0700 Subject: [PATCH 012/217] ChatQnA - refactoring README.md for deploy application on ROCm (#1857) Signed-off-by: Chingis Yundunov Signed-off-by: Chingis Yundunov Co-authored-by: Chingis Yundunov Co-authored-by: Artem Astafev Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- ChatQnA/docker_compose/amd/gpu/rocm/README.md | 700 ++++++------------ .../docker_compose/amd/gpu/rocm/compose.yaml | 2 +- .../amd/gpu/rocm/compose_faqgen.yaml | 2 +- .../amd/gpu/rocm/compose_faqgen_vllm.yaml | 2 +- 4 files 
changed, 209 insertions(+), 497 deletions(-) diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/README.md b/ChatQnA/docker_compose/amd/gpu/rocm/README.md index 0edcf44141..4d968b84eb 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/README.md +++ b/ChatQnA/docker_compose/amd/gpu/rocm/README.md @@ -1,163 +1,90 @@ -# Build and Deploy ChatQnA Application on AMD GPU (ROCm) +# Deploying ChatQnA on AMD ROCm GPU -## Build Docker Images +This document outlines the single node deployment process for a ChatQnA application utilizing the [GenAIComps](https://github.com/opea-project/GenAIComps.git) microservices on Intel Xeon server and AMD GPU. The steps include pulling Docker images, container deployment via Docker Compose, and service execution using microservices `llm`. -### 1. Build Docker Image +Note: The default LLM is `meta-llama/Meta-Llama-3-8B-Instruct`. Before deploying the application, please make sure either you've requested and been granted the access to it on [Huggingface](https://huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct) or you've downloaded the model locally from [ModelScope](https://www.modelscope.cn/models). -- #### Create application install directory and go to it: +## Table of Contents - ```bash - mkdir ~/chatqna-install && cd chatqna-install - ``` +1. [ChatQnA Quick Start Deployment](#chatqna-quick-start-deployment) +2. [ChatQnA Docker Compose Files](#chatqna-docker-compose-files) +3. [Validate Microservices](#validate-microservices) +4. [Conclusion](#conclusion) -- #### Clone the repository GenAIExamples (the default repository branch "main" is used here): +## ChatQnA Quick Start Deployment - ```bash - git clone https://github.com/opea-project/GenAIExamples.git - ``` +This section describes how to quickly deploy and test the ChatQnA service manually on an AMD ROCm GPU. The basic steps are: - If you need to use a specific branch/tag of the GenAIExamples repository, then (v1.3 replace with its own value): +1. [Access the Code](#access-the-code) +2. 
[Configure the Deployment Environment](#configure-the-deployment-environment) +3. [Deploy the Services Using Docker Compose](#deploy-the-services-using-docker-compose) +4. [Check the Deployment Status](#check-the-deployment-status) +5. [Validate the Pipeline](#validate-the-pipeline) +6. [Cleanup the Deployment](#cleanup-the-deployment) - ```bash - git clone https://github.com/opea-project/GenAIExamples.git && cd GenAIExamples && git checkout v1.3 - ``` +### Access the Code - We remind you that when using a specific version of the code, you need to use the README from this version: +Clone the GenAIExample repository and access the ChatQnA AMD ROCm GPU platform Docker Compose files and supporting scripts: -- #### Go to build directory: - - ```bash - cd ~/chatqna-install/GenAIExamples/ChatQnA/docker_image_build - ``` - -- Cleaning up the GenAIComps repository if it was previously cloned in this directory. - This is necessary if the build was performed earlier and the GenAIComps folder exists and is not empty: - - ```bash - echo Y | rm -R GenAIComps - ``` - -- #### Clone the repository GenAIComps (the default repository branch "main" is used here): - - ```bash - git clone https://github.com/opea-project/GenAIComps.git - ``` - - If you use a specific tag of the GenAIExamples repository, - then you should also use the corresponding tag for GenAIComps. (v1.3 replace with its own value): - - ```bash - git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout v1.3 - ``` - - We remind you that when using a specific version of the code, you need to use the README from this version. 
- -- #### Setting the list of images for the build (from the build file.yaml) - - If you want to deploy a vLLM-based or TGI-based application, then the set of services is installed as follows: - - #### vLLM-based application - - ```bash - service_list="dataprep retriever vllm-rocm chatqna chatqna-ui nginx" - ``` - - #### vLLM-based application with FaqGen - - ```bash - service_list="dataprep retriever vllm-rocm llm-faqgen chatqna chatqna-ui nginx" - ``` - - #### TGI-based application - - ```bash - service_list="dataprep retriever chatqna chatqna-ui nginx" - ``` - - #### TGI-based application with FaqGen - - ```bash - service_list="dataprep retriever llm-faqgen chatqna chatqna-ui nginx" - ``` - -- #### Pull Docker Images - - ```bash - docker pull redis/redis-stack:7.2.0-v9 - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 - ``` +```bash +git clone https://github.com/opea-project/GenAIExamples.git +cd GenAIExamples/ChatQnA +``` -- #### Optional. Pull TGI Docker Image (Do this if you want to use TGI) +Then checkout a released version, such as v1.3: - ```bash - docker pull ghcr.io/huggingface/text-generation-inference:2.3.1-rocm - ``` +```bash +git checkout v1.3 +``` -- #### Build Docker Images +### Configure the Deployment Environment - ```bash - docker compose -f build.yaml build ${service_list} --no-cache - ``` +To set up environment variables for deploying ChatQnA services, set up some parameters specific to the deployment environment and source the `set_env_*.sh` script in this directory: - After the build, we check the list of images with the command: +- if used vLLM - set_env_vllm.sh +- if used vLLM with FaqGen - set_env_faqgen_vllm.sh +- if used TGI - set_env.sh +- if used TGI with FaqGen - set_env_faqgen.sh - ```bash - docker image ls - ``` +Set the values of the variables: - The list of images should include: +- **HOST_IP, HOST_IP_EXTERNAL** - These variables are used to configure the name/address of the service in the operating system 
environment for the application services to interact with each other and with the outside world.

-  ##### vLLM-based application:
+  If your server uses only an internal address and is not accessible from the Internet, then the values for these two variables will be the same and the value will be equal to the server's internal name/address.

-  - redis/redis-stack:7.2.0-v9
-  - opea/dataprep:latest
-  - ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
-  - opea/retriever:latest
-  - opea/vllm-rocm:latest
-  - opea/chatqna:latest
-  - opea/chatqna-ui:latest
-  - opea/nginx:latest
+  If your server uses only an external, Internet-accessible address, then the values for these two variables will be the same and the value will be equal to the server's external name/address.

-  ##### vLLM-based application with FaqGen:
+  If your server is located on an internal network, has an internal address, but is accessible from the Internet via a proxy/firewall/load balancer, then the HOST_IP variable will have a value equal to the internal name/address of the server, and the EXTERNAL_HOST_IP variable will have a value equal to the external name/address of the proxy/firewall/load balancer behind which the server is located.

-  - redis/redis-stack:7.2.0-v9
-  - opea/dataprep:latest
-  - ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
-  - opea/retriever:latest
-  - opea/vllm-rocm:latest
-  - opea/llm-faqgen:latest
-  - opea/chatqna:latest
-  - opea/chatqna-ui:latest
-  - opea/nginx:latest
+  We set these values in the file set_env\*\*\*\*.sh

-  ##### TGI-based application:
+- **Variables with names like "**\*\*\*\*\*\*\_PORT"\*\* - These variables set the IP port numbers for establishing network connections to the application services.
+  The values shown in the file set_env.sh or set_env_vllm.sh are the values used for the development and testing of the application, and are configured for the environment in which the development is performed.
These values must be configured in accordance with the rules of network access to your environment's server, and must not overlap with the IP ports of other applications that are already in use. - - redis/redis-stack:7.2.0-v9 - - opea/dataprep:latest - - ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 - - opea/retriever:latest - - ghcr.io/huggingface/text-generation-inference:2.3.1-rocm - - opea/chatqna:latest - - opea/chatqna-ui:latest - - opea/nginx:latest +Setting variables in the operating system environment: - ##### TGI-based application with FaqGen: +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +source ./set_env_*.sh # replace the script name with the appropriate one +``` - - redis/redis-stack:7.2.0-v9 - - opea/dataprep:latest - - ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 - - opea/retriever:latest - - ghcr.io/huggingface/text-generation-inference:2.3.1-rocm - - opea/llm-faqgen:latest - - opea/chatqna:latest - - opea/chatqna-ui:latest - - opea/nginx:latest +Consult the section on [ChatQnA Service configuration](#chatqna-configuration) for information on how service specific configuration parameters affect deployments. ---- +### Deploy the Services Using Docker Compose -## Deploy the ChatQnA Application +To deploy the ChatQnA services, execute the `docker compose up` command with the appropriate arguments. For a default deployment with TGI, execute the command below. It uses the 'compose.yaml' file. 
-### Docker Compose Configuration for AMD GPUs +```bash +cd docker_compose/amd/gpu/rocm +# if used TGI +docker compose -f compose.yaml up -d +# if used TGI with FaqGen +# docker compose -f compose_faqgen.yaml up -d +# if used vLLM +# docker compose -f compose_vllm.yaml up -d +# if used vLLM with FaqGen +# docker compose -f compose_faqgen_vllm.yaml up -d +``` To enable GPU support for AMD GPUs, the following configuration is added to the Docker Compose file: @@ -198,424 +125,209 @@ security_opt: **How to Identify GPU Device IDs:** Use AMD GPU driver utilities to determine the correct `cardN` and `renderN` IDs for your GPU. -### Set deploy environment variables - -#### Setting variables in the operating system environment: - -##### Set variable HUGGINGFACEHUB_API_TOKEN: - -```bash -### Replace the string 'your_huggingfacehub_token' with your HuggingFacehub repository access token. -export HUGGINGFACEHUB_API_TOKEN='your_huggingfacehub_token' -``` - -#### Set variables value in set_env\*\*\*\*.sh file: - -Go to Docker Compose directory: - -```bash -cd ~/chatqna-install/GenAIExamples/ChatQnA/docker_compose/amd/gpu/rocm -``` - -The example uses the Nano text editor. 
You can use any convenient text editor: - -#### If you use vLLM based application - -```bash -nano set_env_vllm.sh -``` - -#### If you use vLLM based application with FaqGen - -```bash -nano set_env_vllm_faqgen.sh -``` - -#### If you use TGI based application - -```bash -nano set_env.sh -``` - -#### If you use TGI based application with FaqGen - -```bash -nano set_env_faqgen.sh -``` - -If you are in a proxy environment, also set the proxy-related environment variables: - -```bash -export http_proxy="Your_HTTP_Proxy" -export https_proxy="Your_HTTPs_Proxy" -``` - -Set the values of the variables: - -- **HOST_IP, HOST_IP_EXTERNAL** - These variables are used to configure the name/address of the service in the operating system environment for the application services to interact with each other and with the outside world. - - If your server uses only an internal address and is not accessible from the Internet, then the values for these two variables will be the same and the value will be equal to the server's internal name/address. - - If your server uses only an external, Internet-accessible address, then the values for these two variables will be the same and the value will be equal to the server's external name/address. - - If your server is located on an internal network, has an internal address, but is accessible from the Internet via a proxy/firewall/load balancer, then the HOST_IP variable will have a value equal to the internal name/address of the server, and the EXTERNAL_HOST_IP variable will have a value equal to the external name/address of the proxy/firewall/load balancer behind which the server is located. +> **Note**: developers should build docker image from source when: +> +> - Developing off the git main branch (as the container's ports in the repo may be different > from the published docker image). +> - Unable to download the docker image. +> - Use a specific version of Docker image. 
- We set these values in the file set_env\*\*\*\*.sh +Please refer to the table below to build different microservices from source: -- **Variables with names like "**\*\*\*\*\*\*\_PORT"\*\* - These variables set the IP port numbers for establishing network connections to the application services. - The values shown in the file set_env.sh or set_env_vllm they are the values used for the development and testing of the application, as well as configured for the environment in which the development is performed. These values must be configured in accordance with the rules of network access to your environment's server, and must not overlap with the IP ports of other applications that are already in use. +| Microservice | Deployment Guide | +| --------------- | ------------------------------------------------------------------------------------------------------------------ | +| vLLM | [vLLM build guide](https://github.com/opea-project/GenAIComps/tree/main/comps/third_parties/vllm#build-docker) | +| TGI | [TGI project](https://github.com/huggingface/text-generation-inference.git) | +| LLM | [LLM build guide](https://github.com/opea-project/GenAIComps/tree/main/comps/llms) | +| Redis Vector DB | [Redis](https://github.com/redis/redis.git) | +| Dataprep | [Dataprep build guide](https://github.com/opea-project/GenAIComps/tree/main/comps/dataprep/src/README_redis.md) | +| TEI Embedding | [TEI guide](https://github.com/huggingface/text-embeddings-inference.git) | +| Retriever | [Retriever build guide](https://github.com/opea-project/GenAIComps/tree/main/comps/retrievers/src/README_redis.md) | +| TEI Reranking | [TEI guide](https://github.com/huggingface/text-embeddings-inference.git) | +| MegaService | [MegaService guide](../../../../README.md) | +| UI | [UI guide](../../../../ui/react/README.md) | +| Nginx | [Nginx guide](https://github.com/opea-project/GenAIComps/tree/main/comps/third_parties/nginx) | -#### Set variables with script set_env\*\*\*\*.sh +### Check the 
Deployment Status -#### If you use vLLM based application +After running docker compose, check if all the containers launched via docker compose have started: ```bash -. set_env_vllm.sh +docker ps -a ``` -#### If you use vLLM based application with FaqGen +For the default deployment with TGI, the following 9 containers should have started: -```bash -. set_env_faqgen_vllm.sh ``` - -#### If you use TGI based application - -```bash -. set_env.sh +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +eaf24161aca8 opea/nginx:latest "/docker-entrypoint.…" 37 seconds ago Up 5 seconds 0.0.0.0:18104->80/tcp, [::]:18104->80/tcp chatqna-nginx-server +2fce48a4c0f4 opea/chatqna-ui:latest "docker-entrypoint.s…" 37 seconds ago Up 5 seconds 0.0.0.0:18101->5173/tcp, [::]:18101->5173/tcp chatqna-ui-server +613c384979f4 opea/chatqna:latest "bash entrypoint.sh" 37 seconds ago Up 5 seconds 0.0.0.0:18102->8888/tcp, [::]:18102->8888/tcp chatqna-backend-server +05512bd29fee opea/dataprep:latest "sh -c 'python $( [ …" 37 seconds ago Up 36 seconds (healthy) 0.0.0.0:18103->5000/tcp, [::]:18103->5000/tcp chatqna-dataprep-service +49844d339d1d opea/retriever:latest "python opea_retriev…" 37 seconds ago Up 36 seconds 0.0.0.0:7000->7000/tcp, [::]:7000->7000/tcp chatqna-retriever +75b698fe7de0 ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 "text-embeddings-rou…" 37 seconds ago Up 36 seconds 0.0.0.0:18808->80/tcp, [::]:18808->80/tcp chatqna-tei-reranking-service +342f01bfdbb2 ghcr.io/huggingface/text-generation-inference:2.3.1-rocm"python3 /workspace/…" 37 seconds ago Up 36 seconds 0.0.0.0:18008->8011/tcp, [::]:18008->8011/tcp chatqna-tgi-service +6081eb1c119d redis/redis-stack:7.2.0-v9 "/entrypoint.sh" 37 seconds ago Up 36 seconds 0.0.0.0:6379->6379/tcp, [::]:6379->6379/tcp, 0.0.0.0:8001->8001/tcp, [::]:8001->8001/tcp chatqna-redis-vector-db +eded17420782 ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 "text-embeddings-rou…" 37 seconds ago Up 36 seconds 0.0.0.0:18090->80/tcp, 
[::]:18090->80/tcp chatqna-tei-embedding-service ``` -#### If you use TGI based application with FaqGen +if used TGI with FaqGen: -```bash -. set_env_faqgen.sh ``` - -### Start the services: - -#### If you use vLLM based application - -```bash -docker compose -f compose_vllm.yaml up -d -``` - -#### If you use vLLM based application with FaqGen - -```bash -docker compose -f compose_faqgen_vllm.yaml up -d -``` - -#### If you use TGI based application - -```bash -docker compose -f compose.yaml up -d +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +eaf24161aca8 opea/nginx:latest "/docker-entrypoint.…" 37 seconds ago Up 5 seconds 0.0.0.0:18104->80/tcp, [::]:18104->80/tcp chatqna-nginx-server +2fce48a4c0f4 opea/chatqna-ui:latest "docker-entrypoint.s…" 37 seconds ago Up 5 seconds 0.0.0.0:18101->5173/tcp, [::]:18101->5173/tcp chatqna-ui-server +613c384979f4 opea/chatqna:latest "bash entrypoint.sh" 37 seconds ago Up 5 seconds 0.0.0.0:18102->8888/tcp, [::]:18102->8888/tcp chatqna-backend-server +e0ef1ea67640 opea/llm-faqgen:latest "bash entrypoint.sh" 37 seconds ago Up 36 seconds 0.0.0.0:18011->9000/tcp, [::]:18011->9000/tcp chatqna-llm-faqgen +05512bd29fee opea/dataprep:latest "sh -c 'python $( [ …" 37 seconds ago Up 36 seconds (healthy) 0.0.0.0:18103->5000/tcp, [::]:18103->5000/tcp chatqna-dataprep-service +49844d339d1d opea/retriever:latest "python opea_retriev…" 37 seconds ago Up 36 seconds 0.0.0.0:7000->7000/tcp, [::]:7000->7000/tcp chatqna-retriever +75b698fe7de0 ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 "text-embeddings-rou…" 37 seconds ago Up 36 seconds 0.0.0.0:18808->80/tcp, [::]:18808->80/tcp chatqna-tei-reranking-service +342f01bfdbb2 ghcr.io/huggingface/text-generation-inference:2.3.1-rocm"python3 /workspace/…" 37 seconds ago Up 36 seconds 0.0.0.0:18008->8011/tcp, [::]:18008->8011/tcp chatqna-tgi-service +6081eb1c119d redis/redis-stack:7.2.0-v9 "/entrypoint.sh" 37 seconds ago Up 36 seconds 0.0.0.0:6379->6379/tcp, [::]:6379->6379/tcp, 
0.0.0.0:8001->8001/tcp, [::]:8001->8001/tcp chatqna-redis-vector-db +eded17420782 ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 "text-embeddings-rou…" 37 seconds ago Up 36 seconds 0.0.0.0:18090->80/tcp, [::]:18090->80/tcp chatqna-tei-embedding-service ``` -#### If you use TGI based application with FaqGen +if used vLLM: -```bash -docker compose -f compose_faqgen.yaml up -d ``` - -All containers should be running and should not restart: - -##### If you use vLLM based application: - -- chatqna-redis-vector-db -- chatqna-dataprep-service -- chatqna-tei-embedding-service -- chatqna-retriever -- chatqna-tei-reranking-service -- chatqna-vllm-service -- chatqna-backend-server -- chatqna-ui-server -- chatqna-nginx-server - -##### If you use vLLM based application with FaqGen: - -- chatqna-redis-vector-db -- chatqna-dataprep-service -- chatqna-tei-embedding-service -- chatqna-retriever -- chatqna-tei-reranking-service -- chatqna-vllm-service -- chatqna-llm-faqgen -- chatqna-backend-server -- chatqna-ui-server -- chatqna-nginx-server - -##### If you use TGI based application: - -- chatqna-redis-vector-db -- chatqna-dataprep-service -- chatqna-tei-embedding-service -- chatqna-retriever -- chatqna-tei-reranking-service -- chatqna-tgi-service -- chatqna-backend-server -- chatqna-ui-server -- chaqna-nginx-server - -##### If you use TGI based application with FaqGen: - -- chatqna-redis-vector-db -- chatqna-dataprep-service -- chatqna-tei-embedding-service -- chatqna-retriever -- chatqna-tei-reranking-service -- chatqna-tgi-service -- chatqna-llm-faqgen -- chatqna-backend-server -- chatqna-ui-server -- chaqna-nginx-server - ---- - -## Validate the Services - -### 1. 
Validate TEI Embedding Service - -```bash -curl http://${HOST_IP}:${CHATQNA_TEI_EMBEDDING_PORT}/embed \ - -X POST \ - -d '{"inputs":"What is Deep Learning?"}' \ - -H 'Content-Type: application/json' +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +eaf24161aca8 opea/nginx:latest "/docker-entrypoint.…" 37 seconds ago Up 5 seconds 0.0.0.0:18104->80/tcp, [::]:18104->80/tcp chatqna-nginx-server +2fce48a4c0f4 opea/chatqna-ui:latest "docker-entrypoint.s…" 37 seconds ago Up 5 seconds 0.0.0.0:18101->5173/tcp, [::]:18101->5173/tcp chatqna-ui-server +613c384979f4 opea/chatqna:latest "bash entrypoint.sh" 37 seconds ago Up 5 seconds 0.0.0.0:18102->8888/tcp, [::]:18102->8888/tcp chatqna-backend-server +05512bd29fee opea/dataprep:latest "sh -c 'python $( [ …" 37 seconds ago Up 36 seconds (healthy) 0.0.0.0:18103->5000/tcp, [::]:18103->5000/tcp chatqna-dataprep-service +49844d339d1d opea/retriever:latest "python opea_retriev…" 37 seconds ago Up 36 seconds 0.0.0.0:7000->7000/tcp, [::]:7000->7000/tcp chatqna-retriever +75b698fe7de0 ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 "text-embeddings-rou…" 37 seconds ago Up 36 seconds 0.0.0.0:18808->80/tcp, [::]:18808->80/tcp chatqna-tei-reranking-service +342f01bfdbb2 opea/vllm-rocm:latest "python3 /workspace/…" 37 seconds ago Up 36 seconds 0.0.0.0:18008->8011/tcp, [::]:18008->8011/tcp chatqna-vllm-service +6081eb1c119d redis/redis-stack:7.2.0-v9 "/entrypoint.sh" 37 seconds ago Up 36 seconds 0.0.0.0:6379->6379/tcp, [::]:6379->6379/tcp, 0.0.0.0:8001->8001/tcp, [::]:8001->8001/tcp chatqna-redis-vector-db +eded17420782 ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 "text-embeddings-rou…" 37 seconds ago Up 36 seconds 0.0.0.0:18090->80/tcp, [::]:18090->80/tcp chatqna-tei-embedding-service ``` -Checking the response from the service. 
The response should be similar to text: +if used vLLM with FaqGen: -```textmate -[[0.00037115702,-0.06356819,0.0024758505,..................,0.022725677,0.016026087,-0.02125421,-0.02984927,-0.0049473033]] ``` - -If the service response has a meaningful response in the value, -then we consider the TEI Embedding Service to be successfully launched - -### 2. Validate Retriever Microservice - -```bash -export your_embedding=$(python3 -c "import random; embedding = [random.uniform(-1, 1) for _ in range(768)]; print(embedding)") -curl http://${HOST_IP}:${CHATQNA_REDIS_RETRIEVER_PORT}/v1/retrieval \ - -X POST \ - -d "{\"text\":\"test\",\"embedding\":${your_embedding}}" \ - -H 'Content-Type: application/json' +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +eaf24161aca8 opea/nginx:latest "/docker-entrypoint.…" 37 seconds ago Up 5 seconds 0.0.0.0:18104->80/tcp, [::]:18104->80/tcp chatqna-nginx-server +2fce48a4c0f4 opea/chatqna-ui:latest "docker-entrypoint.s…" 37 seconds ago Up 5 seconds 0.0.0.0:18101->5173/tcp, [::]:18101->5173/tcp chatqna-ui-server +613c384979f4 opea/chatqna:latest "bash entrypoint.sh" 37 seconds ago Up 5 seconds 0.0.0.0:18102->8888/tcp, [::]:18102->8888/tcp chatqna-backend-server +e0ef1ea67640 opea/llm-faqgen:latest "bash entrypoint.sh" 37 seconds ago Up 36 seconds 0.0.0.0:18011->9000/tcp, [::]:18011->9000/tcp chatqna-llm-faqgen +05512bd29fee opea/dataprep:latest "sh -c 'python $( [ …" 37 seconds ago Up 36 seconds (healthy) 0.0.0.0:18103->5000/tcp, [::]:18103->5000/tcp chatqna-dataprep-service +49844d339d1d opea/retriever:latest "python opea_retriev…" 37 seconds ago Up 36 seconds 0.0.0.0:7000->7000/tcp, [::]:7000->7000/tcp chatqna-retriever +75b698fe7de0 ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 "text-embeddings-rou…" 37 seconds ago Up 36 seconds 0.0.0.0:18808->80/tcp, [::]:18808->80/tcp chatqna-tei-reranking-service +342f01bfdbb2 opea/vllm-rocm:latest "python3 /workspace/…" 37 seconds ago Up 36 seconds 0.0.0.0:18008->8011/tcp, 
[::]:18008->8011/tcp chatqna-vllm-service +6081eb1c119d redis/redis-stack:7.2.0-v9 "/entrypoint.sh" 37 seconds ago Up 36 seconds 0.0.0.0:6379->6379/tcp, [::]:6379->6379/tcp, 0.0.0.0:8001->8001/tcp, [::]:8001->8001/tcp chatqna-redis-vector-db +eded17420782 ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 "text-embeddings-rou…" 37 seconds ago Up 36 seconds 0.0.0.0:18090->80/tcp, [::]:18090->80/tcp chatqna-tei-embedding-service ``` -Checking the response from the service. The response should be similar to JSON: - -```json -{ "id": "e191846168aed1f80b2ea12df80844d2", "retrieved_docs": [], "initial_query": "test", "top_n": 1, "metadata": [] } -``` +If any issues are encountered during deployment, refer to the [Troubleshooting](../../../../README_miscellaneous.md#troubleshooting) section. -If the response corresponds to the form of the provided JSON, then we consider the -Retriever Microservice verification successful. +### Validate the Pipeline -### 3. Validate TEI Reranking Service +Once the ChatQnA services are running, test the pipeline using the following command: ```bash -curl http://${HOST_IP}:${CHATQNA_TEI_RERANKING_PORT}/rerank \ - -X POST \ - -d '{"query":"What is Deep Learning?", "texts": ["Deep Learning is not...", "Deep learning is..."]}' \ - -H 'Content-Type: application/json' -``` - -Checking the response from the service. The response should be similar to JSON: - -```json -[ - { "index": 1, "score": 0.94238955 }, - { "index": 0, "score": 0.120219156 } -] -``` - -If the response corresponds to the form of the provided JSON, then we consider the TEI Reranking Service -verification successful. - -### 4. 
Validate the vLLM/TGI Service - -#### If you use vLLM: - -```bash -DATA='{"model": "meta-llama/Meta-Llama-3-8B-Instruct", '\ -'"messages": [{"role": "user", "content": "What is a Deep Learning?"}], "max_tokens": 64}' - -curl http://${HOST_IP}:${CHATQNA_VLLM_SERVICE_PORT}/v1/chat/completions \ - -X POST \ - -d "$DATA" \ - -H 'Content-Type: application/json' -``` - -Checking the response from the service. The response should be similar to JSON: - -```json -{ - "id": "chatcmpl-91003647d1c7469a89e399958f390f67", - "object": "chat.completion", - "created": 1742877228, - "model": "meta-llama/Meta-Llama-3-8B-Instruct", - "choices": [ - { - "index": 0, - "message": { - "role": "assistant", - "content": "Deep Learning ( DL) is a subfield of Machine Learning (ML) that focuses on the design of algorithms and architectures inspired by the structure and function of the human brain. These algorithms are designed to analyze and interpret data that is presented in the form of patterns or signals, and they often mimic the way the human brain", - "tool_calls": [] - }, - "logprobs": null, - "finish_reason": "length", - "stop_reason": null - } - ], - "usage": { "prompt_tokens": 16, "total_tokens": 80, "completion_tokens": 64, "prompt_tokens_details": null }, - "prompt_logprobs": null -} -``` - -If the service response has a meaningful response in the value of the "choices.message.content" key, -then we consider the vLLM service to be successfully launched - -#### If you use TGI: - -```bash -DATA='{"inputs":"What is a Deep Learning?",'\ -'"parameters":{"max_new_tokens":64,"do_sample": true}}' - -curl http://${HOST_IP}:${CHATQNA_TGI_SERVICE_PORT}/generate \ - -X POST \ - -d "$DATA" \ - -H 'Content-Type: application/json' +curl http://${HOST_IP}:${CHATQNA_BACKEND_SERVICE_PORT}/v1/chatqna \ + -H "Content-Type: application/json" \ + -d '{"messages": "What is the revenue of Nike in 2023?"}' ``` -Checking the response from the service. 
The response should be similar to JSON: +**Note** : Access the ChatQnA UI by web browser through this URL: `http://${HOST_IP_EXTERNAL}:${CHATQNA_NGINX_PORT}` -```json -{ - "generated_text": " What is its application in Computer Vision?\nWhat is a Deep Learning?\nDeep learning is a subfield of machine learning that involves the use of artificial neural networks to model high-level abstractions in data. It involves the use of deep neural networks, which are composed of multiple layers, to learn complex patterns in data. The" -} -``` - -If the service response has a meaningful response in the value of the "generated_text" key, -then we consider the TGI service to be successfully launched +### Cleanup the Deployment -### 5. Validate the LLM Service (if your used application with FaqGen) +To stop the containers associated with the deployment, execute the following command: ```bash -DATA='{"messages":"Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source '\ -'text embeddings and sequence classification models. TEI enables high-performance extraction for the most '\ -'popular models, including FlagEmbedding, Ember, GTE and E5.","max_tokens": 128}' - -curl http://${HOST_IP}:${CHATQNA_LLM_FAQGEN_PORT}/v1/faqgen \ - -X POST \ - -d "$DATA" \ - -H 'Content-Type: application/json' -``` - -Checking the response from the service. 
The response should be similar to JSON: - -```json -{ - "id": "58f0632f5f03af31471b895b0d0d397b", - "text": " Q: What is Text Embeddings Inference (TEI)?\n A: TEI is a toolkit for deploying and serving open source text embeddings and sequence classification models.\n\n Q: What models does TEI support?\n A: TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5.\n\n Q: What is the purpose of TEI?\n A: The purpose of TEI is to enable high-performance extraction for text embeddings and sequence classification models.\n\n Q: What are the benefits of using TEI?\n A: The benefits of using TEI include high", - "prompt": "Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5." -} -``` - -If the service response has a meaningful response in the value of the "text" key, -then we consider the LLM service to be successfully launched +# if used TGI +docker compose -f compose.yaml down +# if used TGI with FaqGen +# docker compose -f compose_faqgen.yaml down +# if used vLLM +# docker compose -f compose_vllm.yaml down +# if used vLLM with FaqGen +# docker compose -f compose_faqgen_vllm.yaml down -### 6. Validate the MegaService -```bash -curl http://${HOST_IP}:${CHATQNA_BACKEND_SERVICE_PORT}/v1/chatqna \ - -H "Content-Type: application/json" \ - -d '{"messages": "What is the revenue of Nike in 2023?"}' ``` -Checking the response from the service. 
The response should be similar to text:

-```textmate
-data: b' What'
-data: b' is'
-data: b' the'
-data: b' revenue'
-data: b' of'
-data: b' Nike'
-data: b' in'
-data: b' '
-data: b'202'
-data: b'3'
-data: b'?\n'
-data: b' '
-data: b' Answer'
-data: b':'
-data: b' According'
-data: b' to'
-data: b' the'
-data: b' search'
-data: b' results'
-data: b','
-data: b' the'
-data: b' revenue'
-data: b' of'
-data: b''
-
-data: [DONE]
+## ChatQnA Docker Compose Files

-```
+In the context of deploying a ChatQnA pipeline on an AMD ROCm platform, we can pick and choose different large language model serving frameworks, with or without the FaqGen capability. The table below outlines the various configurations that are available as part of the application. These configurations can be used as templates and can be extended to different components available in [GenAIComps](https://github.com/opea-project/GenAIComps.git).

-If the output lines in the "data" keys contain words (tokens) containing meaning, then the service
-is considered launched successfully.
+| File                                                   | Description                                                                                                              |
+| ------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------ |
+| [compose.yaml](./compose.yaml)                         | The LLM serving framework is TGI. Default compose file using TGI as serving framework and redis as vector database       |
+| [compose_faqgen.yaml](./compose_faqgen.yaml)           | The LLM serving framework is TGI with FaqGen. All other configurations remain the same as the default                    |
+| [compose_vllm.yaml](./compose_vllm.yaml)               | The LLM serving framework is vLLM. Compose file using vllm as serving framework and redis as vector database             |
+| [compose_faqgen_vllm.yaml](./compose_faqgen_vllm.yaml) | The LLM serving framework is vLLM with FaqGen. Compose file using vllm as serving framework and redis as vector database |

-### 7.
Validate the Frontend (UI) +## Validate MicroServices -To access the UI, use the URL - http://${EXTERNAL_HOST_IP}:${CHATQNA_NGINX_PORT} -A page should open when you click through to this address: +1. TEI Embedding Service -![UI start page](../../../../assets/img/ui-starting-page.png) + ```bash + curl http://${HOST_IP}:${CHATQNA_TEI_EMBEDDING_PORT}/embed \ + -X POST \ + -d '{"inputs":"What is Deep Learning?"}' \ + -H 'Content-Type: application/json' + ``` -If a page of this type has opened, then we believe that the service is running and responding, -and we can proceed to functional UI testing. +2. Retriever Microservice -Let's enter the task for the service in the "Enter prompt here" field. -For example, "What is a Deep Learning?" and press Enter. -After that, a page with the result of the task should open: + ```bash + export your_embedding=$(python3 -c "import random; embedding = [random.uniform(-1, 1) for _ in range(768)]; print(embedding)") + curl http://${HOST_IP}:${CHATQNA_REDIS_RETRIEVER_PORT}/v1/retrieval \ + -X POST \ + -d "{\"text\":\"test\",\"embedding\":${your_embedding}}" \ + -H 'Content-Type: application/json' + ``` -#### If used application without FaqGen +3. TEI Reranking Service -![UI result page](../../../../assets/img/ui-result-page.png) + ```bash + curl http://${HOST_IP}:${CHATQNA_TEI_RERANKING_PORT}/rerank \ + -X POST \ + -d '{"query":"What is Deep Learning?", "texts": ["Deep Learning is not...", "Deep learning is..."]}' \ + -H 'Content-Type: application/json' + ``` -#### If used application with FaqGen +4. vLLM/TGI Service -![UI result page](../../../../assets/img/ui-result-page-faqgen.png) + If you use vLLM: -If the result shown on the page is correct, then we consider the verification of the UI service to be successful. + ```bash + DATA='{"model": "meta-llama/Meta-Llama-3-8B-Instruct", '\ + '"messages": [{"role": "user", "content": "What is a Deep Learning?"}], "max_tokens": 64}' -### 5. 
Stop application + curl http://${HOST_IP}:${CHATQNA_VLLM_SERVICE_PORT}/v1/chat/completions \ + -X POST \ + -d "$DATA" \ + -H 'Content-Type: application/json' + ``` -#### If you use vLLM + If you use TGI: -```bash -cd ~/chatqna-install/GenAIExamples/ChatQnA/docker_compose/amd/gpu/rocm -docker compose -f compose_vllm.yaml down -``` + ```bash + DATA='{"inputs":"What is a Deep Learning?",'\ + '"parameters":{"max_new_tokens":64,"do_sample": true}}' -#### If you use vLLM with FaqGen + curl http://${HOST_IP}:${CHATQNA_TGI_SERVICE_PORT}/generate \ + -X POST \ + -d "$DATA" \ + -H 'Content-Type: application/json' + ``` -```bash -cd ~/chatqna-install/GenAIExamples/ChatQnA/docker_compose/amd/gpu/rocm -docker compose -f compose_faqgen_vllm.yaml down -``` +5. LLM Service (if your used application with FaqGen) -#### If you use TGI + ```bash + DATA='{"messages":"Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source '\ + 'text embeddings and sequence classification models. TEI enables high-performance extraction for the most '\ + 'popular models, including FlagEmbedding, Ember, GTE and E5.","max_tokens": 128}' -```bash -cd ~/chatqna-install/GenAIExamples/ChatQnA/docker_compose/amd/gpu/rocm -docker compose -f compose.yaml down -``` + curl http://${HOST_IP}:${CHATQNA_LLM_FAQGEN_PORT}/v1/faqgen \ + -X POST \ + -d "$DATA" \ + -H 'Content-Type: application/json' + ``` -#### If you use TGI with FaqGen +## Conclusion -```bash -cd ~/chatqna-install/GenAIExamples/ChatQnA/docker_compose/amd/gpu/rocm -docker compose -f compose_faqgen.yaml down -``` +This guide should enable developers to deploy the default configuration or any of the other compose yaml files for different configurations. It also highlights the configurable parameters that can be set before deployment. 
diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/compose.yaml b/ChatQnA/docker_compose/amd/gpu/rocm/compose.yaml index 078464bb9e..a71fcc830a 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/compose.yaml +++ b/ChatQnA/docker_compose/amd/gpu/rocm/compose.yaml @@ -165,7 +165,7 @@ services: chatqna-nginx-server: image: ${REGISTRY:-opea}/nginx:${TAG:-latest} - container_name: chaqna-nginx-server + container_name: chatqna-nginx-server depends_on: - chatqna-backend-server - chatqna-ui-server diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen.yaml b/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen.yaml index aec3987754..161bb4589f 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen.yaml +++ b/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen.yaml @@ -187,7 +187,7 @@ services: chatqna-nginx-server: image: ${REGISTRY:-opea}/nginx:${TAG:-latest} - container_name: chaqna-nginx-server + container_name: chatqna-nginx-server depends_on: - chatqna-backend-server - chatqna-ui-server diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen_vllm.yaml b/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen_vllm.yaml index 1c193f7181..b89b367e29 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen_vllm.yaml +++ b/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen_vllm.yaml @@ -192,7 +192,7 @@ services: chatqna-nginx-server: image: ${REGISTRY:-opea}/nginx:${TAG:-latest} - container_name: chaqna-nginx-server + container_name: chatqna-nginx-server depends_on: - chatqna-backend-server - chatqna-ui-server From ccc145ea1a646f3a68e382cbf920d51ab539a5d5 Mon Sep 17 00:00:00 2001 From: Artem Astafev Date: Fri, 25 Apr 2025 09:16:03 +0700 Subject: [PATCH 013/217] Refine README.MD for SearchQnA on AMD ROCm platform (#1876) Signed-off-by: Artem Astafev --- .../docker_compose/amd/gpu/rocm/README.md | 218 +++++++++++++++--- 1 file changed, 186 insertions(+), 32 deletions(-) diff --git a/SearchQnA/docker_compose/amd/gpu/rocm/README.md 
b/SearchQnA/docker_compose/amd/gpu/rocm/README.md index ae7458cadd..89f160501a 100644 --- a/SearchQnA/docker_compose/amd/gpu/rocm/README.md +++ b/SearchQnA/docker_compose/amd/gpu/rocm/README.md @@ -1,28 +1,29 @@ -# Example SearchQnA deployments on AMD GPU (ROCm) +# Deploying SearchQnA on AMD ROCm Platform -This document outlines the deployment process for a SearchQnA application utilizing the [GenAIComps](https://github.com/opea-project/GenAIComps.git) microservice pipeline on AMD GPU (ROCm). +This document outlines the single node deployment process for a SearchQnA application utilizing the [GenAIComps](https://github.com/opea-project/GenAIComps.git) microservices on AMD ROCm Platform. -This example includes the following sections: +## Table of Contents -- [SearchQnA Quick Start Deployment](#searchqna-quick-start-deployment): Demonstrates how to quickly deploy a SearchQnA application/pipeline on AMD GPU platform. -- [SearchQnA Docker Compose Files](#searchqna-docker-compose-files): Describes some example deployments and their docker compose files. -- [Launch the UI](#launch-the-ui): Guideline for UI usage +1. [SearchQnA Quick Start Deployment](#searchqna-quick-start-deployment) +2. [SearchQnA Docker Compose Files](#searchqna-docker-compose-files) +3. [Validate Microservices](#validate-microservices) +4. [Launch the UI](#launch-the-ui): Guideline for UI usage +5. [Conclusion](#conclusion) ## SearchQnA Quick Start Deployment -This section describes how to quickly deploy and test the SearchQnA service manually on AMD GPU (ROCm). The basic steps are: +This section describes how to quickly deploy and test the SearchQnA service manually on an AMD ROCm Platform. The basic steps are: 1. [Access the Code](#access-the-code) -2. [Generate a HuggingFace Access Token](#generate-a-huggingface-access-token) -3. [Configure the Deployment Environment](#configure-the-deployment-environment) -4. 
[Deploy the Services Using Docker Compose](#deploy-the-services-using-docker-compose) -5. [Check the Deployment Status](#check-the-deployment-status) -6. [Test the Pipeline](#test-the-pipeline) -7. [Cleanup the Deployment](#cleanup-the-deployment) +2. [Configure the Deployment Environment](#configure-the-deployment-environment) +3. [Deploy the Services Using Docker Compose](#deploy-the-services-using-docker-compose) +4. [Check the Deployment Status](#check-the-deployment-status) +5. [Validate the Pipeline](#validate-the-pipeline) +6. [Cleanup the Deployment](#cleanup-the-deployment) ### Access the Code -Clone the GenAIExample repository and access the SearchQnA AMD GPU (ROCm) Docker Compose files and supporting scripts: +Clone the GenAIExample repository and access the SearchQnA AMD ROCm Platform Docker Compose files and supporting scripts: ```bash git clone https://github.com/opea-project/GenAIExamples.git @@ -41,34 +42,56 @@ Some HuggingFace resources require an access token. Developers can create one by ### Configure the Deployment Environment -To set up environment variables for deploying SearchQnA services, source the _setup_env.sh_ script in this directory: +To set up environment variables for deploying SearchQnA services, set up some parameters specific to the deployment environment and source the `set_env.sh` script in this directory: -``` -//with TGI: -source ./set_env.sh -``` +#### For vLLM inference type deployment (default) -``` -//with VLLM: +```bash +export host_ip="External_Public_IP" # ip address of the node +export GOOGLE_CSE_ID="your cse id" +export GOOGLE_API_KEY="your google api key" +export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export http_proxy="Your_HTTP_Proxy" # http proxy if any +export https_proxy="Your_HTTPs_Proxy" # https proxy if any +export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed +export NGINX_PORT=${your_nginx_port} # your usable port for nginx, 80 for example source ./set_env_vllm.sh 
``` -The _setup_env.sh_ script will prompt for required and optional environment variables used to configure the SearchQnA services based on TGI. The _setup_env_vllm.sh_ script will prompt for required and optional environment variables used to configure the SearchQnA services based on VLLM. If a value is not entered, the script will use a default value for the same. It will also generate a _.env_ file defining the desired configuration. Consult the section on [SearchQnA Service configuration](#SearchQnA-service-configuration) for information on how service specific configuration parameters affect deployments. +#### For TGI inference type deployment + +```bash +export host_ip="External_Public_IP" # ip address of the node +export GOOGLE_CSE_ID="your cse id" +export GOOGLE_API_KEY="your google api key" +export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export http_proxy="Your_HTTP_Proxy" # http proxy if any +export https_proxy="Your_HTTPs_Proxy" # https proxy if any +export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed +export NGINX_PORT=${your_nginx_port} # your usable port for nginx, 80 for example +source ./set_env.sh +``` + +Consult the section on [SearchQnA Service configuration](#SearchQnA-configuration) for information on how service specific configuration parameters affect deployments. ### Deploy the Services Using Docker Compose To deploy the SearchQnA services, execute the `docker compose up` command with the appropriate arguments. 
For a default deployment, execute: -```bash -//with TGI: -docker compose -f compose.yaml up -d -``` +#### For vLLM inference type deployment (default) ```bash //with VLLM: docker compose -f compose_vllm.yaml up -d ``` +#### For TGI inference type deployment + +```bash +//with TGI: +docker compose -f compose.yaml up -d +``` + **Note**: developers should build docker image from source when: - Developing off the git main branch (as the container's ports in the repo may be different from the published docker image). @@ -97,7 +120,40 @@ docker ps -a For the default deployment, the following containers should have started -### Test the Pipeline +#### For vLLM inference type deployment (default) + +``` +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +50e5f4a00fcc opea/searchqna-ui:latest "docker-entrypoint.s…" About a minute ago Up About a minute 0.0.0.0:18143->5173/tcp, [::]:18143->5173/tcp search-ui-server +a8f030d17e40 opea/searchqna:latest "python searchqna.py" About a minute ago Up About a minute 0.0.0.0:18142->8888/tcp, [::]:18142->8888/tcp search-backend-server +916c5db048a2 opea/llm-textgen:latest "bash entrypoint.sh" About a minute ago Up About a minute 0.0.0.0:3007->9000/tcp, [::]:3007->9000/tcp search-llm-server +bb46cdaf1794 opea/reranking:latest "python opea_reranki…" About a minute ago Up About a minute 0.0.0.0:3005->8000/tcp, [::]:3005->8000/tcp search-reranking-server +d89ab0ef3f41 opea/embedding:latest "sh -c 'python $( [ …" About a minute ago Up About a minute 0.0.0.0:3002->6000/tcp, [::]:3002->6000/tcp search-embedding-server +b248e55dd20f opea/vllm-rocm:latest "python3 /workspace/…" About a minute ago Up About a minute 0.0.0.0:3080->8011/tcp, [::]:3080->8011/tcp search-vllm-service +c3800753fac5 opea/web-retriever:latest "python opea_web_ret…" About a minute ago Up About a minute 0.0.0.0:3003->7077/tcp, [::]:3003->7077/tcp search-web-retriever-server +0db8af486bd0 ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 "text-embeddings-rou…" About 
a minute ago Up About a minute 0.0.0.0:3001->80/tcp, [::]:3001->80/tcp search-tei-embedding-server +3125915447ef ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 "text-embeddings-rou…" About a minute ago Up About a minute 0.0.0.0:3004->80/tcp, [::]:3004->80/tcp search-tei-reranking-server +``` + +#### For TGI inference type deployment + +``` +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +67cc886949a3 opea/searchqna-ui:latest "docker-entrypoint.s…" About a minute ago Up About a minute 0.0.0.0:18143->5173/tcp, [::]:18143->5173/tcp search-ui-server +6547aca0d5fd opea/searchqna:latest "python searchqna.py" About a minute ago Up About a minute 0.0.0.0:18142->8888/tcp, [::]:18142->8888/tcp search-backend-server +213b5d4d5fa5 opea/embedding:latest "sh -c 'python $( [ …" About a minute ago Up About a minute 0.0.0.0:3002->6000/tcp, [::]:3002->6000/tcp search-embedding-server +6b90d16100b2 opea/reranking:latest "python opea_reranki…" About a minute ago Up About a minute 0.0.0.0:3005->8000/tcp, [::]:3005->8000/tcp search-reranking-server +3266fd85207e opea/llm-textgen:latest "bash entrypoint.sh" About a minute ago Up About a minute 0.0.0.0:3007->9000/tcp, [::]:3007->9000/tcp search-llm-server +d7322b70c15d ghcr.io/huggingface/text-generation-inference:2.4.1-rocm "/tgi-entrypoint.sh …" About a minute ago Up About a minute 0.0.0.0:3006->80/tcp, [::]:3006->80/tcp search-tgi-service +a703b91b28ed ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 "text-embeddings-rou…" About a minute ago Up About a minute 0.0.0.0:3001->80/tcp, [::]:3001->80/tcp search-tei-embedding-server +22098a5eaf59 ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 "text-embeddings-rou…" About a minute ago Up About a minute 0.0.0.0:3004->80/tcp, [::]:3004->80/tcp search-tei-reranking-server +830fe84c971d opea/web-retriever:latest "python opea_web_ret…" About a minute ago Up About a minute 0.0.0.0:3003->7077/tcp, [::]:3003->7077/tcp search-web-retriever-server + +``` + +If any issues are 
encountered during deployment, refer to the [Troubleshooting](../../../../README_miscellaneous.md#troubleshooting) section. + +### Validate the Pipeline Once the SearchQnA services are running, test the pipeline using the following command: @@ -131,31 +187,125 @@ data: [DONE] A response text similar to the one above indicates that the service verification was successful. +**Note** : Access the SearchQnA UI by web browser through this URL: `http://${host_ip}:80`. Please confirm the `80` port is opened in the firewall. To validate each microservice used in the pipeline refer to the [Validate Microservices](#validate-microservices) section. + ### Cleanup the Deployment To stop the containers associated with the deployment, execute the following command: -```bash -//with TGI: -docker compose -f compose.yaml down -``` +#### For vLLM inference type deployment (default) ```bash //with VLLM: docker compose -f compose_vllm.yaml down ``` +#### For TGI inference type deployment + +```bash +//with TGI: +docker compose -f compose.yaml down +``` + All the SearchQnA containers will be stopped and then removed on completion of the "down" command. ## SearchQnA Docker Compose Files -When deploying the SearchQnA pipeline on AMD GPUs (ROCm), different large language model serving frameworks can be selected. The table below outlines the available configurations included in the application. +When deploying a SearchQnA pipeline on an AMD GPUs (ROCm), different large language model serving frameworks can be selected. The table below outlines the available configurations included in the application. These configurations can serve as templates and be extended to other components available in [GenAIComps](https://github.com/opea-project/GenAIComps.git). 
| File | Description | | ---------------------------------------- | ------------------------------------------------------------------------------------------ | | [compose.yaml](./compose.yaml) | Default compose file using tgi as serving framework | | [compose_vllm.yaml](./compose_vllm.yaml) | The LLM serving framework is vLLM. All other configurations remain the same as the default | +## Validate Microservices + +1. Embedding backend Service + + ```bash + curl http://${host_ip}:3001/embed \ + -X POST \ + -d '{"inputs":"What is Deep Learning?"}' \ + -H 'Content-Type: application/json' + ``` + +2. Embedding Microservice + + ```bash + curl http://${host_ip}:3002/v1/embeddings\ + -X POST \ + -d '{"text":"hello"}' \ + -H 'Content-Type: application/json' + ``` + +3. Web Retriever Microservice + + ```bash + export your_embedding=$(python3 -c "import random; embedding = [random.uniform(-1, 1) for _ in range(768)]; print(embedding)") + curl http://${host_ip}:3003/v1/web_retrieval \ + -X POST \ + -d "{\"text\":\"What is the 2024 holiday schedule?\",\"embedding\":${your_embedding}}" \ + -H 'Content-Type: application/json' + ``` + +4. Reranking backend Service + +```bash + # TEI Reranking service + curl http://${host_ip}:3004/rerank \ + -X POST \ + -d '{"query":"What is Deep Learning?", "texts": ["Deep Learning is not...", "Deep learning is..."]}' \ + -H 'Content-Type: application/json' +``` + +5. Reranking Microservice + +```bash + curl http://${host_ip}:3005/v1/reranking\ + -X POST \ + -d '{"initial_query":"What is Deep Learning?", "retrieved_docs": [{"text":"Deep Learning is not..."}, {"text":"Deep learning is..."}]}' \ + -H 'Content-Type: application/json' +``` + +6. LLM backend Service + +```bash + # TGI service + curl http://${host_ip}:3006/generate \ + -X POST \ + -d '{"inputs":"What is Deep Learning?","parameters":{"max_new_tokens":17, "do_sample": true}}' \ + -H 'Content-Type: application/json' +``` + +7. 
LLM Microservice + + ```bash + curl http://${host_ip}:3007/v1/chat/completions\ + -X POST \ + -d '{"query":"What is Deep Learning?","max_tokens":17,"top_k":10,"top_p":0.95,"typical_p":0.95,"temperature":0.01,"repetition_penalty":1.03,"stream":true}' \ + -H 'Content-Type: application/json' + ``` + +8. MegaService + + ```bash + curl http://${host_ip}:3008/v1/searchqna -H "Content-Type: application/json" -d '{ + "messages": "What is the latest news? Give me also the source link.", + "stream": "true" + }' + ``` + +9. Nginx Service + + ```bash + curl http://${host_ip}:${NGINX_PORT}/v1/searchqna \ + -H "Content-Type: application/json" \ + -d '{ + "messages": "What is the latest news? Give me also the source link.", + "stream": "true" + }' + ``` + ## Launch the UI Access the UI at http://${EXTERNAL_HOST_IP}:${SEARCH_FRONTEND_SERVICE_PORT}. A page should open when navigating to this address. @@ -167,3 +317,7 @@ Let's enter the task for the service in the "Enter prompt here" field. For examp ![UI start page](../../../../assets/img/searchqna-ui-response-example.png) A correct result displayed on the page indicates that the UI service has been successfully verified. + +## Conclusion + +This guide should enable developers to deploy the default configuration or any of the other compose yaml files for different configurations. It also highlights the configurable parameters that can be set before deployment. 
From 3b0bcb80a89a977e2ffdcb5e7c39f19b8217d7ae Mon Sep 17 00:00:00 2001
From: chyundunovDatamonsters
Date: Fri, 25 Apr 2025 12:33:08 +0700
Subject: [PATCH 014/217] DocSum - Adding files to deploy an application in the
 K8S environment using Helm (#1758)

Signed-off-by: Chingis Yundunov
Signed-off-by: Chingis Yundunov
Co-authored-by: Chingis Yundunov
Co-authored-by: Artem Astafev
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: chen, suyue
---
 DocSum/kubernetes/helm/README.md            | 147 ++++++++++++++++++++
 DocSum/kubernetes/helm/rocm-tgi-values.yaml |  45 ++++++
 DocSum/kubernetes/helm/rocm-values.yaml     |  40 ++++++
 3 files changed, 232 insertions(+)
 create mode 100644 DocSum/kubernetes/helm/rocm-tgi-values.yaml
 create mode 100644 DocSum/kubernetes/helm/rocm-values.yaml

diff --git a/DocSum/kubernetes/helm/README.md b/DocSum/kubernetes/helm/README.md
index aa4bf07205..64537cc06f 100644
--- a/DocSum/kubernetes/helm/README.md
+++ b/DocSum/kubernetes/helm/README.md
@@ -16,3 +16,150 @@ helm install docsum oci://ghcr.io/opea-project/charts/docsum --set global.HUGGI
 export HFTOKEN="insert-your-huggingface-token-here"
 helm install docsum oci://ghcr.io/opea-project/charts/docsum --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} -f gaudi-values.yaml
 ```
+
+## Deploy on AMD ROCm using Helm charts from the binary Helm repository
+
+```bash
+mkdir ~/docsum-k8s-install && cd ~/docsum-k8s-install
+```
+
+### Cloning repos
+
+```bash
+git clone https://github.com/opea-project/GenAIExamples.git
+```
+
+### Go to the installation directory
+
+```bash
+cd GenAIExamples/DocSum/kubernetes/helm
+```
+
+### Setting system variables
+
+```bash
+export HFTOKEN="your_huggingface_token"
+export MODELDIR="/mnt/opea-models"
+export MODELNAME="Intel/neural-chat-7b-v3-3"
+```
+
+### Setting variables in Values files
+
+#### If ROCm vLLM used
+```bash
+nano ~/docsum-k8s-install/GenAIExamples/DocSum/kubernetes/helm/rocm-values.yaml
+```
+
+- 
HIP_VISIBLE_DEVICES - this variable specifies the ID of the GPU that you want to use.
+  You can specify either one or several comma-separated ones - "0" or "0,1,2,3"
+- TENSOR_PARALLEL_SIZE - must match the number of GPUs used
+- resources:
+    limits:
+      amd.com/gpu: "1" - replace "1" with the number of GPUs used
+
+#### If ROCm TGI used
+
+```bash
+nano ~/docsum-k8s-install/GenAIExamples/DocSum/kubernetes/helm/rocm-tgi-values.yaml
+```
+
+- HIP_VISIBLE_DEVICES - this variable specifies the ID of the GPU that you want to use.
+  You can specify either one or several comma-separated ones - "0" or "0,1,2,3"
+- extraCmdArgs: [ "--num-shard","1" ] - replace "1" with the number of GPUs used
+- resources:
+    limits:
+      amd.com/gpu: "1" - replace "1" with the number of GPUs used
+
+### Installing the Helm Chart
+
+#### If ROCm vLLM used
+```bash
+helm upgrade --install docsum oci://ghcr.io/opea-project/charts/docsum \
+  --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} \
+  --values rocm-values.yaml
+```
+
+#### If ROCm TGI used
+```bash
+helm upgrade --install docsum oci://ghcr.io/opea-project/charts/docsum \
+  --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} \
+  --values rocm-tgi-values.yaml
+```
+
+## Deploy on AMD ROCm using Helm charts from Git repositories
+
+### Creating working dirs
+
+```bash
+mkdir ~/docsum-k8s-install && cd ~/docsum-k8s-install
+```
+
+### Cloning repos
+
+```bash
+git clone https://github.com/opea-project/GenAIExamples.git
+git clone https://github.com/opea-project/GenAIInfra.git
+```
+
+### Go to the installation directory
+
+```bash
+cd GenAIExamples/DocSum/kubernetes/helm
+```
+
+### Setting system variables
+
+```bash
+export HFTOKEN="your_huggingface_token"
+export MODELDIR="/mnt/opea-models"
+export MODELNAME="Intel/neural-chat-7b-v3-3"
+```
+
+### Setting variables in Values files
+
+#### If ROCm vLLM used
+```bash
+nano ~/docsum-k8s-install/GenAIExamples/DocSum/kubernetes/helm/rocm-values.yaml
+```
+
+- 
HIP_VISIBLE_DEVICES - this variable specifies the ID of the GPU that you want to use. +You can specify either one or several comma-separated ones - "0" or "0,1,2,3" +- TENSOR_PARALLEL_SIZE - must match the number of GPUs used +- resources: + limits: + amd.com/gpu: "1" - replace "1" with the number of GPUs used + +#### If ROCm TGI used + +```bash +nano ~/docsum-k8s-install/GenAIExamples/DocSum/kubernetes/helm/rocm-tgi-values.yaml +``` + +- HIP_VISIBLE_DEVICES - this variable specifies the ID of the GPU that you want to use. + You can specify either one or several comma-separated ones - "0" or "0,1,2,3" +- extraCmdArgs: [ "--num-shard","1" ] - replace "1" with the number of GPUs used +- resources: + limits: + amd.com/gpu: "1" - replace "1" with the number of GPUs used + +### Installing the Helm Chart + +#### If ROCm vLLM used +```bash +cd ~/docsum-k8s-install/GenAIInfra/helm-charts +./update_dependency.sh +helm dependency update docsum +helm upgrade --install docsum docsum \ + --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} \ + --values ../../GenAIExamples/DocSum/kubernetes/helm/rocm-values.yaml +``` + +#### If ROCm TGI used +```bash +cd ~/docsum-k8s-install/GenAIInfra/helm-charts +./update_dependency.sh +helm dependency update docsum +helm upgrade --install docsum docsum \ + --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} \ + --values ../../GenAIExamples/DocSum/kubernetes/helm/rocm-tgi-values.yaml +``` diff --git a/DocSum/kubernetes/helm/rocm-tgi-values.yaml b/DocSum/kubernetes/helm/rocm-tgi-values.yaml new file mode 100644 index 0000000000..d3b5e49722 --- /dev/null +++ b/DocSum/kubernetes/helm/rocm-tgi-values.yaml @@ -0,0 +1,45 @@ +# Copyright (C) 2025 Advanced Micro Devices, Inc. 
+ +tgi: + enabled: true + accelDevice: "rocm" + image: + repository: ghcr.io/huggingface/text-generation-inference + tag: "2.4.1-rocm" + MAX_INPUT_LENGTH: "1024" + MAX_TOTAL_TOKENS: "2048" + USE_FLASH_ATTENTION: "false" + FLASH_ATTENTION_RECOMPUTE: "false" + HIP_VISIBLE_DEVICES: "0" + MAX_BATCH_SIZE: "4" + extraCmdArgs: [ "--num-shard","1" ] + resources: + limits: + amd.com/gpu: "1" + requests: + cpu: 1 + memory: 16Gi + securityContext: + readOnlyRootFilesystem: false + runAsNonRoot: false + runAsUser: 0 + capabilities: + add: + - SYS_PTRACE + readinessProbe: + initialDelaySeconds: 60 + periodSeconds: 5 + timeoutSeconds: 1 + failureThreshold: 120 + startupProbe: + initialDelaySeconds: 60 + periodSeconds: 5 + timeoutSeconds: 1 + failureThreshold: 120 + +llm-uservice: + DOCSUM_BACKEND: "TGI" + retryTimeoutSeconds: 720 + +vllm: + enabled: false diff --git a/DocSum/kubernetes/helm/rocm-values.yaml b/DocSum/kubernetes/helm/rocm-values.yaml new file mode 100644 index 0000000000..7236f50bd7 --- /dev/null +++ b/DocSum/kubernetes/helm/rocm-values.yaml @@ -0,0 +1,40 @@ +# Copyright (C) 2025 Advanced Micro Devices, Inc. 
+ +tgi: + enabled: false + +llm-uservice: + DOCSUM_BACKEND: "vLLM" + retryTimeoutSeconds: 720 + +vllm: + enabled: true + accelDevice: "rocm" + image: + repository: opea/vllm-rocm + tag: latest + env: + HIP_VISIBLE_DEVICES: "0" + TENSOR_PARALLEL_SIZE: "1" + HF_HUB_DISABLE_PROGRESS_BARS: "1" + HF_HUB_ENABLE_HF_TRANSFER: "0" + VLLM_USE_TRITON_FLASH_ATTN: "0" + VLLM_WORKER_MULTIPROC_METHOD: "spawn" + PYTORCH_JIT: "0" + HF_HOME: "/data" + extraCmd: + command: [ "python3", "/workspace/api_server.py" ] + extraCmdArgs: [ "--swap-space", "16", + "--disable-log-requests", + "--dtype", "float16", + "--num-scheduler-steps", "1", + "--distributed-executor-backend", "mp" ] + resources: + limits: + amd.com/gpu: "1" + startupProbe: + failureThreshold: 180 + securityContext: + readOnlyRootFilesystem: false + runAsNonRoot: false + runAsUser: 0 From ef9290f2454a864bf67a3bebb3706128362e86c8 Mon Sep 17 00:00:00 2001 From: chyundunovDatamonsters Date: Fri, 25 Apr 2025 12:36:40 +0700 Subject: [PATCH 015/217] DocSum - refactoring README.md for deploy application on ROCm (#1881) Signed-off-by: Chingis Yundunov --- DocSum/docker_compose/amd/gpu/rocm/README.md | 140 +++++++++++++++---- 1 file changed, 110 insertions(+), 30 deletions(-) diff --git a/DocSum/docker_compose/amd/gpu/rocm/README.md b/DocSum/docker_compose/amd/gpu/rocm/README.md index 2c4a196149..fe37f39d57 100644 --- a/DocSum/docker_compose/amd/gpu/rocm/README.md +++ b/DocSum/docker_compose/amd/gpu/rocm/README.md @@ -23,17 +23,17 @@ This section describes how to quickly deploy and test the DocSum service manuall ### Access the Code -Clone the GenAIExample repository and access the ChatQnA AMD GPU platform Docker Compose files and supporting scripts: +Clone the GenAIExample repository and access the DocSum AMD GPU platform Docker Compose files and supporting scripts: -``` +```bash git clone https://github.com/opea-project/GenAIExamples.git cd GenAIExamples/DocSum/docker_compose/amd/gpu/rocm ``` -Checkout a released version, such as 
v1.2: +Checkout a released version, such as v1.3: ``` -git checkout v1.2 +git checkout v1.3 ``` ### Generate a HuggingFace Access Token @@ -42,33 +42,96 @@ Some HuggingFace resources, such as some models, are only accessible if you have ### Configure the Deployment Environment -To set up environment variables for deploying DocSum services, source the _set_env.sh_ script in this directory: +To set up environment variables for deploying DocSum services, set up some parameters specific to the deployment environment and source the `set_env_*.sh` script in this directory: -``` -source ./set_env.sh +- if used vLLM - set_env_vllm.sh +- if used TGI - set_env.sh + +Set the values of the variables: + +- **HOST_IP, HOST_IP_EXTERNAL** - These variables are used to configure the name/address of the service in the operating system environment for the application services to interact with each other and with the outside world. + + If your server uses only an internal address and is not accessible from the Internet, then the values for these two variables will be the same and the value will be equal to the server's internal name/address. + + If your server uses only an external, Internet-accessible address, then the values for these two variables will be the same and the value will be equal to the server's external name/address. + + If your server is located on an internal network, has an internal address, but is accessible from the Internet via a proxy/firewall/load balancer, then the HOST_IP variable will have a value equal to the internal name/address of the server, and the EXTERNAL_HOST_IP variable will have a value equal to the external name/address of the proxy/firewall/load balancer behind which the server is located. + + We set these values in the file set_env\*\*\*\*.sh + +- **Variables with names like "**\*\*\*\*\*\*\_PORT"\*\* - These variables set the IP port numbers for establishing network connections to the application services. 
+  The values shown in the file set_env.sh or set_env_vllm.sh are the values used for the development and testing of the application, as well as configured for the environment in which the development is performed. These values must be configured in accordance with the rules of network access to your environment's server, and must not overlap with the IP ports of other applications that are already in use.
+
+Setting variables in the operating system environment:
+
+```bash
+export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token"
+source ./set_env_*.sh # replace the script name with the appropriate one
+```
 
-The _set_env.sh_ script will prompt for required and optional environment variables used to configure the DocSum services. If a value is not entered, the script will use a default value for the same. It will also generate a _.env_ file defining the desired configuration. Consult the section on [DocSum Service configuration](#docsum-service-configuration) for information on how service specific configuration parameters affect deployments.
+Consult the section on [DocSum Service configuration](#docsum-configuration) for information on how service specific configuration parameters affect deployments.
 
 ### Deploy the Services Using Docker Compose
 
-To deploy the DocSum services, execute the `docker compose up` command with the appropriate arguments. For a default deployment, execute:
+To deploy the DocSum services, execute the `docker compose up` command with the appropriate arguments. For a default deployment with TGI, execute the command below. It uses the 'compose.yaml' file.
```bash -docker compose up -d +cd docker_compose/amd/gpu/rocm +# if used TGI +docker compose -f compose.yaml up -d +# if used vLLM +# docker compose -f compose_vllm.yaml up -d +``` + +To enable GPU support for AMD GPUs, the following configuration is added to the Docker Compose file: + +- compose_vllm.yaml - for vLLM-based application +- compose.yaml - for TGI-based + +```yaml +shm_size: 1g +devices: + - /dev/kfd:/dev/kfd + - /dev/dri:/dev/dri +cap_add: + - SYS_PTRACE +group_add: + - video +security_opt: + - seccomp:unconfined ``` -**Note**: developers should build docker image from source when: +This configuration forwards all available GPUs to the container. To use a specific GPU, specify its `cardN` and `renderN` device IDs. For example: + +```yaml +shm_size: 1g +devices: + - /dev/kfd:/dev/kfd + - /dev/dri/card0:/dev/dri/card0 + - /dev/dri/render128:/dev/dri/render128 +cap_add: + - SYS_PTRACE +group_add: + - video +security_opt: + - seccomp:unconfined +``` + +**How to Identify GPU Device IDs:** +Use AMD GPU driver utilities to determine the correct `cardN` and `renderN` IDs for your GPU. -- Developing off the git main branch (as the container's ports in the repo may be different from the published docker image). -- Unable to download the docker image. -- Use a specific version of Docker image. +> **Note**: developers should build docker image from source when: +> +> - Developing off the git main branch (as the container's ports in the repo may be different > from the published docker image). +> - Unable to download the docker image. +> - Use a specific version of Docker image. 
Please refer to the table below to build different microservices from source: | Microservice | Deployment Guide | | ------------ | ------------------------------------------------------------------------------------------------------------------------------------- | | whisper | [whisper build guide](https://github.com/opea-project/GenAIComps/tree/main/comps/third_parties/whisper/src) | +| TGI | [TGI project](https://github.com/huggingface/text-generation-inference.git) | | vLLM | [vLLM build guide](https://github.com/opea-project/GenAIComps/tree/main/comps/third_parties/vllm#build-docker) | | llm-docsum | [LLM-DocSum build guide](https://github.com/opea-project/GenAIComps/tree/main/comps/llms/src/doc-summarization#12-build-docker-image) | | MegaService | [MegaService build guide](../../../../README_miscellaneous.md#build-megaservice-docker-image) | @@ -84,6 +147,8 @@ docker ps -a For the default deployment, the following 5 containers should have started: +If used TGI: + ``` CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 748f577b3c78 opea/whisper:latest "python whisper_s…" 5 minutes ago Up About a minute 0.0.0.0:7066->7066/tcp, :::7066->7066/tcp whisper-service @@ -93,24 +158,39 @@ fds3dd5b9fd8 opea/docsum:latest "py 78964d0c1hg5 ghcr.io/huggingface/text-generation-inference:2.4.1-rocm "/tgi-entrypoint.sh" 5 minutes ago Up 5 minutes (healthy) 0.0.0.0:8008->80/tcp, [::]:8008->80/tcp docsum-tgi-service ``` +If used vLLM: + +``` +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +748f577b3c78 opea/whisper:latest "python whisper_s…" 5 minutes ago Up About a minute 0.0.0.0:7066->7066/tcp, :::7066->7066/tcp whisper-service +4eq8b7034fd9 opea/docsum-gradio-ui:latest "docker-entrypoint.s…" 5 minutes ago Up About a minute 0.0.0.0:5173->5173/tcp, :::5173->5173/tcp docsum-ui-server +fds3dd5b9fd8 opea/docsum:latest "python docsum.py" 5 minutes ago Up About a minute 0.0.0.0:8888->8888/tcp, :::8888->8888/tcp docsum-backend-server +78fsd6fabfs7 opea/llm-docsum:latest 
"bash entrypoint.sh" 5 minutes ago Up About a minute 0.0.0.0:9000->9000/tcp, :::9000->9000/tcp docsum-llm-server +78964d0c1hg5 opea/vllm-rocm:latest "python3 /workspace/…" 5 minutes ago Up 5 minutes (healthy) 0.0.0.0:8008->80/tcp, [::]:8008->80/tcp docsum-vllm-service +``` + ### Test the Pipeline Once the DocSum services are running, test the pipeline using the following command: ```bash -curl -X POST http://${host_ip}:8888/v1/docsum \ +curl -X POST http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: application/json" \ -d '{"type": "text", "messages": "Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5."}' ``` -**Note** The value of _host_ip_ was set using the _set_env.sh_ script and can be found in the _.env_ file. +**Note** The value of _HOST_IP_ was set using the _set_env.sh_ script and can be found in the _.env_ file. ### Cleanup the Deployment To stop the containers associated with the deployment, execute the following command: -``` +```bash +# if used TGI docker compose -f compose.yaml down +# if used vLLM +# docker compose -f compose_vllm.yaml down + ``` All the DocSum containers will be stopped and then removed on completion of the "down" command. @@ -132,7 +212,7 @@ There are also some customized usage. ```bash # form input. Use English mode (default). -curl http://${host_ip}:8888/v1/docsum \ +curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: multipart/form-data" \ -F "type=text" \ -F "messages=Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5." 
\ @@ -141,7 +221,7 @@ curl http://${host_ip}:8888/v1/docsum \ -F "stream=True" # Use Chinese mode. -curl http://${host_ip}:8888/v1/docsum \ +curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: multipart/form-data" \ -F "type=text" \ -F "messages=2024年9月26日,北京——今日,英特尔正式发布英特尔® 至强® 6性能核处理器(代号Granite Rapids),为AI、数据分析、科学计算等计算密集型业务提供卓越性能。" \ @@ -150,7 +230,7 @@ curl http://${host_ip}:8888/v1/docsum \ -F "stream=True" # Upload file -curl http://${host_ip}:8888/v1/docsum \ +curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: multipart/form-data" \ -F "type=text" \ -F "messages=" \ @@ -166,11 +246,11 @@ curl http://${host_ip}:8888/v1/docsum \ Audio: ```bash -curl -X POST http://${host_ip}:8888/v1/docsum \ +curl -X POST http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: application/json" \ -d '{"type": "audio", "messages": "UklGRigAAABXQVZFZm10IBIAAAABAAEARKwAAIhYAQACABAAAABkYXRhAgAAAAEA"}' -curl http://${host_ip}:8888/v1/docsum \ +curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: multipart/form-data" \ -F "type=audio" \ -F "messages=UklGRigAAABXQVZFZm10IBIAAAABAAEARKwAAIhYAQACABAAAABkYXRhAgAAAAEA" \ @@ -182,11 +262,11 @@ curl http://${host_ip}:8888/v1/docsum \ Video: ```bash -curl -X POST http://${host_ip}:8888/v1/docsum \ +curl -X POST http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: application/json" \ -d '{"type": "video", "messages": "convert your video to base64 data type"}' -curl http://${host_ip}:8888/v1/docsum \ +curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: multipart/form-data" \ -F "type=video" \ -F "messages=convert your video to base64 data type" \ @@ -208,7 +288,7 @@ If you want to deal with long context, can set following parameters and select s "summary_type" is set to be "auto" by default, in this mode we will check input token length, if it exceed `MAX_INPUT_TOKENS`, 
`summary_type` will automatically be set to `refine` mode, otherwise will be set to `stuff` mode. ```bash -curl http://${host_ip}:8888/v1/docsum \ +curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: multipart/form-data" \ -F "type=text" \ -F "messages=" \ @@ -223,7 +303,7 @@ curl http://${host_ip}:8888/v1/docsum \ In this mode LLM generate summary based on complete input text. In this case please carefully set `MAX_INPUT_TOKENS` and `MAX_TOTAL_TOKENS` according to your model and device memory, otherwise it may exceed LLM context limit and raise error when meet long context. ```bash -curl http://${host_ip}:8888/v1/docsum \ +curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: multipart/form-data" \ -F "type=text" \ -F "messages=" \ @@ -238,7 +318,7 @@ curl http://${host_ip}:8888/v1/docsum \ Truncate mode will truncate the input text and keep only the first chunk, whose length is equal to `min(MAX_TOTAL_TOKENS - input.max_tokens - 50, MAX_INPUT_TOKENS)` ```bash -curl http://${host_ip}:8888/v1/docsum \ +curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: multipart/form-data" \ -F "type=text" \ -F "messages=" \ @@ -255,7 +335,7 @@ Map_reduce mode will split the inputs into multiple chunks, map each document to In this mode, default `chunk_size` is set to be `min(MAX_TOTAL_TOKENS - input.max_tokens - 50, MAX_INPUT_TOKENS)` ```bash -curl http://${host_ip}:8888/v1/docsum \ +curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: multipart/form-data" \ -F "type=text" \ -F "messages=" \ @@ -272,7 +352,7 @@ Refin mode will split the inputs into multiple chunks, generate summary for the In this mode, default `chunk_size` is set to be `min(MAX_TOTAL_TOKENS - 2 * input.max_tokens - 128, MAX_INPUT_TOKENS)`. 
```bash -curl http://${host_ip}:8888/v1/docsum \ +curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: multipart/form-data" \ -F "type=text" \ -F "messages=" \ @@ -288,7 +368,7 @@ Several UI options are provided. If you need to work with multimedia documents, ### Gradio UI -To access the UI, use the URL - http://${EXTERNAL_HOST_IP}:${FAGGEN_UI_PORT} +To access the UI, use the URL - http://${HOST_IP}:${DOCSUM_FRONTEND_PORT} A page should open when you click through to this address: ![UI start page](../../../../assets/img/ui-starting-page.png) From 18b4f39f27a77ca7a7a7906aaee9c819e36f6f09 Mon Sep 17 00:00:00 2001 From: rbrugaro Date: Thu, 24 Apr 2025 23:58:08 -0700 Subject: [PATCH 016/217] README fixes Finance Example (#1882) Signed-off-by: Rita Brugarolas Co-authored-by: Ying Hu --- FinanceAgent/README.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/FinanceAgent/README.md b/FinanceAgent/README.md index 5c7c0b3b41..64ce01cc0a 100644 --- a/FinanceAgent/README.md +++ b/FinanceAgent/README.md @@ -44,6 +44,8 @@ git clone https://github.com/opea-project/GenAIExamples.git ### 2.2 Set up env vars ```bash +export ip_address="External_Public_IP" +export no_proxy=${your_no_proxy},${ip_address} export HF_CACHE_DIR=/path/to/your/model/cache/ export HF_TOKEN= export FINNHUB_API_KEY= # go to https://finnhub.io/ to get your free api key @@ -100,8 +102,8 @@ bash launch_dataprep.sh Validate datat ingest data and retrieval from database: ```bash -python $WORKPATH/tests/test_redis_finance.py --port 6007 --test_option ingest -python $WORKPATH/tests/test_redis_finance.py --port 6007 --test_option get +python $WORKDIR/GenAIExamples/FinanceAgent/tests/test_redis_finance.py --port 6007 --test_option ingest +python $WORKDIR/GenAIExamples/FinanceAgent/tests/test_redis_finance.py --port 6007 --test_option get ``` ### 3.3 Launch the multi-agent system From be5933ad852d4d8eab4db6a3ea003cb71f24d2ba Mon Sep 17 00:00:00 2001 From: "chen, 
suyue" Date: Fri, 25 Apr 2025 17:05:48 +0800 Subject: [PATCH 017/217] Update benchmark scripts (#1883) Signed-off-by: chensuyue Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- ChatQnA/benchmark_chatqna.yaml | 2 +- DocSum/benchmark_docsum.yaml | 10 +++------- README-deploy-benchmark.md | 8 +++++++- deploy.py | 17 ++++++++--------- deploy_and_benchmark.py | 2 +- requirements.txt | 2 +- 6 files changed, 21 insertions(+), 20 deletions(-) diff --git a/ChatQnA/benchmark_chatqna.yaml b/ChatQnA/benchmark_chatqna.yaml index 407d555ceb..ae74aa9b92 100644 --- a/ChatQnA/benchmark_chatqna.yaml +++ b/ChatQnA/benchmark_chatqna.yaml @@ -3,7 +3,7 @@ deploy: device: gaudi - version: 1.2.0 + version: 1.3.0 modelUseHostPath: /mnt/models HUGGINGFACEHUB_API_TOKEN: "" # mandatory node: [1, 2, 4, 8] diff --git a/DocSum/benchmark_docsum.yaml b/DocSum/benchmark_docsum.yaml index 66aab5ba61..908f2be3ed 100644 --- a/DocSum/benchmark_docsum.yaml +++ b/DocSum/benchmark_docsum.yaml @@ -3,7 +3,7 @@ deploy: device: gaudi - version: 1.2.0 + version: 1.3.0 modelUseHostPath: /mnt/models HUGGINGFACEHUB_API_TOKEN: "" # mandatory node: [1] @@ -20,14 +20,10 @@ deploy: memory_capacity: "8000Mi" replicaCount: [1] - teirerank: - enabled: False - llm: engine: vllm # or tgi model_id: "meta-llama/Llama-3.2-3B-Instruct" # mandatory - replicaCount: - without_teirerank: [1] # When teirerank.enabled is False + replicaCount: [1] resources: enabled: False cards_per_instance: 1 @@ -78,7 +74,7 @@ benchmark: # workload, all of the test cases will run for benchmark bench_target: ["docsumfixed"] # specify the bench_target for benchmark - dataset: "/home/sdp/upload.txt" # specify the absolute path to the dataset file + dataset: "/home/sdp/pubmed_10.txt" # specify the absolute path to the dataset file summary_type: "stuff" stream: True diff --git a/README-deploy-benchmark.md b/README-deploy-benchmark.md index 9f1a13f8ff..1b0f0ee530 100644 --- a/README-deploy-benchmark.md +++ 
b/README-deploy-benchmark.md @@ -1,4 +1,4 @@ -# ChatQnA Benchmarking +# Deploy and Benchmark ## Purpose @@ -8,6 +8,11 @@ We aim to run these benchmarks and share them with the OPEA community for three - To establish a baseline for validating optimization solutions across different implementations, providing clear guidance on which methods are most effective for your use case. - To inspire the community to build upon our benchmarks, allowing us to better quantify new solutions in conjunction with current leading LLMs, serving frameworks etc. +### Support Example List + +- ChatQnA +- DocSum + ## Table of Contents - [Prerequisites](#prerequisites) @@ -68,6 +73,7 @@ Before running the benchmarks, ensure you have: ```bash pip install -r requirements.txt ``` + notes: the benchmark need `opea-eval>=1.3`, if v1.3 is not released, please build the `opea-eval` from [source](https://github.com/opea-project/GenAIEval). ## Data Preparation diff --git a/deploy.py b/deploy.py index 6c7da7474e..e74700ca53 100644 --- a/deploy.py +++ b/deploy.py @@ -224,6 +224,7 @@ def generate_helm_values(example_type, deploy_config, chart_dir, action_type, no "modelUseHostPath": deploy_config.get("modelUseHostPath", ""), } } + os.environ["HF_TOKEN"] = deploy_config.get("HUGGINGFACEHUB_API_TOKEN", "") # Configure components values = configure_node_selectors(values, node_selector or {}, deploy_config) @@ -338,17 +339,15 @@ def get_hw_values_file(deploy_config, chart_dir): version = deploy_config.get("version", "1.1.0") if os.path.isdir(chart_dir): - # Determine which values file to use based on version - if version in ["1.0.0", "1.1.0"]: - hw_values_file = os.path.join(chart_dir, f"{device_type}-values.yaml") - else: - hw_values_file = os.path.join(chart_dir, f"{device_type}-{llm_engine}-values.yaml") - + hw_values_file = os.path.join(chart_dir, f"{device_type}-{llm_engine}-values.yaml") if not os.path.exists(hw_values_file): print(f"Warning: {hw_values_file} not found") - hw_values_file = None - 
else: - print(f"Device-specific values file found: {hw_values_file}") + hw_values_file = os.path.join(chart_dir, f"{device_type}-values.yaml") + if not os.path.exists(hw_values_file): + print(f"Warning: {hw_values_file} not found") + print(f"Error: Can not found a correct values file for {device_type} with {llm_engine}") + sys.exit(1) + print(f"Device-specific values file found: {hw_values_file}") else: print(f"Error: Could not find directory for {chart_dir}") hw_values_file = None diff --git a/deploy_and_benchmark.py b/deploy_and_benchmark.py index f210f215dc..bb729c7b48 100644 --- a/deploy_and_benchmark.py +++ b/deploy_and_benchmark.py @@ -54,7 +54,7 @@ def construct_deploy_config(deploy_config, target_node, batch_param_value=None, # First determine which llm replicaCount to use based on teirerank.enabled services = new_config.get("services", {}) - teirerank_enabled = services.get("teirerank", {}).get("enabled", True) + teirerank_enabled = services.get("teirerank", {}).get("enabled", False) # Process each service's configuration for service_name, service_config in services.items(): diff --git a/requirements.txt b/requirements.txt index 637668c3d1..f851f780cb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ kubernetes locust numpy -opea-eval>=1.2 +opea-eval>=1.3 prometheus_client pytest pyyaml From c546d96e9875bcae388c69b37a28395f8e313dd7 Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Fri, 25 Apr 2025 23:00:36 +0800 Subject: [PATCH 018/217] downgrade tei version from 1.6 to 1.5, fix the chatqna perf regression (#1886) Signed-off-by: chensuyue --- .../kubernetes/intel/gaudi/README.md | 192 ------------ .../kubernetes/intel/gaudi/benchmark.sh | 102 ------- .../kubernetes/intel/gaudi/benchmark.yaml | 68 ----- .../kubernetes/intel/gaudi/deploy.py | 278 ------------------ .../intel/gaudi/generate_helm_values.py | 164 ----------- .../intel/cpu/aipc/compose.yaml | 4 +- .../intel/cpu/xeon/compose.yaml | 4 +- .../intel/cpu/xeon/compose_faqgen.yaml | 
4 +- .../intel/cpu/xeon/compose_faqgen_tgi.yaml | 4 +- .../intel/cpu/xeon/compose_milvus.yaml | 4 +- .../intel/cpu/xeon/compose_pinecone.yaml | 4 +- .../intel/cpu/xeon/compose_remote.yaml | 4 +- .../intel/cpu/xeon/compose_tgi.yaml | 4 +- .../cpu/xeon/compose_without_rerank.yaml | 2 +- .../docker_compose/intel/hpu/gaudi/README.md | 16 +- .../intel/hpu/gaudi/compose.yaml | 2 +- .../intel/hpu/gaudi/compose_faqgen.yaml | 2 +- .../intel/hpu/gaudi/compose_faqgen_tgi.yaml | 2 +- .../intel/hpu/gaudi/compose_guardrails.yaml | 2 +- .../intel/hpu/gaudi/compose_tgi.yaml | 2 +- .../hpu/gaudi/compose_without_rerank.yaml | 2 +- .../hpu/gaudi/how_to_validate_service.md | 2 +- ChatQnA/tests/test_compose_faqgen_on_gaudi.sh | 2 - ChatQnA/tests/test_compose_faqgen_on_rocm.sh | 3 - ChatQnA/tests/test_compose_faqgen_on_xeon.sh | 1 - .../tests/test_compose_faqgen_tgi_on_gaudi.sh | 3 - .../tests/test_compose_faqgen_tgi_on_xeon.sh | 2 - .../tests/test_compose_guardrails_on_gaudi.sh | 3 - ChatQnA/tests/test_compose_milvus_on_xeon.sh | 2 - ChatQnA/tests/test_compose_on_gaudi.sh | 2 - ChatQnA/tests/test_compose_on_rocm.sh | 3 - ChatQnA/tests/test_compose_on_xeon.sh | 2 - .../tests/test_compose_pinecone_on_xeon.sh | 2 - ChatQnA/tests/test_compose_tgi_on_gaudi.sh | 4 - ChatQnA/tests/test_compose_tgi_on_xeon.sh | 3 - .../test_compose_without_rerank_on_gaudi.sh | 3 - .../test_compose_without_rerank_on_xeon.sh | 2 - 37 files changed, 32 insertions(+), 873 deletions(-) delete mode 100644 ChatQnA/benchmark/performance/kubernetes/intel/gaudi/README.md delete mode 100755 ChatQnA/benchmark/performance/kubernetes/intel/gaudi/benchmark.sh delete mode 100644 ChatQnA/benchmark/performance/kubernetes/intel/gaudi/benchmark.yaml delete mode 100644 ChatQnA/benchmark/performance/kubernetes/intel/gaudi/deploy.py delete mode 100644 ChatQnA/benchmark/performance/kubernetes/intel/gaudi/generate_helm_values.py diff --git a/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/README.md 
b/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/README.md deleted file mode 100644 index a59dce5a7a..0000000000 --- a/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/README.md +++ /dev/null @@ -1,192 +0,0 @@ -# ChatQnA Benchmarking - -This folder contains a collection of Kubernetes manifest files for deploying the ChatQnA service across scalable nodes. It includes a comprehensive [benchmarking tool](https://github.com/opea-project/GenAIEval/blob/main/evals/benchmark/README.md) that enables throughput analysis to assess inference performance. - -By following this guide, you can run benchmarks on your deployment and share the results with the OPEA community. - -## Purpose - -We aim to run these benchmarks and share them with the OPEA community for three primary reasons: - -- To offer insights on inference throughput in real-world scenarios, helping you choose the best service or deployment for your needs. -- To establish a baseline for validating optimization solutions across different implementations, providing clear guidance on which methods are most effective for your use case. -- To inspire the community to build upon our benchmarks, allowing us to better quantify new solutions in conjunction with current leading llms, serving frameworks etc. - -## Metrics - -The benchmark will report the below metrics, including: - -- Number of Concurrent Requests -- End-to-End Latency: P50, P90, P99 (in milliseconds) -- End-to-End First Token Latency: P50, P90, P99 (in milliseconds) -- Average Next Token Latency (in milliseconds) -- Average Token Latency (in milliseconds) -- Requests Per Second (RPS) -- Output Tokens Per Second -- Input Tokens Per Second - -Results will be displayed in the terminal and saved as CSV file named `1_stats.csv` for easy export to spreadsheets. 
- -## Table of Contents - -- [Deployment](#deployment) - - [Prerequisites](#prerequisites) - - [Deployment Scenarios](#deployment-scenarios) - - [Case 1: Baseline Deployment with Rerank](#case-1-baseline-deployment-with-rerank) - - [Case 2: Baseline Deployment without Rerank](#case-2-baseline-deployment-without-rerank) - - [Case 3: Tuned Deployment with Rerank](#case-3-tuned-deployment-with-rerank) -- [Benchmark](#benchmark) - - [Test Configurations](#test-configurations) - - [Test Steps](#test-steps) - - [Upload Retrieval File](#upload-retrieval-file) - - [Run Benchmark Test](#run-benchmark-test) - - [Data collection](#data-collection) -- [Teardown](#teardown) - -## Deployment - -### Prerequisites - -- Kubernetes installation: Use [kubespray](https://github.com/opea-project/docs/blob/main/guide/installation/k8s_install/k8s_install_kubespray.md) or other official Kubernetes installation guides: -- (Optional) [Kubernetes set up guide on Intel Gaudi product](https://github.com/opea-project/GenAIInfra/blob/main/README.md#setup-kubernetes-cluster) -- Helm installation: Follow the [Helm documentation](https://helm.sh/docs/intro/install/#helm) to install Helm. -- Setup Hugging Face Token - - To access models and APIs from Hugging Face, set your token as environment variable. - ```bash - export HF_TOKEN="insert-your-huggingface-token-here" - ``` -- Prepare Shared Models (Optional but Strongly Recommended) - - Downloading models simultaneously to multiple nodes in your cluster can overload resources such as network bandwidth, memory and storage. To prevent resource exhaustion, it's recommended to preload the models in advance. 
- ```bash - pip install -U "huggingface_hub[cli]" - sudo mkdir -p /mnt/models - sudo chmod 777 /mnt/models - huggingface-cli download --cache-dir /mnt/models Intel/neural-chat-7b-v3-3 - export MODEL_DIR=/mnt/models - ``` - Once the models are downloaded, you can consider the following methods for sharing them across nodes: - - Persistent Volume Claim (PVC): This is the recommended approach for production setups. For more details on using PVC, refer to [PVC](https://github.com/opea-project/GenAIInfra/blob/main/helm-charts/README.md#using-persistent-volume). - - Local Host Path: For simpler testing, ensure that each node involved in the deployment follows the steps above to locally prepare the models. After preparing the models, use `--set global.modelUseHostPath=${MODELDIR}` in the deployment command. - -- Label Nodes - ```base - python deploy.py --add-label --num-nodes 2 - ``` - -### Deployment Scenarios - -The example below are based on a two-node setup. You can adjust the number of nodes by using the `--num-nodes` option. - -By default, these commands use the `default` namespace. To specify a different namespace, use the `--namespace` flag with deploy, uninstall, and kubernetes command. Additionally, update the `namespace` field in `benchmark.yaml` before running the benchmark test. 
- -For additional configuration options, run `python deploy.py --help` - -#### Case 1: Baseline Deployment with Rerank - -Deploy Command (with node number, Hugging Face token, model directory specified): -```bash -python deploy.py --hf-token $HF_TOKEN --model-dir $MODEL_DIR --num-nodes 2 --with-rerank -``` -Uninstall Command: -```bash -python deploy.py --uninstall -``` - -#### Case 2: Baseline Deployment without Rerank - -```bash -python deploy.py --hf-token $HFTOKEN --model-dir $MODELDIR --num-nodes 2 -``` -#### Case 3: Tuned Deployment with Rerank - -```bash -python deploy.py --hf-token $HFTOKEN --model-dir $MODELDIR --num-nodes 2 --with-rerank --tuned -``` - -## Benchmark - -### Test Configurations - -| Key | Value | -| -------- | ------- | -| Workload | ChatQnA | -| Tag | V1.1 | - -Models configuration -| Key | Value | -| ---------- | ------------------ | -| Embedding | BAAI/bge-base-en-v1.5 | -| Reranking | BAAI/bge-reranker-base | -| Inference | Intel/neural-chat-7b-v3-3 | - -Benchmark parameters -| Key | Value | -| ---------- | ------------------ | -| LLM input tokens | 1024 | -| LLM output tokens | 128 | - -Number of test requests for different scheduled node number: -| Node count | Concurrency | Query number | -| ----- | -------- | -------- | -| 1 | 128 | 640 | -| 2 | 256 | 1280 | -| 4 | 512 | 2560 | - -More detailed configuration can be found in configuration file [benchmark.yaml](./benchmark.yaml). - -### Test Steps - -Use `kubectl get pods` to confirm that all pods are `READY` before starting the test. - -#### Upload Retrieval File - -Before testing, upload a specified file to make sure the llm input have the token length of 1k. - -Get files: - -```bash -wget https://github.com/opea-project/GenAIEval/tree/main/evals/benchmark/data/upload_file.txt -``` - -Retrieve the `ClusterIP` of the `chatqna-data-prep` service. 
- -```bash -kubectl get svc -``` -Expected output: -```log -chatqna-data-prep ClusterIP xx.xx.xx.xx 6007/TCP 51m -``` - -Use the following `cURL` command to upload file: - -```bash -cd GenAIEval/evals/benchmark/data -curl -X POST "http://${cluster_ip}:6007/v1/dataprep/ingest" \ - -H "Content-Type: multipart/form-data" \ - -F "chunk_size=3800" \ - -F "files=@./upload_file.txt" -``` - -#### Run Benchmark Test - -Run the benchmark test using: -```bash -bash benchmark.sh -n 2 -``` -The `-n` argument specifies the number of test nodes. Required dependencies will be automatically installed when running the benchmark for the first time. - -#### Data collection - -All the test results will come to the folder `GenAIEval/evals/benchmark/benchmark_output`. - -## Teardown - -After completing the benchmark, use the following command to clean up the environment: - -Remove Node Labels: -```bash -python deploy.py --delete-label -``` diff --git a/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/benchmark.sh b/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/benchmark.sh deleted file mode 100755 index 9c3a9f84ed..0000000000 --- a/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/benchmark.sh +++ /dev/null @@ -1,102 +0,0 @@ -#!/bin/bash - -# Copyright (C) 2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -deployment_type="k8s" -node_number=1 -service_port=8888 -query_per_node=640 - -benchmark_tool_path="$(pwd)/GenAIEval" - -usage() { - echo "Usage: $0 [-d deployment_type] [-n node_number] [-i service_ip] [-p service_port]" - echo " -d deployment_type ChatQnA deployment type, select between k8s and docker (default: k8s)" - echo " -n node_number Test node number, required only for k8s deployment_type, (default: 1)" - echo " -i service_ip chatqna service ip, required only for docker deployment_type" - echo " -p service_port chatqna service port, required only for docker deployment_type, (default: 8888)" - exit 1 -} - -while getopts ":d:n:i:p:" opt; do - case 
${opt} in - d ) - deployment_type=$OPTARG - ;; - n ) - node_number=$OPTARG - ;; - i ) - service_ip=$OPTARG - ;; - p ) - service_port=$OPTARG - ;; - \? ) - echo "Invalid option: -$OPTARG" 1>&2 - usage - ;; - : ) - echo "Invalid option: -$OPTARG requires an argument" 1>&2 - usage - ;; - esac -done - -if [[ "$deployment_type" == "docker" && -z "$service_ip" ]]; then - echo "Error: service_ip is required for docker deployment_type" 1>&2 - usage -fi - -if [[ "$deployment_type" == "k8s" && ( -n "$service_ip" || -n "$service_port" ) ]]; then - echo "Warning: service_ip and service_port are ignored for k8s deployment_type" 1>&2 -fi - -function main() { - if [[ ! -d ${benchmark_tool_path} ]]; then - echo "Benchmark tool not found, setting up..." - setup_env - fi - run_benchmark -} - -function setup_env() { - git clone https://github.com/opea-project/GenAIEval.git - pushd ${benchmark_tool_path} - python3 -m venv stress_venv - source stress_venv/bin/activate - pip install -r requirements.txt - popd -} - -function run_benchmark() { - source ${benchmark_tool_path}/stress_venv/bin/activate - export DEPLOYMENT_TYPE=${deployment_type} - export SERVICE_IP=${service_ip:-"None"} - export SERVICE_PORT=${service_port:-"None"} - export LOAD_SHAPE=${load_shape:-"constant"} - export CONCURRENT_LEVEL=${concurrent_level:-5} - export ARRIVAL_RATE=${arrival_rate:-1.0} - if [[ -z $USER_QUERIES ]]; then - user_query=$((query_per_node*node_number)) - export USER_QUERIES="[${user_query}, ${user_query}, ${user_query}, ${user_query}]" - echo "USER_QUERIES not configured, setting to: ${USER_QUERIES}." 
- fi - export WARMUP=$(echo $USER_QUERIES | sed -e 's/[][]//g' -e 's/,.*//') - if [[ -z $WARMUP ]]; then export WARMUP=0; fi - if [[ -z $TEST_OUTPUT_DIR ]]; then - if [[ $DEPLOYMENT_TYPE == "k8s" ]]; then - export TEST_OUTPUT_DIR="${benchmark_tool_path}/evals/benchmark/benchmark_output/node_${node_number}" - else - export TEST_OUTPUT_DIR="${benchmark_tool_path}/evals/benchmark/benchmark_output/docker" - fi - echo "TEST_OUTPUT_DIR not configured, setting to: ${TEST_OUTPUT_DIR}." - fi - - envsubst < ./benchmark.yaml > ${benchmark_tool_path}/evals/benchmark/benchmark.yaml - cd ${benchmark_tool_path}/evals/benchmark - python benchmark.py -} - -main diff --git a/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/benchmark.yaml b/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/benchmark.yaml deleted file mode 100644 index 07945c2208..0000000000 --- a/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/benchmark.yaml +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright (C) 2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -test_suite_config: # Overall configuration settings for the test suite - examples: ["chatqna"] # The specific test cases being tested, e.g., chatqna, codegen, codetrans, faqgen, audioqna, visualqna - deployment_type: ${DEPLOYMENT_TYPE} # Default is "k8s", can also be "docker" - service_ip: ${SERVICE_IP} # Leave as None for k8s, specify for Docker - service_port: ${SERVICE_PORT} # Leave as None for k8s, specify for Docker - warm_ups: ${WARMUP} # Number of test requests for warm-up - run_time: 60m # The max total run time for the test suite - seed: # The seed for all RNGs - user_queries: ${USER_QUERIES} # Number of test requests at each concurrency level - query_timeout: 120 # Number of seconds to wait for a simulated user to complete any executing task before exiting. 120 sec by defeult. 
- random_prompt: false # Use random prompts if true, fixed prompts if false - collect_service_metric: false # Collect service metrics if true, do not collect service metrics if false - data_visualization: false # Generate data visualization if true, do not generate data visualization if false - llm_model: "Intel/neural-chat-7b-v3-3" # The LLM model used for the test - test_output_dir: "${TEST_OUTPUT_DIR}" # The directory to store the test output - load_shape: # Tenant concurrency pattern - name: ${LOAD_SHAPE} # poisson or constant(locust default load shape) - params: # Loadshape-specific parameters - constant: # Constant load shape specific parameters, activate only if load_shape.name is constant - concurrent_level: ${CONCURRENT_LEVEL} # If user_queries is specified, concurrent_level is target number of requests per user. If not, it is the number of simulated users - poisson: # Poisson load shape specific parameters, activate only if load_shape.name is poisson - arrival_rate: ${ARRIVAL_RATE} # Request arrival rate - -test_cases: - chatqna: - embedding: - run_test: false - service_name: "chatqna-embedding-usvc" # Replace with your service name - embedserve: - run_test: false - service_name: "chatqna-tei" # Replace with your service name - retriever: - run_test: false - service_name: "chatqna-retriever-usvc" # Replace with your service name - parameters: - search_type: "similarity" - k: 1 - fetch_k: 20 - lambda_mult: 0.5 - score_threshold: 0.2 - reranking: - run_test: false - service_name: "chatqna-reranking-usvc" # Replace with your service name - parameters: - top_n: 1 - rerankserve: - run_test: false - service_name: "chatqna-teirerank" # Replace with your service name - llm: - run_test: false - service_name: "chatqna-llm-uservice" # Replace with your service name - parameters: - max_tokens: 128 - temperature: 0.01 - top_k: 10 - top_p: 0.95 - repetition_penalty: 1.03 - stream: true - llmserve: - run_test: false - service_name: "chatqna-tgi" # Replace with your 
service name - e2e: - run_test: true - service_name: "chatqna" # Replace with your service name - k: 1 diff --git a/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/deploy.py b/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/deploy.py deleted file mode 100644 index 5a81918d77..0000000000 --- a/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/deploy.py +++ /dev/null @@ -1,278 +0,0 @@ -# Copyright (C) 2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import argparse -import glob -import json -import os -import shutil -import subprocess -import sys - -from generate_helm_values import generate_helm_values - - -def run_kubectl_command(command): - """Run a kubectl command and return the output.""" - try: - result = subprocess.run(command, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) - return result.stdout - except subprocess.CalledProcessError as e: - print(f"Error running command: {command}\n{e.stderr}") - exit(1) - - -def get_all_nodes(): - """Get the list of all nodes in the Kubernetes cluster.""" - command = ["kubectl", "get", "nodes", "-o", "json"] - output = run_kubectl_command(command) - nodes = json.loads(output) - return [node["metadata"]["name"] for node in nodes["items"]] - - -def add_label_to_node(node_name, label): - """Add a label to the specified node.""" - command = ["kubectl", "label", "node", node_name, label, "--overwrite"] - print(f"Labeling node {node_name} with {label}...") - run_kubectl_command(command) - print(f"Label {label} added to node {node_name} successfully.") - - -def add_labels_to_nodes(node_count=None, label=None, node_names=None): - """Add a label to the specified number of nodes or to specified nodes.""" - - if node_names: - # Add label to the specified nodes - for node_name in node_names: - add_label_to_node(node_name, label) - else: - # Fetch the node list and label the specified number of nodes - all_nodes = get_all_nodes() - if node_count is None or node_count > len(all_nodes): 
- print(f"Error: Node count exceeds the number of available nodes ({len(all_nodes)} available).") - sys.exit(1) - - selected_nodes = all_nodes[:node_count] - for node_name in selected_nodes: - add_label_to_node(node_name, label) - - -def clear_labels_from_nodes(label, node_names=None): - """Clear the specified label from specific nodes if provided, otherwise from all nodes.""" - label_key = label.split("=")[0] # Extract key from 'key=value' format - - # If specific nodes are provided, use them; otherwise, get all nodes - nodes_to_clear = node_names if node_names else get_all_nodes() - - for node_name in nodes_to_clear: - # Check if the node has the label by inspecting its metadata - command = ["kubectl", "get", "node", node_name, "-o", "json"] - node_info = run_kubectl_command(command) - node_metadata = json.loads(node_info) - - # Check if the label exists on this node - labels = node_metadata["metadata"].get("labels", {}) - if label_key in labels: - # Remove the label from the node - command = ["kubectl", "label", "node", node_name, f"{label_key}-"] - print(f"Removing label {label_key} from node {node_name}...") - run_kubectl_command(command) - print(f"Label {label_key} removed from node {node_name} successfully.") - else: - print(f"Label {label_key} not found on node {node_name}, skipping.") - - -def install_helm_release(release_name, chart_name, namespace, values_file, device_type): - """Deploy a Helm release with a specified name and chart. - - Parameters: - - release_name: The name of the Helm release. - - chart_name: The Helm chart name or path, e.g., "opea/chatqna". - - namespace: The Kubernetes namespace for deployment. - - values_file: The user values file for deployment. - - device_type: The device type (e.g., "gaudi") for specific configurations (optional). 
- """ - - # Check if the namespace exists; if not, create it - try: - # Check if the namespace exists - command = ["kubectl", "get", "namespace", namespace] - subprocess.run(command, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) - except subprocess.CalledProcessError: - # Namespace does not exist, create it - print(f"Namespace '{namespace}' does not exist. Creating it...") - command = ["kubectl", "create", "namespace", namespace] - subprocess.run(command, check=True) - print(f"Namespace '{namespace}' created successfully.") - - # Handle gaudi-specific values file if device_type is "gaudi" - hw_values_file = None - untar_dir = None - if device_type == "gaudi": - print("Device type is gaudi. Pulling Helm chart to get gaudi-values.yaml...") - - # Combine chart_name with fixed prefix - chart_pull_url = f"oci://ghcr.io/opea-project/charts/{chart_name}" - - # Pull and untar the chart - subprocess.run(["helm", "pull", chart_pull_url, "--untar"], check=True) - - # Find the untarred directory - untar_dirs = glob.glob(f"{chart_name}*") - if untar_dirs: - untar_dir = untar_dirs[0] - hw_values_file = os.path.join(untar_dir, "gaudi-values.yaml") - print("gaudi-values.yaml pulled and ready for use.") - else: - print(f"Error: Could not find untarred directory for {chart_name}") - return - - # Prepare the Helm install command - command = ["helm", "install", release_name, chart_name, "--namespace", namespace] - - # Append additional values file for gaudi if it exists - if hw_values_file: - command.extend(["-f", hw_values_file]) - - # Append the main values file - command.extend(["-f", values_file]) - - # Execute the Helm install command - try: - print(f"Running command: {' '.join(command)}") # Print full command for debugging - subprocess.run(command, check=True) - print("Deployment initiated successfully.") - except subprocess.CalledProcessError as e: - print(f"Error occurred while deploying Helm release: {e}") - - # Cleanup: Remove the untarred directory - if 
untar_dir and os.path.isdir(untar_dir): - print(f"Removing temporary directory: {untar_dir}") - shutil.rmtree(untar_dir) - print("Temporary directory removed successfully.") - - -def uninstall_helm_release(release_name, namespace=None): - """Uninstall a Helm release and clean up resources, optionally delete the namespace if not 'default'.""" - # Default to 'default' namespace if none is specified - if not namespace: - namespace = "default" - - try: - # Uninstall the Helm release - command = ["helm", "uninstall", release_name, "--namespace", namespace] - print(f"Uninstalling Helm release {release_name} in namespace {namespace}...") - run_kubectl_command(command) - print(f"Helm release {release_name} uninstalled successfully.") - - # If the namespace is specified and not 'default', delete it - if namespace != "default": - print(f"Deleting namespace {namespace}...") - delete_namespace_command = ["kubectl", "delete", "namespace", namespace] - run_kubectl_command(delete_namespace_command) - print(f"Namespace {namespace} deleted successfully.") - else: - print("Namespace is 'default', skipping deletion.") - - except subprocess.CalledProcessError as e: - print(f"Error occurred while uninstalling Helm release or deleting namespace: {e}") - - -def main(): - parser = argparse.ArgumentParser(description="Manage Helm Deployment.") - parser.add_argument( - "--release-name", - type=str, - default="chatqna", - help="The Helm release name created during deployment (default: chatqna).", - ) - parser.add_argument( - "--chart-name", - type=str, - default="chatqna", - help="The chart name to deploy, composed of repo name and chart name (default: chatqna).", - ) - parser.add_argument("--namespace", default="default", help="Kubernetes namespace (default: default).") - parser.add_argument("--hf-token", help="Hugging Face API token.") - parser.add_argument( - "--model-dir", help="Model directory, mounted as volumes for service access to pre-downloaded models" - ) - 
parser.add_argument("--user-values", help="Path to a user-specified values.yaml file.") - parser.add_argument( - "--create-values-only", action="store_true", help="Only create the values.yaml file without deploying." - ) - parser.add_argument("--uninstall", action="store_true", help="Uninstall the Helm release.") - parser.add_argument("--num-nodes", type=int, default=1, help="Number of nodes to use (default: 1).") - parser.add_argument("--node-names", nargs="*", help="Optional specific node names to label.") - parser.add_argument("--add-label", action="store_true", help="Add label to specified nodes if this flag is set.") - parser.add_argument( - "--delete-label", action="store_true", help="Delete label from specified nodes if this flag is set." - ) - parser.add_argument( - "--label", default="node-type=opea-benchmark", help="Label to add/delete (default: node-type=opea-benchmark)." - ) - parser.add_argument("--with-rerank", action="store_true", help="Include rerank service in the deployment.") - parser.add_argument( - "--tuned", - action="store_true", - help="Modify resources for services and change extraCmdArgs when creating values.yaml.", - ) - parser.add_argument( - "--device-type", - type=str, - choices=["cpu", "gaudi"], - default="gaudi", - help="Specify the device type for deployment (choices: 'cpu', 'gaudi'; default: gaudi).", - ) - - args = parser.parse_args() - - # Adjust num-nodes based on node-names if specified - if args.node_names: - num_node_names = len(args.node_names) - if args.num_nodes != 1 and args.num_nodes != num_node_names: - parser.error("--num-nodes must match the number of --node-names if both are specified.") - else: - args.num_nodes = num_node_names - - # Node labeling management - if args.add_label: - add_labels_to_nodes(args.num_nodes, args.label, args.node_names) - return - elif args.delete_label: - clear_labels_from_nodes(args.label, args.node_names) - return - - # Uninstall Helm release if specified - if args.uninstall: - 
uninstall_helm_release(args.release_name, args.namespace) - return - - # Prepare values.yaml if not uninstalling - if args.user_values: - values_file_path = args.user_values - else: - if not args.hf_token: - parser.error("--hf-token are required") - node_selector = {args.label.split("=")[0]: args.label.split("=")[1]} - values_file_path = generate_helm_values( - with_rerank=args.with_rerank, - num_nodes=args.num_nodes, - hf_token=args.hf_token, - model_dir=args.model_dir, - node_selector=node_selector, - tune=args.tuned, - ) - - # Read back the generated YAML file for verification - with open(values_file_path, "r") as file: - print("Generated YAML contents:") - print(file.read()) - - # Deploy unless --create-values-only is specified - if not args.create_values_only: - install_helm_release(args.release_name, args.chart_name, args.namespace, values_file_path, args.device_type) - - -if __name__ == "__main__": - main() diff --git a/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/generate_helm_values.py b/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/generate_helm_values.py deleted file mode 100644 index ff15252e3c..0000000000 --- a/ChatQnA/benchmark/performance/kubernetes/intel/gaudi/generate_helm_values.py +++ /dev/null @@ -1,164 +0,0 @@ -# Copyright (C) 2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -import os - -import yaml - - -def generate_helm_values(with_rerank, num_nodes, hf_token, model_dir, node_selector=None, tune=False): - """Create a values.yaml file based on the provided configuration.""" - - # Log the received parameters - print("Received parameters:") - print(f"with_rerank: {with_rerank}") - print(f"num_nodes: {num_nodes}") - print(f"node_selector: {node_selector}") # Log the node_selector - print(f"tune: {tune}") - - if node_selector is None: - node_selector = {} - - # Construct the base values dictionary - values = { - "tei": {"nodeSelector": {key: value for key, value in node_selector.items()}}, - "tgi": {"nodeSelector": 
{key: value for key, value in node_selector.items()}}, - "data-prep": {"nodeSelector": {key: value for key, value in node_selector.items()}}, - "redis-vector-db": {"nodeSelector": {key: value for key, value in node_selector.items()}}, - "retriever-usvc": {"nodeSelector": {key: value for key, value in node_selector.items()}}, - "chatqna-ui": {"nodeSelector": {key: value for key, value in node_selector.items()}}, - "global": { - "HUGGINGFACEHUB_API_TOKEN": hf_token, # Use passed token - "modelUseHostPath": model_dir, # Use passed model directory - }, - "nodeSelector": {key: value for key, value in node_selector.items()}, - } - - if with_rerank: - values["teirerank"] = {"nodeSelector": {key: value for key, value in node_selector.items()}} - else: - values["image"] = {"repository": "opea/chatqna-without-rerank"} - values["teirerank"] = {"enabled": False} - - default_replicas = [ - {"name": "chatqna", "replicaCount": 2}, - {"name": "tei", "replicaCount": 1}, - {"name": "teirerank", "replicaCount": 1} if with_rerank else None, - {"name": "tgi", "replicaCount": 7 if with_rerank else 8}, - {"name": "data-prep", "replicaCount": 1}, - {"name": "redis-vector-db", "replicaCount": 1}, - {"name": "retriever-usvc", "replicaCount": 2}, - ] - - if num_nodes > 1: - # Scale replicas based on number of nodes - replicas = [ - {"name": "chatqna", "replicaCount": 1 * num_nodes}, - {"name": "tei", "replicaCount": 1 * num_nodes}, - {"name": "teirerank", "replicaCount": 1} if with_rerank else None, - {"name": "tgi", "replicaCount": (8 * num_nodes - 1) if with_rerank else 8 * num_nodes}, - {"name": "data-prep", "replicaCount": 1}, - {"name": "redis-vector-db", "replicaCount": 1}, - {"name": "retriever-usvc", "replicaCount": 1 * num_nodes}, - ] - else: - replicas = default_replicas - - # Remove None values for rerank disabled - replicas = [r for r in replicas if r] - - # Update values.yaml with replicas - for replica in replicas: - service_name = replica["name"] - if service_name == 
"chatqna": - values["replicaCount"] = replica["replicaCount"] - print(replica["replicaCount"]) - elif service_name in values: - values[service_name]["replicaCount"] = replica["replicaCount"] - - # Prepare resource configurations based on tuning - resources = [] - if tune: - resources = [ - { - "name": "chatqna", - "resources": { - "limits": {"cpu": "16", "memory": "8000Mi"}, - "requests": {"cpu": "16", "memory": "8000Mi"}, - }, - }, - { - "name": "tei", - "resources": { - "limits": {"cpu": "80", "memory": "20000Mi"}, - "requests": {"cpu": "80", "memory": "20000Mi"}, - }, - }, - {"name": "teirerank", "resources": {"limits": {"habana.ai/gaudi": 1}}} if with_rerank else None, - {"name": "tgi", "resources": {"limits": {"habana.ai/gaudi": 1}}}, - {"name": "retriever-usvc", "resources": {"requests": {"cpu": "8", "memory": "8000Mi"}}}, - ] - - # Filter out any None values directly as part of initialization - resources = [r for r in resources if r is not None] - - # Add resources for each service if tuning - for resource in resources: - service_name = resource["name"] - if service_name == "chatqna": - values["resources"] = resource["resources"] - elif service_name in values: - values[service_name]["resources"] = resource["resources"] - - # Add extraCmdArgs for tgi service with default values - if "tgi" in values: - values["tgi"]["extraCmdArgs"] = [ - "--max-input-length", - "1280", - "--max-total-tokens", - "2048", - "--max-batch-total-tokens", - "65536", - "--max-batch-prefill-tokens", - "4096", - ] - - yaml_string = yaml.dump(values, default_flow_style=False) - - # Determine the mode based on the 'tune' parameter - mode = "tuned" if tune else "oob" - - # Determine the filename based on 'with_rerank' and 'num_nodes' - if with_rerank: - filename = f"{mode}-{num_nodes}-gaudi-with-rerank-values.yaml" - else: - filename = f"{mode}-{num_nodes}-gaudi-without-rerank-values.yaml" - - # Write the YAML data to the file - with open(filename, "w") as file: - file.write(yaml_string) - 
- # Get the current working directory and construct the file path - current_dir = os.getcwd() - filepath = os.path.join(current_dir, filename) - - print(f"YAML file {filepath} has been generated.") - return filepath # Optionally return the file path - - -# Main execution for standalone use of create_values_yaml -if __name__ == "__main__": - # Example values for standalone execution - with_rerank = True - num_nodes = 2 - hftoken = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" - modeldir = "/mnt/model" - node_selector = {"node-type": "opea-benchmark"} - tune = True - - filename = generate_helm_values(with_rerank, num_nodes, hftoken, modeldir, node_selector, tune) - - # Read back the generated YAML file for verification - with open(filename, "r") as file: - print("Generated YAML contents:") - print(file.read()) diff --git a/ChatQnA/docker_compose/intel/cpu/aipc/compose.yaml b/ChatQnA/docker_compose/intel/cpu/aipc/compose.yaml index 6e3578e97a..9035642c5d 100644 --- a/ChatQnA/docker_compose/intel/cpu/aipc/compose.yaml +++ b/ChatQnA/docker_compose/intel/cpu/aipc/compose.yaml @@ -32,7 +32,7 @@ services: retries: 50 restart: unless-stopped tei-embedding-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-server ports: - "6006:80" @@ -65,7 +65,7 @@ services: RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped tei-reranking-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-reranking-server ports: - "8808:80" diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose.yaml index 12b1799968..1e5fef6d40 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose.yaml @@ -39,7 +39,7 @@ services: retries: 50 restart: unless-stopped 
tei-embedding-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-server ports: - "6006:80" @@ -72,7 +72,7 @@ services: RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped tei-reranking-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-reranking-server ports: - "8808:80" diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen.yaml index 06232276f6..eb31dfb1fa 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen.yaml @@ -32,7 +32,7 @@ services: retries: 50 restart: unless-stopped tei-embedding-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-server ports: - "6006:80" @@ -65,7 +65,7 @@ services: RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped tei-reranking-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-reranking-server ports: - "8808:80" diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen_tgi.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen_tgi.yaml index 1cd3f4fe71..a66be60327 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen_tgi.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen_tgi.yaml @@ -32,7 +32,7 @@ services: retries: 50 restart: unless-stopped tei-embedding-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-server ports: - "6006:80" @@ -65,7 +65,7 @@ services: 
RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped tei-reranking-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-reranking-server ports: - "8808:80" diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml index d7a5b687d7..eb81c3ec2e 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml @@ -113,7 +113,7 @@ services: restart: unless-stopped tei-embedding-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-server ports: - "6006:80" @@ -127,7 +127,7 @@ services: command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate tei-reranking-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-reranking-server ports: - "8808:80" diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_pinecone.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_pinecone.yaml index ffd397209f..8a2af3c117 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_pinecone.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_pinecone.yaml @@ -29,7 +29,7 @@ services: retries: 50 restart: unless-stopped tei-embedding-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-server ports: - "6006:80" @@ -60,7 +60,7 @@ services: RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_PINECONE" restart: unless-stopped tei-reranking-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-reranking-server ports: - 
"8808:80" diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_remote.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_remote.yaml index 665f97d449..2acc51bbe4 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_remote.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_remote.yaml @@ -33,7 +33,7 @@ services: TEI_ENDPOINT: http://tei-embedding-service:80 HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} tei-embedding-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-server ports: - "6006:80" @@ -66,7 +66,7 @@ services: RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped tei-reranking-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-reranking-server ports: - "8808:80" diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml index 7e138435c7..434ae34eac 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml @@ -32,7 +32,7 @@ services: retries: 50 restart: unless-stopped tei-embedding-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-server ports: - "6006:80" @@ -65,7 +65,7 @@ services: RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped tei-reranking-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-reranking-server ports: - "8808:80" diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml index dea6826269..b813852c74 100644 --- 
a/ChatQnA/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml @@ -32,7 +32,7 @@ services: retries: 50 restart: unless-stopped tei-embedding-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-server ports: - "6006:80" diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/README.md b/ChatQnA/docker_compose/intel/hpu/gaudi/README.md index ec9d07a184..6dea162563 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/README.md +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/README.md @@ -95,7 +95,7 @@ d560c232b120 opea/retriever:latest a1d7ca2d3787 ghcr.io/huggingface/tei-gaudi:1.5.0 "text-embeddings-rou…" 2 minutes ago Up 2 minutes 0.0.0.0:8808->80/tcp, [::]:8808->80/tcp tei-reranking-gaudi-server 9a9f3fd4fd4c opea/vllm-gaudi:latest "python3 -m vllm.ent…" 2 minutes ago Exited (1) 2 minutes ago vllm-gaudi-server 1ab9bbdf5182 redis/redis-stack:7.2.0-v9 "/entrypoint.sh" 2 minutes ago Up 2 minutes 0.0.0.0:6379->6379/tcp, :::6379->6379/tcp, 0.0.0.0:8001->8001/tcp, :::8001->8001/tcp redis-vector-db -9ee0789d819e ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 "text-embeddings-rou…" 2 minutes ago Up 2 minutes 0.0.0.0:8090->80/tcp, [::]:8090->80/tcp tei-embedding-gaudi-server +9ee0789d819e ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 "text-embeddings-rou…" 2 minutes ago Up 2 minutes 0.0.0.0:8090->80/tcp, [::]:8090->80/tcp tei-embedding-gaudi-server ``` ### Test the Pipeline @@ -148,7 +148,7 @@ The default deployment utilizes Gaudi devices primarily for the `vllm-service`, | ---------------------------- | ----------------------------------------------------- | ------------ | | redis-vector-db | redis/redis-stack:7.2.0-v9 | No | | dataprep-redis-service | opea/dataprep:latest | No | -| tei-embedding-service | ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 | No | +| 
tei-embedding-service | ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 | No | | retriever | opea/retriever:latest | No | | tei-reranking-service | ghcr.io/huggingface/tei-gaudi:1.5.0 | 1 card | | vllm-service | opea/vllm-gaudi:latest | Configurable | @@ -164,7 +164,7 @@ The TGI (Text Generation Inference) deployment and the default deployment differ | ---------------------------- | ----------------------------------------------------- | -------------- | | redis-vector-db | redis/redis-stack:7.2.0-v9 | No | | dataprep-redis-service | opea/dataprep:latest | No | -| tei-embedding-service | ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 | No | +| tei-embedding-service | ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 | No | | retriever | opea/retriever:latest | No | | tei-reranking-service | ghcr.io/huggingface/tei-gaudi:1.5.0 | 1 card | | **tgi-service** | ghcr.io/huggingface/tgi-gaudi:2.3.1 | Configurable | @@ -184,7 +184,7 @@ The TGI (Text Generation Inference) deployment and the default deployment differ | ---------------------------- | ----------------------------------------------------- | ------------ | | redis-vector-db | redis/redis-stack:7.2.0-v9 | No | | dataprep-redis-service | opea/dataprep:latest | No | -| tei-embedding-service | ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 | No | +| tei-embedding-service | ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 | No | | retriever | opea/retriever:latest | No | | tei-reranking-service | ghcr.io/huggingface/tei-gaudi:1.5.0 | 1 card | | vllm-service | opea/vllm-gaudi:latest | Configurable | @@ -203,7 +203,7 @@ The _compose_without_rerank.yaml_ Docker Compose file is distinct from the defau | ---------------------------- | ----------------------------------------------------- | -------------- | | redis-vector-db | redis/redis-stack:7.2.0-v9 | No | | dataprep-redis-service | opea/dataprep:latest | No | -| tei-embedding-service | ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 
| No | +| tei-embedding-service | ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 | No | | retriever | opea/retriever:latest | No | | vllm-service | opea/vllm-gaudi:latest | Configurable | | chatqna-gaudi-backend-server | opea/chatqna:latest | No | @@ -222,7 +222,7 @@ The _compose_guardrails.yaml_ Docker Compose file introduces enhancements over t | dataprep-redis-service | opea/dataprep:latest | No | No | | _vllm-guardrails-service_ | opea/vllm-gaudi:latest | 1 card | Yes | | _guardrails_ | opea/guardrails:latest | No | No | -| tei-embedding-service | ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 | No | No | +| tei-embedding-service | ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 | No | No | | retriever | opea/retriever:latest | No | No | | tei-reranking-service | ghcr.io/huggingface/tei-gaudi:1.5.0 | 1 card | No | | vllm-service | opea/vllm-gaudi:latest | Configurable | Yes | @@ -258,7 +258,7 @@ The table provides a comprehensive overview of the ChatQnA services utilized acr | ---------------------------- | ----------------------------------------------------- | -------- | -------------------------------------------------------------------------------------------------- | | redis-vector-db | redis/redis-stack:7.2.0-v9 | No | Acts as a Redis database for storing and managing data. | | dataprep-redis-service | opea/dataprep:latest | No | Prepares data and interacts with the Redis database. | -| tei-embedding-service | ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 | No | Provides text embedding services, often using Hugging Face models. | +| tei-embedding-service | ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 | No | Provides text embedding services, often using Hugging Face models. | | retriever | opea/retriever:latest | No | Retrieves data from the Redis database and interacts with embedding services. 
| | tei-reranking-service | ghcr.io/huggingface/tei-gaudi:1.5.0 | Yes | Reranks text embeddings, typically using Gaudi hardware for enhanced performance. | | vllm-service | opea/vllm-gaudi:latest | No | Handles large language model (LLM) tasks, utilizing Gaudi hardware. | @@ -284,7 +284,7 @@ ChatQnA now supports running the latest DeepSeek models, including [deepseek-ai/ ### tei-embedding-service & tei-reranking-service -The `ghcr.io/huggingface/text-embeddings-inference:cpu-1.6` image supporting `tei-embedding-service` and `tei-reranking-service` depends on the `EMBEDDING_MODEL_ID` or `RERANK_MODEL_ID` environment variables respectively to specify the embedding model and reranking model used for converting text into vector representations and rankings. This choice impacts the quality and relevance of the embeddings rerankings for various applications. Unlike the `vllm-service`, the `tei-embedding-service` and `tei-reranking-service` each typically acquires only one Gaudi device and does not use the `NUM_CARDS` parameter; embedding and reranking tasks generally do not require extensive parallel processing and one Gaudi per service is appropriate. The list of [supported embedding and reranking models](https://github.com/huggingface/tei-gaudi?tab=readme-ov-file#supported-models) can be found at the [huggingface/tei-gaudi](https://github.com/huggingface/tei-gaudi?tab=readme-ov-file#supported-models) website. +The `ghcr.io/huggingface/text-embeddings-inference:cpu-1.5` image supporting `tei-embedding-service` and `tei-reranking-service` depends on the `EMBEDDING_MODEL_ID` or `RERANK_MODEL_ID` environment variables respectively to specify the embedding model and reranking model used for converting text into vector representations and rankings. This choice impacts the quality and relevance of the embeddings rerankings for various applications. 
Unlike the `vllm-service`, the `tei-embedding-service` and `tei-reranking-service` each typically acquires only one Gaudi device and does not use the `NUM_CARDS` parameter; embedding and reranking tasks generally do not require extensive parallel processing and one Gaudi per service is appropriate. The list of [supported embedding and reranking models](https://github.com/huggingface/tei-gaudi?tab=readme-ov-file#supported-models) can be found at the [huggingface/tei-gaudi](https://github.com/huggingface/tei-gaudi?tab=readme-ov-file#supported-models) website. ### tgi-guardrails-service diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose.yaml index b8b96ad048..49d7ff99a5 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/compose.yaml @@ -39,7 +39,7 @@ services: retries: 50 restart: unless-stopped tei-embedding-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-gaudi-server ports: - "8090:80" diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen.yaml index f84c955b36..951956be8f 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen.yaml +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen.yaml @@ -33,7 +33,7 @@ services: retries: 50 restart: unless-stopped tei-embedding-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-gaudi-server ports: - "8090:80" diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen_tgi.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen_tgi.yaml index e54e616c4d..8c2b0d1d54 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen_tgi.yaml +++ 
b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen_tgi.yaml @@ -33,7 +33,7 @@ services: retries: 50 restart: unless-stopped tei-embedding-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-gaudi-server ports: - "8090:80" diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_guardrails.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_guardrails.yaml index cc0ea6f5b3..7f44764413 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_guardrails.yaml +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_guardrails.yaml @@ -76,7 +76,7 @@ services: HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} restart: unless-stopped tei-embedding-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-gaudi-server ports: - "8090:80" diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml index 5be2c4387f..02d99098b6 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml @@ -32,7 +32,7 @@ services: retries: 50 restart: unless-stopped tei-embedding-service: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-gaudi-server ports: - "8090:80" diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_without_rerank.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_without_rerank.yaml index a59138bbd5..9704984f1a 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_without_rerank.yaml +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_without_rerank.yaml @@ -32,7 +32,7 @@ services: retries: 50 restart: unless-stopped tei-embedding-service: - image: 
ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-gaudi-server ports: - "8090:80" diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/how_to_validate_service.md b/ChatQnA/docker_compose/intel/hpu/gaudi/how_to_validate_service.md index ba18ce7ae0..ce515d4509 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/how_to_validate_service.md +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/how_to_validate_service.md @@ -51,7 +51,7 @@ f810f3b4d329 opea/embedding:latest "python embed 174bd43fa6b5 ghcr.io/huggingface/tei-gaudi:1.5.0 "text-embeddings-rou…" 2 minutes ago Up 2 minutes 0.0.0.0:8090->80/tcp, :::8090->80/tcp tei-embedding-gaudi-server 05c40b636239 ghcr.io/huggingface/tgi-gaudi:2.3.1 "text-generation-lau…" 2 minutes ago Exited (1) About a minute ago tgi-gaudi-server 74084469aa33 redis/redis-stack:7.2.0-v9 "/entrypoint.sh" 2 minutes ago Up 2 minutes 0.0.0.0:6379->6379/tcp, :::6379->6379/tcp, 0.0.0.0:8001->8001/tcp, :::8001->8001/tcp redis-vector-db -88399dbc9e43 ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 "text-embeddings-rou…" 2 minutes ago Up 2 minutes 0.0.0.0:8808->80/tcp, :::8808->80/tcp tei-reranking-gaudi-server +88399dbc9e43 ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 "text-embeddings-rou…" 2 minutes ago Up 2 minutes 0.0.0.0:8808->80/tcp, :::8808->80/tcp tei-reranking-gaudi-server ``` In this case, `ghcr.io/huggingface/tgi-gaudi:2.3.1` Existed. 
diff --git a/ChatQnA/tests/test_compose_faqgen_on_gaudi.sh b/ChatQnA/tests/test_compose_faqgen_on_gaudi.sh index 751b81029d..be040bdbef 100644 --- a/ChatQnA/tests/test_compose_faqgen_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_faqgen_on_gaudi.sh @@ -31,8 +31,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever llm-faqgen vllm-gaudi nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 - docker pull ghcr.io/huggingface/tei-gaudi:1.5.0 docker images && sleep 1s } diff --git a/ChatQnA/tests/test_compose_faqgen_on_rocm.sh b/ChatQnA/tests/test_compose_faqgen_on_rocm.sh index 781da0d048..7b05bb8c06 100644 --- a/ChatQnA/tests/test_compose_faqgen_on_rocm.sh +++ b/ChatQnA/tests/test_compose_faqgen_on_rocm.sh @@ -69,9 +69,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever llm-faqgen nginx" docker compose -f build.yaml build ${service_list} --no-cache > "${LOG_PATH}"/docker_image_build.log - docker pull ghcr.io/huggingface/text-generation-inference:2.3.1-rocm - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 - docker images && sleep 1s } diff --git a/ChatQnA/tests/test_compose_faqgen_on_xeon.sh b/ChatQnA/tests/test_compose_faqgen_on_xeon.sh index 256732094b..dc42798732 100644 --- a/ChatQnA/tests/test_compose_faqgen_on_xeon.sh +++ b/ChatQnA/tests/test_compose_faqgen_on_xeon.sh @@ -32,7 +32,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever llm-faqgen vllm nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 docker images && sleep 1s } diff --git a/ChatQnA/tests/test_compose_faqgen_tgi_on_gaudi.sh b/ChatQnA/tests/test_compose_faqgen_tgi_on_gaudi.sh index 39760087d6..8eef9c6040 100644 --- 
a/ChatQnA/tests/test_compose_faqgen_tgi_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_faqgen_tgi_on_gaudi.sh @@ -28,9 +28,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever llm-faqgen nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/tgi-gaudi:2.0.6 - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 - docker pull ghcr.io/huggingface/tei-gaudi:1.5.0 docker images && sleep 1s } diff --git a/ChatQnA/tests/test_compose_faqgen_tgi_on_xeon.sh b/ChatQnA/tests/test_compose_faqgen_tgi_on_xeon.sh index 50e69abf61..d4140a3ab1 100644 --- a/ChatQnA/tests/test_compose_faqgen_tgi_on_xeon.sh +++ b/ChatQnA/tests/test_compose_faqgen_tgi_on_xeon.sh @@ -32,8 +32,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever llm-faqgen nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-generation-inference:2.4.0-intel-cpu - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 docker images && sleep 1s } diff --git a/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh b/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh index 00cd7b1247..06b58bedc0 100644 --- a/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh @@ -31,9 +31,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever vllm-gaudi guardrails nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 - docker pull ghcr.io/huggingface/tei-gaudi:1.5.0 - docker images && sleep 1s } diff --git a/ChatQnA/tests/test_compose_milvus_on_xeon.sh b/ChatQnA/tests/test_compose_milvus_on_xeon.sh index 19beb81752..47a5b43ddc 100644 --- 
a/ChatQnA/tests/test_compose_milvus_on_xeon.sh +++ b/ChatQnA/tests/test_compose_milvus_on_xeon.sh @@ -35,8 +35,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever vllm nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 - docker images && sleep 1s } function start_services() { diff --git a/ChatQnA/tests/test_compose_on_gaudi.sh b/ChatQnA/tests/test_compose_on_gaudi.sh index 268c8e27fc..0fe3cf11a9 100644 --- a/ChatQnA/tests/test_compose_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_on_gaudi.sh @@ -31,8 +31,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever vllm-gaudi nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 - docker pull ghcr.io/huggingface/tei-gaudi:1.5.0 docker images && sleep 1s } diff --git a/ChatQnA/tests/test_compose_on_rocm.sh b/ChatQnA/tests/test_compose_on_rocm.sh index a4668d9bbb..3ff91522c8 100644 --- a/ChatQnA/tests/test_compose_on_rocm.sh +++ b/ChatQnA/tests/test_compose_on_rocm.sh @@ -67,9 +67,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever nginx" docker compose -f build.yaml build ${service_list} --no-cache > "${LOG_PATH}"/docker_image_build.log - docker pull ghcr.io/huggingface/text-generation-inference:2.3.1-rocm - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 - docker images && sleep 1s } diff --git a/ChatQnA/tests/test_compose_on_xeon.sh b/ChatQnA/tests/test_compose_on_xeon.sh index 1019469623..38226ec9be 100644 --- a/ChatQnA/tests/test_compose_on_xeon.sh +++ b/ChatQnA/tests/test_compose_on_xeon.sh @@ -34,8 +34,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever vllm nginx" docker compose -f build.yaml build ${service_list} 
--no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 - docker images && sleep 1s } diff --git a/ChatQnA/tests/test_compose_pinecone_on_xeon.sh b/ChatQnA/tests/test_compose_pinecone_on_xeon.sh index 3d99e0cce4..98bfd21368 100755 --- a/ChatQnA/tests/test_compose_pinecone_on_xeon.sh +++ b/ChatQnA/tests/test_compose_pinecone_on_xeon.sh @@ -35,8 +35,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever vllm nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 - docker images && sleep 1s } diff --git a/ChatQnA/tests/test_compose_tgi_on_gaudi.sh b/ChatQnA/tests/test_compose_tgi_on_gaudi.sh index c4d921dce0..b334fc35c8 100644 --- a/ChatQnA/tests/test_compose_tgi_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_tgi_on_gaudi.sh @@ -27,10 +27,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/tgi-gaudi:2.3.1 - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 - docker pull ghcr.io/huggingface/tei-gaudi:1.5.0 - docker images && sleep 1s } diff --git a/ChatQnA/tests/test_compose_tgi_on_xeon.sh b/ChatQnA/tests/test_compose_tgi_on_xeon.sh index 07bd3fc3c2..12c9552ca5 100644 --- a/ChatQnA/tests/test_compose_tgi_on_xeon.sh +++ b/ChatQnA/tests/test_compose_tgi_on_xeon.sh @@ -27,9 +27,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-generation-inference:2.4.0-intel-cpu - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 - docker images && sleep 1s } diff --git 
a/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh b/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh index ed979db97d..6e4782ba79 100644 --- a/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh @@ -31,9 +31,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever vllm-gaudi nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 - docker pull ghcr.io/huggingface/tei-gaudi:1.5.0 - docker images && sleep 1s } diff --git a/ChatQnA/tests/test_compose_without_rerank_on_xeon.sh b/ChatQnA/tests/test_compose_without_rerank_on_xeon.sh index 66f7a535a9..2d79b0e7a2 100644 --- a/ChatQnA/tests/test_compose_without_rerank_on_xeon.sh +++ b/ChatQnA/tests/test_compose_without_rerank_on_xeon.sh @@ -35,8 +35,6 @@ function build_docker_images() { service_list="chatqna chatqna-ui dataprep retriever vllm nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 - docker images && sleep 1s } From 99b62ae49e768e875a94e3128a5961b00c22cf52 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Mon, 28 Apr 2025 13:35:05 +0800 Subject: [PATCH 019/217] Integrate DocSum set_env to ut scripts. (#1860) Integrate DocSum set_env to ut scripts. Add README.md for DocSum and InstructionTuning UT scripts. 
Signed-off-by: ZePan110 --- DocSum/docker_compose/amd/gpu/rocm/set_env.sh | 2 +- .../amd/gpu/rocm/set_env_vllm.sh | 2 +- DocSum/docker_compose/set_env.sh | 6 ++- DocSum/tests/README.md | 45 +++++++++++++++++++ DocSum/tests/test_compose_on_gaudi.sh | 25 +++-------- DocSum/tests/test_compose_on_rocm.sh | 29 ++++-------- DocSum/tests/test_compose_on_xeon.sh | 24 +++------- DocSum/tests/test_compose_tgi_on_gaudi.sh | 26 +++-------- DocSum/tests/test_compose_tgi_on_xeon.sh | 25 +++-------- DocSum/tests/test_compose_vllm_on_rocm.sh | 30 ++++--------- InstructionTuning/tests/README.md | 15 +++++++ 11 files changed, 108 insertions(+), 121 deletions(-) create mode 100644 DocSum/tests/README.md create mode 100644 InstructionTuning/tests/README.md diff --git a/DocSum/docker_compose/amd/gpu/rocm/set_env.sh b/DocSum/docker_compose/amd/gpu/rocm/set_env.sh index f4d5081a2d..f597849987 100644 --- a/DocSum/docker_compose/amd/gpu/rocm/set_env.sh +++ b/DocSum/docker_compose/amd/gpu/rocm/set_env.sh @@ -3,7 +3,7 @@ # Copyright (C) 2024 Advanced Micro Devices, Inc. # SPDX-License-Identifier: Apache-2.0 -export HOST_IP='' +export HOST_IP=${ip_address} export DOCSUM_MAX_INPUT_TOKENS="2048" export DOCSUM_MAX_TOTAL_TOKENS="4096" export DOCSUM_LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" diff --git a/DocSum/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/DocSum/docker_compose/amd/gpu/rocm/set_env_vllm.sh index eb12d07dd8..800e502071 100644 --- a/DocSum/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/DocSum/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -3,7 +3,7 @@ # Copyright (C) 2024 Advanced Micro Devices, Inc. 
# SPDX-License-Identifier: Apache-2.0 -export HOST_IP='' +export HOST_IP=${ip_address} export DOCSUM_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export DOCSUM_MAX_INPUT_TOKENS=2048 export DOCSUM_MAX_TOTAL_TOKENS=4096 diff --git a/DocSum/docker_compose/set_env.sh b/DocSum/docker_compose/set_env.sh index f41b3c875a..6f5351479e 100644 --- a/DocSum/docker_compose/set_env.sh +++ b/DocSum/docker_compose/set_env.sh @@ -10,7 +10,7 @@ export no_proxy="${no_proxy},${host_ip}" # Example: no_proxy="localhost, 127.0.0 export http_proxy=$http_proxy export https_proxy=$https_proxy export host_ip=$(hostname -I | awk '{print $1}') # Example: host_ip="192.168.1.1" -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export LLM_ENDPOINT_PORT=8008 export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" @@ -20,10 +20,12 @@ export MAX_TOTAL_TOKENS=2048 export LLM_PORT=9000 export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}" export DocSum_COMPONENT_NAME="OpeaDocSumvLLM" # OpeaDocSumTgi - +export FRONTEND_SERVICE_PORT=5173 export MEGA_SERVICE_HOST_IP=${host_ip} export LLM_SERVICE_HOST_IP=${host_ip} export ASR_SERVICE_HOST_IP=${host_ip} export BACKEND_SERVICE_PORT=8888 export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" + +export LOGFLAG=True diff --git a/DocSum/tests/README.md b/DocSum/tests/README.md new file mode 100644 index 0000000000..6d5f55c7f1 --- /dev/null +++ b/DocSum/tests/README.md @@ -0,0 +1,45 @@ +# DocSum E2E test scripts + +## Set the required environment variable + +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +## Run test + +On Intel Xeon with vLLM: + +```bash +bash test_compose_on_xeon.sh +``` + +On Intel Xeon with TGI: + +```bash +bash test_compose_tgi_on_xeon.sh +``` + +On Intel Gaudi with vLLM: + +```bash +bash test_compose_on_gaudi.sh +``` + +On Intel Gaudi with TGI: + +```bash +bash test_compose_tgi_on_gaudi.sh +``` + +On 
AMD ROCm with TGI: + +```bash +bash test_compose_on_rocm.sh +``` + +On AMD ROCm with vLLM: + +```bash +bash test_compose_vllm_on_rocm.sh +``` diff --git a/DocSum/tests/test_compose_on_gaudi.sh b/DocSum/tests/test_compose_on_gaudi.sh index ffaa50a9f2..aecdc006c7 100644 --- a/DocSum/tests/test_compose_on_gaudi.sh +++ b/DocSum/tests/test_compose_on_gaudi.sh @@ -10,35 +10,22 @@ export http_proxy=$http_proxy export https_proxy=$https_proxy export host_ip=$(hostname -I | awk '{print $1}') +WORKPATH=$(dirname "$PWD") +LOG_PATH="$WORKPATH/tests" echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" echo "TAG=IMAGE_TAG=${IMAGE_TAG}" -export no_proxy="${no_proxy},${host_ip}" -export MODEL_CACHE=${model_cache:-"./data"} export REGISTRY=${IMAGE_REPO} export TAG=${IMAGE_TAG} -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export LLM_ENDPOINT_PORT=8008 -export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" +source $WORKPATH/docker_compose/set_env.sh + +export MODEL_CACHE=${model_cache:-"./data"} + export NUM_CARDS=1 export BLOCK_SIZE=128 export MAX_NUM_SEQS=256 export MAX_SEQ_LEN_TO_CAPTURE=2048 export MAX_INPUT_TOKENS=2048 export MAX_TOTAL_TOKENS=4096 -export LLM_PORT=9000 -export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}" -export DocSum_COMPONENT_NAME="OpeaDocSumvLLM" -export MEGA_SERVICE_HOST_IP=${host_ip} -export LLM_SERVICE_HOST_IP=${host_ip} -export ASR_SERVICE_HOST_IP=${host_ip} -export FRONTEND_SERVICE_PORT=5173 -export BACKEND_SERVICE_PORT=8888 -export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" -export LOGFLAG=True - -WORKPATH=$(dirname "$PWD") -LOG_PATH="$WORKPATH/tests" - # Get the root folder of the current script ROOT_FOLDER=$(dirname "$(readlink -f "$0")") diff --git a/DocSum/tests/test_compose_on_rocm.sh b/DocSum/tests/test_compose_on_rocm.sh index c0d4c22d0a..ee95ffc0be 100644 --- a/DocSum/tests/test_compose_on_rocm.sh +++ b/DocSum/tests/test_compose_on_rocm.sh @@ -14,21 +14,8 @@ export 
MODEL_CACHE=${model_cache:-"./data"} WORKPATH=$(dirname "$PWD") LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') - -export HOST_IP=${ip_address} export host_ip=${ip_address} -export DOCSUM_MAX_INPUT_TOKENS="2048" -export DOCSUM_MAX_TOTAL_TOKENS="4096" -export DOCSUM_LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" -export DOCSUM_TGI_SERVICE_PORT="8008" -export DOCSUM_TGI_LLM_ENDPOINT="http://${HOST_IP}:${DOCSUM_TGI_SERVICE_PORT}" -export DOCSUM_HUGGINGFACEHUB_API_TOKEN='' -export DOCSUM_WHISPER_PORT="7066" -export ASR_SERVICE_HOST_IP="${HOST_IP}" -export DOCSUM_LLM_SERVER_PORT="9000" -export DOCSUM_BACKEND_SERVER_PORT="18072" -export DOCSUM_FRONTEND_PORT="18073" -export BACKEND_SERVICE_ENDPOINT="http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" +source $WORKPATH/docker_compose/amd/gpu/rocm/set_env.sh function build_docker_images() { opea_branch=${opea_branch:-"main"} @@ -129,7 +116,7 @@ function validate_microservices() { # whisper microservice ulimit -s 65536 validate_services \ - "${host_ip}:${DOCSUM_WHISPER_PORT}/v1/asr" \ + "${HOST_IP}:${DOCSUM_WHISPER_PORT}/v1/asr" \ '{"asr_result":"well"}' \ "whisper-service" \ "whisper-service" \ @@ -137,7 +124,7 @@ function validate_microservices() { # tgi for llm service validate_services \ - "${host_ip}:${DOCSUM_TGI_SERVICE_PORT}/generate" \ + "${HOST_IP}:${DOCSUM_TGI_SERVICE_PORT}/generate" \ "generated_text" \ "docsum-tgi-service" \ "docsum-tgi-service" \ @@ -145,7 +132,7 @@ function validate_microservices() { # llm microservice validate_services \ - "${host_ip}:${DOCSUM_LLM_SERVER_PORT}/v1/docsum" \ + "${HOST_IP}:${DOCSUM_LLM_SERVER_PORT}/v1/docsum" \ "text" \ "docsum-llm-server" \ "docsum-llm-server" \ @@ -158,7 +145,7 @@ function validate_megaservice() { local DOCKER_NAME="docsum-backend-server" local EXPECTED_RESULT="[DONE]" local INPUT_DATA="messages=Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. 
TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5." - local URL="${host_ip}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" + local URL="${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" local DATA_TYPE="type=text" local HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X POST -F "$DATA_TYPE" -F "$INPUT_DATA" -H 'Content-Type: multipart/form-data' "$URL") @@ -188,7 +175,7 @@ function validate_megaservice_json() { echo "" echo ">>> Checking text data with Content-Type: application/json" validate_services \ - "${host_ip}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" \ + "${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" \ "[DONE]" \ "docsum-backend-server" \ "docsum-backend-server" \ @@ -196,7 +183,7 @@ function validate_megaservice_json() { echo ">>> Checking audio data" validate_services \ - "${host_ip}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" \ + "${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" \ "[DONE]" \ "docsum-backend-server" \ "docsum-backend-server" \ @@ -204,7 +191,7 @@ function validate_megaservice_json() { echo ">>> Checking video data" validate_services \ - "${host_ip}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" \ + "${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" \ "[DONE]" \ "docsum-backend-server" \ "docsum-backend-server" \ diff --git a/DocSum/tests/test_compose_on_xeon.sh b/DocSum/tests/test_compose_on_xeon.sh index c0b340b7cc..5ff7add6be 100644 --- a/DocSum/tests/test_compose_on_xeon.sh +++ b/DocSum/tests/test_compose_on_xeon.sh @@ -10,30 +10,18 @@ export http_proxy=$http_proxy export https_proxy=$https_proxy export host_ip=$(hostname -I | awk '{print $1}') +WORKPATH=$(dirname "$PWD") +LOG_PATH="$WORKPATH/tests" echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" echo "TAG=IMAGE_TAG=${IMAGE_TAG}" -export no_proxy="${no_proxy},${host_ip}" -export MODEL_CACHE=${model_cache:-"./data"} export REGISTRY=${IMAGE_REPO} export TAG=${IMAGE_TAG} -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} 
-export LLM_ENDPOINT_PORT=8008 -export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" + +source $WORKPATH/docker_compose/set_env.sh +export MODEL_CACHE=${model_cache:-"./data"} + export MAX_INPUT_TOKENS=2048 export MAX_TOTAL_TOKENS=4096 -export LLM_PORT=9000 -export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}" -export DocSum_COMPONENT_NAME="OpeaDocSumvLLM" -export MEGA_SERVICE_HOST_IP=${host_ip} -export LLM_SERVICE_HOST_IP=${host_ip} -export ASR_SERVICE_HOST_IP=${host_ip} -export FRONTEND_SERVICE_PORT=5173 -export BACKEND_SERVICE_PORT=8888 -export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" -export LOGFLAG=True - -WORKPATH=$(dirname "$PWD") -LOG_PATH="$WORKPATH/tests" # Get the root folder of the current script ROOT_FOLDER=$(dirname "$(readlink -f "$0")") diff --git a/DocSum/tests/test_compose_tgi_on_gaudi.sh b/DocSum/tests/test_compose_tgi_on_gaudi.sh index 3f21cdee82..6859e5354a 100644 --- a/DocSum/tests/test_compose_tgi_on_gaudi.sh +++ b/DocSum/tests/test_compose_tgi_on_gaudi.sh @@ -9,32 +9,20 @@ IMAGE_TAG=${IMAGE_TAG:-"latest"} export http_proxy=$http_proxy export https_proxy=$https_proxy export host_ip=$(hostname -I | awk '{print $1}') - +WORKPATH=$(dirname "$PWD") +LOG_PATH="$WORKPATH/tests" echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" echo "TAG=IMAGE_TAG=${IMAGE_TAG}" -export no_proxy="${no_proxy},${host_ip}" -export MODEL_CACHE=${model_cache:-"./data"} export REGISTRY=${IMAGE_REPO} export TAG=${IMAGE_TAG} -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export LLM_ENDPOINT_PORT=8008 -export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" + +source $WORKPATH/docker_compose/set_env.sh +export MODEL_CACHE=${model_cache:-"./data"} + export MAX_INPUT_TOKENS=2048 export MAX_TOTAL_TOKENS=4096 -export LLM_PORT=9000 -export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}" -export DocSum_COMPONENT_NAME="OpeaDocSumTgi" -export MEGA_SERVICE_HOST_IP=${host_ip} -export LLM_SERVICE_HOST_IP=${host_ip} -export 
ASR_SERVICE_HOST_IP=${host_ip} -export FRONTEND_SERVICE_PORT=5173 -export BACKEND_SERVICE_PORT=8888 -export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" -export LOGFLAG=True - -WORKPATH=$(dirname "$PWD") -LOG_PATH="$WORKPATH/tests" +export DocSum_COMPONENT_NAME="OpeaDocSumTgi" # Get the root folder of the current script ROOT_FOLDER=$(dirname "$(readlink -f "$0")") diff --git a/DocSum/tests/test_compose_tgi_on_xeon.sh b/DocSum/tests/test_compose_tgi_on_xeon.sh index 3d7b3f1b22..f94eabf0c8 100644 --- a/DocSum/tests/test_compose_tgi_on_xeon.sh +++ b/DocSum/tests/test_compose_tgi_on_xeon.sh @@ -9,31 +9,20 @@ IMAGE_TAG=${IMAGE_TAG:-"latest"} export http_proxy=$http_proxy export https_proxy=$https_proxy export host_ip=$(hostname -I | awk '{print $1}') - +WORKPATH=$(dirname "$PWD") +LOG_PATH="$WORKPATH/tests" echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" echo "TAG=IMAGE_TAG=${IMAGE_TAG}" -export no_proxy="${no_proxy},${host_ip}" -export MODEL_CACHE=${model_cache:-"./data"} export REGISTRY=${IMAGE_REPO} export TAG=${IMAGE_TAG} -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export LLM_ENDPOINT_PORT=8008 -export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" + +source $WORKPATH/docker_compose/set_env.sh +export MODEL_CACHE=${model_cache:-"./data"} + export MAX_INPUT_TOKENS=2048 export MAX_TOTAL_TOKENS=4096 -export LLM_PORT=9000 -export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}" -export DocSum_COMPONENT_NAME="OpeaDocSumTgi" -export MEGA_SERVICE_HOST_IP=${host_ip} -export LLM_SERVICE_HOST_IP=${host_ip} -export ASR_SERVICE_HOST_IP=${host_ip} -export FRONTEND_SERVICE_PORT=5173 -export BACKEND_SERVICE_PORT=8888 -export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" -export LOGFLAG=True -WORKPATH=$(dirname "$PWD") -LOG_PATH="$WORKPATH/tests" +export DocSum_COMPONENT_NAME="OpeaDocSumTgi" # Get the root folder of the current script ROOT_FOLDER=$(dirname "$(readlink -f "$0")") diff --git 
a/DocSum/tests/test_compose_vllm_on_rocm.sh b/DocSum/tests/test_compose_vllm_on_rocm.sh index 40d10b039a..2eb360f178 100644 --- a/DocSum/tests/test_compose_vllm_on_rocm.sh +++ b/DocSum/tests/test_compose_vllm_on_rocm.sh @@ -16,21 +16,7 @@ WORKPATH=$(dirname "$PWD") LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') -export host_ip=${ip_address} -export HOST_IP=${ip_address} -export EXTERNAL_HOST_IP=${ip_address} -export DOCSUM_HUGGINGFACEHUB_API_TOKEN="${HUGGINGFACEHUB_API_TOKEN}" -export DOCSUM_MAX_INPUT_TOKENS=2048 -export DOCSUM_MAX_TOTAL_TOKENS=4096 -export DOCSUM_LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" -export DOCSUM_VLLM_SERVICE_PORT="8008" -export DOCSUM_LLM_ENDPOINT="http://${HOST_IP}:${DOCSUM_VLLM_SERVICE_PORT}" -export DOCSUM_WHISPER_PORT="7066" -export ASR_SERVICE_HOST_IP="${HOST_IP}" -export DOCSUM_LLM_SERVER_PORT="9000" -export DOCSUM_BACKEND_SERVER_PORT="18072" -export DOCSUM_FRONTEND_PORT="18073" -export BACKEND_SERVICE_ENDPOINT="http://${EXTERNAL_HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" +source $WORKPATH/docker_compose/amd/gpu/rocm/set_env_vllm.sh function build_docker_images() { opea_branch=${opea_branch:-"main"} @@ -130,7 +116,7 @@ function validate_microservices() { # whisper microservice ulimit -s 65536 validate_services \ - "${host_ip}:${DOCSUM_WHISPER_PORT}/v1/asr" \ + "${HOST_IP}:${DOCSUM_WHISPER_PORT}/v1/asr" \ '{"asr_result":"well"}' \ "whisper-service" \ "whisper-service" \ @@ -138,7 +124,7 @@ function validate_microservices() { # vLLM service validate_services \ - "${host_ip}:${DOCSUM_VLLM_SERVICE_PORT}/v1/chat/completions" \ + "${HOST_IP}:${DOCSUM_VLLM_SERVICE_PORT}/v1/chat/completions" \ "content" \ "docsum-vllm-service" \ "docsum-vllm-service" \ @@ -146,7 +132,7 @@ function validate_microservices() { # llm microservice validate_services \ - "${host_ip}:${DOCSUM_LLM_SERVER_PORT}/v1/docsum" \ + "${HOST_IP}:${DOCSUM_LLM_SERVER_PORT}/v1/docsum" \ "text" \ "docsum-llm-server" \ "docsum-llm-server" \ @@ -159,7 
+145,7 @@ function validate_megaservice() { local DOCKER_NAME="docsum-backend-server" local EXPECTED_RESULT="[DONE]" local INPUT_DATA="messages=Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5." - local URL="${host_ip}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" + local URL="${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" local DATA_TYPE="type=text" local HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X POST -F "$DATA_TYPE" -F "$INPUT_DATA" -H 'Content-Type: multipart/form-data' "$URL") @@ -189,7 +175,7 @@ function validate_megaservice_json() { echo "" echo ">>> Checking text data with Content-Type: application/json" validate_services \ - "${host_ip}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" \ + "${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" \ "[DONE]" \ "docsum-backend-server" \ "docsum-backend-server" \ @@ -197,7 +183,7 @@ function validate_megaservice_json() { echo ">>> Checking audio data" validate_services \ - "${host_ip}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" \ + "${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" \ "[DONE]" \ "docsum-backend-server" \ "docsum-backend-server" \ @@ -205,7 +191,7 @@ function validate_megaservice_json() { echo ">>> Checking video data" validate_services \ - "${host_ip}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" \ + "${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum" \ "[DONE]" \ "docsum-backend-server" \ "docsum-backend-server" \ diff --git a/InstructionTuning/tests/README.md b/InstructionTuning/tests/README.md new file mode 100644 index 0000000000..fd43a2b4a1 --- /dev/null +++ b/InstructionTuning/tests/README.md @@ -0,0 +1,15 @@ +# InstructionTuning E2E test scripts + +## Set the required environment variable + +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +## Run test + +On Intel Xeon: + 
+```bash +bash test_compose_on_xeon.sh +``` From 13c4749ca3bd83436e486b316150ec0e1b16b181 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Mon, 28 Apr 2025 13:52:50 +0800 Subject: [PATCH 020/217] Fix security issue (#1884) Signed-off-by: ZePan110 --- .github/workflows/_helm-e2e.yml | 12 ++++++++++++ .github/workflows/pr-chart-e2e.yml | 3 +++ 2 files changed, 15 insertions(+) diff --git a/.github/workflows/_helm-e2e.yml b/.github/workflows/_helm-e2e.yml index 99387d9769..7f150f3890 100644 --- a/.github/workflows/_helm-e2e.yml +++ b/.github/workflows/_helm-e2e.yml @@ -131,6 +131,18 @@ jobs: ref: ${{ steps.get-checkout-ref.outputs.CHECKOUT_REF }} fetch-depth: 0 + - name: Validate Inputs + run: | + cd ${{ github.workspace }} + folders=($(find . -maxdepth 1 -type d ! -name ".*" -printf "%f\n" | tr '[:upper:]' '[:lower:]')) + echo "folders: ${folders[@]}" + echo "example: ${{ inputs.example }}" + example_lower=$(echo "${{ inputs.example }}" | tr '[:upper:]' '[:lower:]') + if [[ ! " ${folders[@]} " =~ " ${example_lower} " ]]; then + echo "Error: Input '${example_lower}' is not in the list of folders." + exit 1 + fi + - name: Set variables env: example: ${{ inputs.example }} diff --git a/.github/workflows/pr-chart-e2e.yml b/.github/workflows/pr-chart-e2e.yml index 52d7a6354f..876960e7d9 100644 --- a/.github/workflows/pr-chart-e2e.yml +++ b/.github/workflows/pr-chart-e2e.yml @@ -19,6 +19,9 @@ concurrency: jobs: job1: name: Get-Test-Matrix + permissions: + contents: read + pull-requests: read runs-on: ubuntu-latest outputs: run_matrix: ${{ steps.get-test-matrix.outputs.run_matrix }} From 04d527d3b078bb55c009506e8f25313e77b37e9d Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Mon, 28 Apr 2025 13:53:50 +0800 Subject: [PATCH 021/217] Integrate set_env to ut scripts for CodeTrans. 
(#1868) Signed-off-by: ZePan110 --- .../docker_compose/amd/gpu/rocm/set_env.sh | 14 +++--- .../amd/gpu/rocm/set_env_vllm.sh | 14 +++--- CodeTrans/tests/README.md | 45 +++++++++++++++++++ CodeTrans/tests/test_compose_on_gaudi.sh | 19 ++------ CodeTrans/tests/test_compose_on_rocm.sh | 16 +------ CodeTrans/tests/test_compose_on_xeon.sh | 16 ++----- CodeTrans/tests/test_compose_tgi_on_gaudi.sh | 16 ++----- CodeTrans/tests/test_compose_tgi_on_xeon.sh | 16 ++----- CodeTrans/tests/test_compose_vllm_on_rocm.sh | 17 +------ 9 files changed, 76 insertions(+), 97 deletions(-) create mode 100644 CodeTrans/tests/README.md diff --git a/CodeTrans/docker_compose/amd/gpu/rocm/set_env.sh b/CodeTrans/docker_compose/amd/gpu/rocm/set_env.sh index c62b26477b..c1acc4464d 100644 --- a/CodeTrans/docker_compose/amd/gpu/rocm/set_env.sh +++ b/CodeTrans/docker_compose/amd/gpu/rocm/set_env.sh @@ -8,14 +8,14 @@ # which can be used to connect to the server from the Internet. It must be specified in the EXTERNAL_HOST_IP variable. # If the server is used only on the internal network or has a direct external address, # specify it in HOST_IP and in EXTERNAL_HOST_IP. -export HOST_IP='' -export EXTERNAL_HOST_IP='' +export HOST_IP=${ip_address} +export EXTERNAL_HOST_IP=${ip_address} ### Model ID export CODETRANS_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" ### The port of the TGI service. On this port, the TGI service will accept connections -export CODETRANS_TGI_SERVICE_PORT=18156 +export CODETRANS_TGI_SERVICE_PORT=8008 ### The endpoint of the TGI service to which requests to this service will be sent (formed from previously set variables) export CODETRANS_TGI_LLM_ENDPOINT="http://${HOST_IP}:${CODETRANS_TGI_SERVICE_PORT}" @@ -24,7 +24,7 @@ export CODETRANS_TGI_LLM_ENDPOINT="http://${HOST_IP}:${CODETRANS_TGI_SERVICE_POR export CODETRANS_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} ### The port of the LLM service. 
On this port, the LLM service will accept connections -export CODETRANS_LLM_SERVICE_PORT=18157 +export CODETRANS_LLM_SERVICE_PORT=9000 ### The IP address or domain name of the server for CodeTrans MegaService export CODETRANS_MEGA_SERVICE_HOST_IP=${HOST_IP} @@ -36,7 +36,7 @@ export CODETRANS_LLM_SERVICE_HOST_IP=${HOST_IP} export CODETRANS_FRONTEND_SERVICE_IP=${HOST_IP} ### The port of the frontend service -export CODETRANS_FRONTEND_SERVICE_PORT=18155 +export CODETRANS_FRONTEND_SERVICE_PORT=5173 ### Name of GenAI service for route requests to application export CODETRANS_BACKEND_SERVICE_NAME=codetrans @@ -45,10 +45,10 @@ export CODETRANS_BACKEND_SERVICE_NAME=codetrans export CODETRANS_BACKEND_SERVICE_IP=${HOST_IP} ### The port of the backend service -export CODETRANS_BACKEND_SERVICE_PORT=18154 +export CODETRANS_BACKEND_SERVICE_PORT=7777 ### The port of the Nginx reverse proxy for application -export CODETRANS_NGINX_PORT=18153 +export CODETRANS_NGINX_PORT=8088 ### Endpoint of the backend service export CODETRANS_BACKEND_SERVICE_URL="http://${EXTERNAL_HOST_IP}:${CODETRANS_BACKEND_SERVICE_PORT}/v1/codetrans" diff --git a/CodeTrans/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/CodeTrans/docker_compose/amd/gpu/rocm/set_env_vllm.sh index cafa4a19a1..ffcbd35df5 100644 --- a/CodeTrans/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/CodeTrans/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -8,14 +8,14 @@ # which can be used to connect to the server from the Internet. It must be specified in the EXTERNAL_HOST_IP variable. # If the server is used only on the internal network or has a direct external address, # specify it in HOST_IP and in EXTERNAL_HOST_IP. -export HOST_IP='' -export EXTERNAL_HOST_IP='' +export HOST_IP=${ip_address} +export EXTERNAL_HOST_IP=${ip_address} ### Model ID export CODETRANS_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" ### The port of the TGI service. 
On this port, the TGI service will accept connections -export CODETRANS_VLLM_SERVICE_PORT=18156 +export CODETRANS_VLLM_SERVICE_PORT=8008 ### The endpoint of the TGI service to which requests to this service will be sent (formed from previously set variables) export CODETRANS_LLM_ENDPOINT="http://${HOST_IP}:${CODETRANS_VLLM_SERVICE_PORT}" @@ -24,7 +24,7 @@ export CODETRANS_LLM_ENDPOINT="http://${HOST_IP}:${CODETRANS_VLLM_SERVICE_PORT}" export CODETRANS_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} ### The port of the LLM service. On this port, the LLM service will accept connections -export CODETRANS_LLM_SERVICE_PORT=18157 +export CODETRANS_LLM_SERVICE_PORT=9000 ### The IP address or domain name of the server for CodeTrans MegaService export CODETRANS_MEGA_SERVICE_HOST_IP=${HOST_IP} @@ -36,7 +36,7 @@ export CODETRANS_LLM_SERVICE_HOST_IP=${HOST_IP} export CODETRANS_FRONTEND_SERVICE_IP=${HOST_IP} ### The port of the frontend service -export CODETRANS_FRONTEND_SERVICE_PORT=18155 +export CODETRANS_FRONTEND_SERVICE_PORT=5173 ### Name of GenAI service for route requests to application export CODETRANS_BACKEND_SERVICE_NAME=codetrans @@ -45,10 +45,10 @@ export CODETRANS_BACKEND_SERVICE_NAME=codetrans export CODETRANS_BACKEND_SERVICE_IP=${HOST_IP} ### The port of the backend service -export CODETRANS_BACKEND_SERVICE_PORT=18154 +export CODETRANS_BACKEND_SERVICE_PORT=7777 ### The port of the Nginx reverse proxy for application -export CODETRANS_NGINX_PORT=18153 +export CODETRANS_NGINX_PORT=8088 ### Endpoint of the backend service export CODETRANS_BACKEND_SERVICE_URL="http://${EXTERNAL_HOST_IP}:${CODETRANS_BACKEND_SERVICE_PORT}/v1/codetrans" diff --git a/CodeTrans/tests/README.md b/CodeTrans/tests/README.md new file mode 100644 index 0000000000..62edebc6a8 --- /dev/null +++ b/CodeTrans/tests/README.md @@ -0,0 +1,45 @@ +# CodeTrans E2E test scripts + +## Set the required environment variable + +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + 
+## Run test + +On Intel Xeon with TGI: + +```bash +bash test_compose_tgi_on_xeon.sh +``` + +On Intel Xeon with vLLM: + +```bash +bash test_compose_on_xeon.sh +``` + +On Intel Gaudi with TGI: + +```bash +bash test_compose_tgi_on_gaudi.sh +``` + +On Intel Gaudi with vLLM: + +```bash +bash test_compose_on_gaudi.sh +``` + +On AMD ROCm with TGI: + +```bash +bash test_compose_on_rocm.sh +``` + +On AMD ROCm with vLLM: + +```bash +bash test_compose_vllm_on_rocm.sh +``` diff --git a/CodeTrans/tests/test_compose_on_gaudi.sh b/CodeTrans/tests/test_compose_on_gaudi.sh index a9bf0c0f79..5f287eb025 100644 --- a/CodeTrans/tests/test_compose_on_gaudi.sh +++ b/CodeTrans/tests/test_compose_on_gaudi.sh @@ -42,25 +42,12 @@ function build_docker_images() { } function start_services() { - cd $WORKPATH/docker_compose/intel/hpu/gaudi - export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" - export LLM_ENDPOINT="http://${ip_address}:8008" - export LLM_COMPONENT_NAME="OpeaTextGenService" - export NUM_CARDS=1 - export BLOCK_SIZE=128 - export MAX_NUM_SEQS=256 - export MAX_SEQ_LEN_TO_CAPTURE=2048 + cd $WORKPATH/docker_compose export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export MEGA_SERVICE_HOST_IP=${ip_address} - export LLM_SERVICE_HOST_IP=${ip_address} - export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:7777/v1/codetrans" - export FRONTEND_SERVICE_IP=${ip_address} - export FRONTEND_SERVICE_PORT=5173 - export BACKEND_SERVICE_NAME=codetrans - export BACKEND_SERVICE_IP=${ip_address} - export BACKEND_SERVICE_PORT=7777 export NGINX_PORT=80 export host_ip=${ip_address} + source set_env.sh + cd intel/hpu/gaudi sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/CodeTrans/tests/test_compose_on_rocm.sh b/CodeTrans/tests/test_compose_on_rocm.sh index 16b25c78d0..ef429636ba 100644 --- a/CodeTrans/tests/test_compose_on_rocm.sh +++ b/CodeTrans/tests/test_compose_on_rocm.sh @@ -42,21 +42,7 @@ function build_docker_images() { function start_services() { cd 
$WORKPATH/docker_compose/amd/gpu/rocm/ - export CODETRANS_TGI_SERVICE_PORT=8008 - export CODETRANS_LLM_SERVICE_PORT=9000 - export CODETRANS_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" - export CODETRANS_TGI_LLM_ENDPOINT="http://${ip_address}:${CODETRANS_TGI_SERVICE_PORT}" - export CODETRANS_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export CODETRANS_MEGA_SERVICE_HOST_IP=${ip_address} - export CODETRANS_LLM_SERVICE_HOST_IP=${ip_address} - export CODETRANS_FRONTEND_SERVICE_IP=${ip_address} - export CODETRANS_FRONTEND_SERVICE_PORT=5173 - export CODETRANS_BACKEND_SERVICE_NAME=codetrans - export CODETRANS_BACKEND_SERVICE_IP=${ip_address} - export CODETRANS_BACKEND_SERVICE_PORT=7777 - export CODETRANS_NGINX_PORT=8088 - export CODETRANS_BACKEND_SERVICE_URL="http://${ip_address}:${CODETRANS_BACKEND_SERVICE_PORT}/v1/codetrans" - export HOST_IP=${ip_address} + source set_env.sh sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/CodeTrans/tests/test_compose_on_xeon.sh b/CodeTrans/tests/test_compose_on_xeon.sh index 7b27375682..4deb89fe00 100644 --- a/CodeTrans/tests/test_compose_on_xeon.sh +++ b/CodeTrans/tests/test_compose_on_xeon.sh @@ -44,21 +44,13 @@ function build_docker_images() { } function start_services() { - cd $WORKPATH/docker_compose/intel/cpu/xeon/ - export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" - export LLM_ENDPOINT="http://${ip_address}:8008" - export LLM_COMPONENT_NAME="OpeaTextGenService" + cd $WORKPATH/docker_compose export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export MEGA_SERVICE_HOST_IP=${ip_address} - export LLM_SERVICE_HOST_IP=${ip_address} - export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:7777/v1/codetrans" - export FRONTEND_SERVICE_IP=${ip_address} - export FRONTEND_SERVICE_PORT=5173 - export BACKEND_SERVICE_NAME=codetrans - export BACKEND_SERVICE_IP=${ip_address} - export BACKEND_SERVICE_PORT=7777 + export NGINX_PORT=80 export host_ip=${ip_address} + source set_env.sh + cd 
intel/cpu/xeon/ sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/CodeTrans/tests/test_compose_tgi_on_gaudi.sh b/CodeTrans/tests/test_compose_tgi_on_gaudi.sh index c0f5e1e714..cb4bb53659 100644 --- a/CodeTrans/tests/test_compose_tgi_on_gaudi.sh +++ b/CodeTrans/tests/test_compose_tgi_on_gaudi.sh @@ -40,21 +40,13 @@ function build_docker_images() { } function start_services() { - cd $WORKPATH/docker_compose/intel/hpu/gaudi/ - export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" - export LLM_ENDPOINT="http://${ip_address}:8008" - export LLM_COMPONENT_NAME="OpeaTextGenService" + cd $WORKPATH/docker_compose export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export MEGA_SERVICE_HOST_IP=${ip_address} - export LLM_SERVICE_HOST_IP=${ip_address} - export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:7777/v1/codetrans" - export FRONTEND_SERVICE_IP=${ip_address} - export FRONTEND_SERVICE_PORT=5173 - export BACKEND_SERVICE_NAME=codetrans - export BACKEND_SERVICE_IP=${ip_address} - export BACKEND_SERVICE_PORT=7777 + export NGINX_PORT=80 export host_ip=${ip_address} + source set_env.sh + cd intel/hpu/gaudi/ sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/CodeTrans/tests/test_compose_tgi_on_xeon.sh b/CodeTrans/tests/test_compose_tgi_on_xeon.sh index be7aec935d..57bd46348f 100644 --- a/CodeTrans/tests/test_compose_tgi_on_xeon.sh +++ b/CodeTrans/tests/test_compose_tgi_on_xeon.sh @@ -40,21 +40,13 @@ function build_docker_images() { } function start_services() { - cd $WORKPATH/docker_compose/intel/cpu/xeon/ - export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" - export LLM_ENDPOINT="http://${ip_address}:8008" - export LLM_COMPONENT_NAME="OpeaTextGenService" + cd $WORKPATH/docker_compose export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export MEGA_SERVICE_HOST_IP=${ip_address} - export LLM_SERVICE_HOST_IP=${ip_address} - export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:7777/v1/codetrans" 
- export FRONTEND_SERVICE_IP=${ip_address} - export FRONTEND_SERVICE_PORT=5173 - export BACKEND_SERVICE_NAME=codetrans - export BACKEND_SERVICE_IP=${ip_address} - export BACKEND_SERVICE_PORT=7777 + export NGINX_PORT=80 export host_ip=${ip_address} + source set_env.sh + cd intel/cpu/xeon/ sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/CodeTrans/tests/test_compose_vllm_on_rocm.sh b/CodeTrans/tests/test_compose_vllm_on_rocm.sh index 5279336ba4..558a3a02ad 100644 --- a/CodeTrans/tests/test_compose_vllm_on_rocm.sh +++ b/CodeTrans/tests/test_compose_vllm_on_rocm.sh @@ -40,22 +40,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/amd/gpu/rocm/ - export HOST_IP=${ip_address} - export CODETRANS_VLLM_SERVICE_PORT=8008 - export CODETRANS_LLM_SERVICE_PORT=9000 - export CODETRANS_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" - export CODETRANS_LLM_ENDPOINT="http://${ip_address}:${CODETRANS_VLLM_SERVICE_PORT}" - export CODETRANS_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export CODETRANS_MEGA_SERVICE_HOST_IP=${ip_address} - export CODETRANS_LLM_SERVICE_HOST_IP=${ip_address} - export CODETRANS_FRONTEND_SERVICE_IP=${ip_address} - export CODETRANS_FRONTEND_SERVICE_PORT=5173 - export CODETRANS_BACKEND_SERVICE_NAME=codetrans - export CODETRANS_BACKEND_SERVICE_IP=${ip_address} - export CODETRANS_BACKEND_SERVICE_PORT=7777 - export CODETRANS_NGINX_PORT=8088 - export CODETRANS_BACKEND_SERVICE_URL="http://${ip_address}:${CODETRANS_BACKEND_SERVICE_PORT}/v1/codetrans" - export HOST_IP=${ip_address} + source set_env_vllm.sh sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env From 555c4100b3ffb2a296c3a5250ad1476920ff2910 Mon Sep 17 00:00:00 2001 From: Zhu Yongbo Date: Tue, 29 Apr 2025 10:08:23 +0800 Subject: [PATCH 022/217] Install cpu version for components (#1888) Signed-off-by: Yongbozzz --- EdgeCraftRAG/Dockerfile.server | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/EdgeCraftRAG/Dockerfile.server b/EdgeCraftRAG/Dockerfile.server index 5d0aee5cd1..13efc304ca 100644 --- a/EdgeCraftRAG/Dockerfile.server +++ b/EdgeCraftRAG/Dockerfile.server @@ -40,7 +40,7 @@ USER user WORKDIR /home/user/edgecraftrag RUN pip install --no-cache-dir --upgrade pip setuptools==70.0.0 && \ - pip install --no-cache-dir -r requirements.txt + pip install --no-cache-dir --extra-index-url https://download.pytorch.org/whl/cpu -r requirements.txt WORKDIR /home/user/ RUN git clone https://github.com/openvinotoolkit/openvino.genai.git genai From 670d9f3d180e888b77ae89dba71bb2455acae430 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Tue, 29 Apr 2025 19:44:48 +0800 Subject: [PATCH 023/217] Fix security issue. (#1892) Signed-off-by: ZePan110 --- .github/workflows/_helm-e2e.yml | 44 ++++++++++++++++----------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/.github/workflows/_helm-e2e.yml b/.github/workflows/_helm-e2e.yml index 7f150f3890..a66b3204bd 100644 --- a/.github/workflows/_helm-e2e.yml +++ b/.github/workflows/_helm-e2e.yml @@ -131,32 +131,32 @@ jobs: ref: ${{ steps.get-checkout-ref.outputs.CHECKOUT_REF }} fetch-depth: 0 - - name: Validate Inputs - run: | - cd ${{ github.workspace }} - folders=($(find . -maxdepth 1 -type d ! -name ".*" -printf "%f\n" | tr '[:upper:]' '[:lower:]')) - echo "folders: ${folders[@]}" - echo "example: ${{ inputs.example }}" - example_lower=$(echo "${{ inputs.example }}" | tr '[:upper:]' '[:lower:]') - if [[ ! " ${folders[@]} " =~ " ${example_lower} " ]]; then - echo "Error: Input '${example_lower}' is not in the list of folders." 
- exit 1 - fi - - name: Set variables env: example: ${{ inputs.example }} run: | - CHART_NAME="${example,,}" # CodeGen - echo "CHART_NAME=$CHART_NAME" >> $GITHUB_ENV - echo "RELEASE_NAME=${CHART_NAME}$(date +%Y%m%d%H%M%S)" >> $GITHUB_ENV - echo "NAMESPACE=${CHART_NAME}-$(head -c 4 /dev/urandom | xxd -p)" >> $GITHUB_ENV - echo "ROLLOUT_TIMEOUT_SECONDS=600s" >> $GITHUB_ENV - echo "TEST_TIMEOUT_SECONDS=600s" >> $GITHUB_ENV - echo "KUBECTL_TIMEOUT_SECONDS=60s" >> $GITHUB_ENV - echo "should_cleanup=false" >> $GITHUB_ENV - echo "skip_validate=false" >> $GITHUB_ENV - echo "CHART_FOLDER=${example}/kubernetes/helm" >> $GITHUB_ENV + if [[ ! "$example" =~ ^[a-zA-Z]{1,20}$ ]] || [[ "$example" =~ \.\. ]] || [[ "$example" == -* || "$example" == *- ]]; then + echo "Error: Invalid input - only lowercase alphanumeric and internal hyphens allowed" + exit 1 + fi + # SAFE_PREFIX="kb-" + CHART_NAME="${SAFE_PREFIX}$(echo "$example" | tr '[:upper:]' '[:lower:]')" + RAND_SUFFIX=$(openssl rand -hex 2 | tr -dc 'a-f0-9') + + cat <> $GITHUB_ENV + CHART_NAME=${CHART_NAME} + RELEASE_NAME=${CHART_NAME}-$(date +%s) + NAMESPACE=ns-${CHART_NAME}-${RAND_SUFFIX} + ROLLOUT_TIMEOUT_SECONDS=600s + TEST_TIMEOUT_SECONDS=600s + KUBECTL_TIMEOUT_SECONDS=60s + should_cleanup=false + skip_validate=false + CHART_FOLDER=${example}/kubernetes/helm + EOF + + echo "Generated safe variables:" >> $GITHUB_STEP_SUMMARY + echo "- CHART_NAME: ${CHART_NAME}" >> $GITHUB_STEP_SUMMARY - name: Helm install id: install From d334f5c8fd1fd51fa4c29ff685485e50bf4c5d73 Mon Sep 17 00:00:00 2001 From: lkk <33276950+lkk12014402@users.noreply.github.com> Date: Tue, 29 Apr 2025 23:58:52 +0800 Subject: [PATCH 024/217] build cpu agent ui docker image. 
(#1894) --- .../workflows/scripts/codeScan/hadolint.sh | 2 +- AgentQnA/ui/docker/Dockerfile | 217 +++++++++++++++--- 2 files changed, 187 insertions(+), 32 deletions(-) diff --git a/.github/workflows/scripts/codeScan/hadolint.sh b/.github/workflows/scripts/codeScan/hadolint.sh index e698d3634e..a9a41f865f 100644 --- a/.github/workflows/scripts/codeScan/hadolint.sh +++ b/.github/workflows/scripts/codeScan/hadolint.sh @@ -7,7 +7,7 @@ source /GenAIExamples/.github/workflows/scripts/change_color log_dir=/GenAIExamples/.github/workflows/scripts/codeScan ERROR_WARN=false -find . -type f \( -name "Dockerfile*" \) -print -exec hadolint --ignore DL3006 --ignore DL3007 --ignore DL3008 --ignore DL3013 {} \; > ${log_dir}/hadolint.log +find . -type f \( -name "Dockerfile*" \) -print -exec hadolint --ignore DL3006 --ignore DL3007 --ignore DL3008 --ignore DL3013 --ignore DL3018 --ignore DL3016 {} \; > ${log_dir}/hadolint.log if [[ $(grep -c "error" ${log_dir}/hadolint.log) != 0 ]]; then $BOLD_RED && echo "Error!! Please Click on the artifact button to download and check error details." 
&& $RESET diff --git a/AgentQnA/ui/docker/Dockerfile b/AgentQnA/ui/docker/Dockerfile index 8cd279898c..1f5605a15b 100644 --- a/AgentQnA/ui/docker/Dockerfile +++ b/AgentQnA/ui/docker/Dockerfile @@ -1,49 +1,204 @@ -# Copyright (C) 2024 Intel Corporation +# Copyright (C) 2025 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -#FROM python:3.11-slim -FROM node:22.9.0 +# syntax=docker/dockerfile:1 +# Initialize device type args +# use build args in the docker build command with --build-arg="BUILDARG=true" +ARG USE_CUDA=false +ARG USE_OLLAMA=false +# Tested with cu117 for CUDA 11 and cu121 for CUDA 12 (default) +ARG USE_CUDA_VER=cu121 +# any sentence transformer model; models to use can be found at https://huggingface.co/models?library=sentence-transformers +# Leaderboard: https://huggingface.co/spaces/mteb/leaderboard +# for better performance and multilangauge support use "intfloat/multilingual-e5-large" (~2.5GB) or "intfloat/multilingual-e5-base" (~1.5GB) +# IMPORTANT: If you change the embedding model (sentence-transformers/all-MiniLM-L6-v2) and vice versa, you aren't able to use RAG Chat with your previous documents loaded in the WebUI! You need to re-embed them. 
+ARG USE_EMBEDDING_MODEL=sentence-transformers/all-MiniLM-L6-v2 +ARG USE_RERANKING_MODEL="" -ENV LANG=C.UTF-8 -ARG ARCH=cpu +# Tiktoken encoding name; models to use can be found at https://huggingface.co/models?library=tiktoken +ARG USE_TIKTOKEN_ENCODING_NAME="cl100k_base" -RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \ - build-essential \ - libgl1-mesa-glx \ - libjemalloc-dev \ - git \ - python3-venv +ARG BUILD_HASH=dev-build +# Override at your own risk - non-root configurations are untested +ARG UID=0 +ARG GID=0 +######## WebUI frontend ######## +FROM --platform=$BUILDPLATFORM node:22-alpine3.20 AS build +ARG BUILD_HASH -WORKDIR /root/ +WORKDIR /app -ENV HOME=/root -ENV VIRTUAL_ENV=$HOME/.env/open-webui +COPY open_webui_patches /app/patches +ARG WEBUI_VERSION=v0.5.20 +RUN apk add --no-cache git + +# Clone code and use patch +RUN git config --global user.name "opea" && \ + git config --global user.email "" && \ + git clone https://github.com/open-webui/open-webui.git + +WORKDIR /app/open-webui + +RUN git checkout ${WEBUI_VERSION} && \ + if [ -d patches ] && [ "$(ls -A patches)" ]; then git am /app/patches/*.patch; fi + +WORKDIR /app + +RUN mv open-webui/* . 
&& rm -fr open-webui && ls -lrth /app/backend/ -COPY open_webui_patches /root/patches +RUN npm install onnxruntime-node --onnxruntime-node-install-cuda=skip +RUN apk update && \ + apk add --no-cache wget && \ + wget https://github.com/microsoft/onnxruntime/releases/download/v1.20.1/onnxruntime-linux-x64-gpu-1.20.1.tgz -RUN git clone https://github.com/open-webui/open-webui.git && \ - git config --global user.name "opea" && git config --global user.email "" && \ - mkdir -p $HOME/.env && python3 -m venv $VIRTUAL_ENV && \ - $VIRTUAL_ENV/bin/python -m pip install --no-cache-dir --upgrade pip && \ - $VIRTUAL_ENV/bin/python -m pip install --no-cache-dir build +ENV APP_BUILD_HASH=${BUILD_HASH} +RUN npm run build -WORKDIR /root/open-webui +######## WebUI backend ######## +FROM python:3.11-slim-bookworm AS base -ENV PATH="$VIRTUAL_ENV/bin:$PATH" +# Use args +ARG USE_CUDA +ARG USE_OLLAMA +ARG USE_CUDA_VER +ARG USE_EMBEDDING_MODEL +ARG USE_RERANKING_MODEL +ARG UID +ARG GID -RUN git checkout v0.5.20 && \ - git am ../patches/*.patch && \ - python -m build && \ - pip install --no-cache-dir dist/open_webui-0.5.20-py3-none-any.whl +## Basis ## +ENV ENV=prod \ + PORT=8080 \ + # pass build args to the build + USE_OLLAMA_DOCKER=${USE_OLLAMA} \ + USE_CUDA_DOCKER=${USE_CUDA} \ + USE_CUDA_DOCKER_VER=${USE_CUDA_VER} \ + USE_EMBEDDING_MODEL_DOCKER=${USE_EMBEDDING_MODEL} \ + USE_RERANKING_MODEL_DOCKER=${USE_RERANKING_MODEL} -ENV LANG=en_US.UTF-8 +## Basis URL Config ## +ENV OLLAMA_BASE_URL="/ollama" \ + OPENAI_API_BASE_URL="" -WORKDIR /root/ +## API Key and Security Config ## +ENV OPENAI_API_KEY="" \ + WEBUI_SECRET_KEY="" \ + SCARF_NO_ANALYTICS=true \ + DO_NOT_TRACK=true \ + ANONYMIZED_TELEMETRY=false -RUN rm -fr /root/open-webui && rm -fr /root/patches +#### Other models ######################################################### +## whisper TTS model settings ## +ENV WHISPER_MODEL="base" \ + WHISPER_MODEL_DIR="/app/backend/data/cache/whisper/models" -# CMD ["/bin/bash"] -ENTRYPOINT 
["open-webui", "serve"] +## RAG Embedding model settings ## +ENV RAG_EMBEDDING_MODEL="$USE_EMBEDDING_MODEL_DOCKER" \ + RAG_RERANKING_MODEL="$USE_RERANKING_MODEL_DOCKER" \ + SENTENCE_TRANSFORMERS_HOME="/app/backend/data/cache/embedding/models" +## Tiktoken model settings ## +ENV TIKTOKEN_ENCODING_NAME="cl100k_base" \ + TIKTOKEN_CACHE_DIR="/app/backend/data/cache/tiktoken" + +## Hugging Face download cache ## +ENV HF_HOME="/app/backend/data/cache/embedding/models" +## Torch Extensions ## +# ENV TORCH_EXTENSIONS_DIR="/.cache/torch_extensions" + +#### Other models ########################################################## + +COPY --from=build /app/backend /app/backend + +WORKDIR /app/backend + + +ENV HOME=/root +# Create user and group if not root +RUN if [ $UID -ne 0 ]; then \ + if [ $GID -ne 0 ]; then \ + addgroup --gid $GID app; \ + fi; \ + adduser --uid $UID --gid $GID --home $HOME --disabled-password --no-create-home app; \ + fi + +RUN mkdir -p $HOME/.cache/chroma +RUN printf 00000000-0000-0000-0000-000000000000 > $HOME/.cache/chroma/telemetry_user_id + +# Make sure the user has access to the app and root directory +RUN chown -R $UID:$GID /app $HOME + +SHELL ["/bin/bash", "-o", "pipefail", "-c"] + +RUN if [ "$USE_OLLAMA" = "true" ]; then \ + apt-get update && \ + # Install pandoc and netcat + apt-get install -y --no-install-recommends git build-essential pandoc netcat-openbsd curl && \ + apt-get install -y --no-install-recommends gcc python3-dev && \ + # for RAG OCR + apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 && \ + # install helper tools + apt-get install -y --no-install-recommends curl jq && \ + # install ollama + curl -fsSL https://ollama.com/install.sh | sh && \ + # cleanup + rm -rf /var/lib/apt/lists/*; \ + else \ + apt-get update && \ + # Install pandoc, netcat and gcc + apt-get install -y --no-install-recommends git build-essential pandoc gcc netcat-openbsd curl jq && \ + apt-get install -y --no-install-recommends gcc python3-dev && 
\ + # for RAG OCR + apt-get install -y --no-install-recommends ffmpeg libsm6 libxext6 && \ + # cleanup + rm -rf /var/lib/apt/lists/*; \ + fi + +# install python dependencies +# COPY --chown=$UID:$GID ./backend/requirements.txt ./requirements.txt +# RUN cp /app/backend/requirements.txt ./requirements.txt + +RUN pip3 install --no-cache-dir uv && \ + if [ "$USE_CUDA" = "true" ]; then \ + # If you use CUDA the whisper and embedding model will be downloaded on first use + pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/$USE_CUDA_DOCKER_VER --no-cache-dir && \ + uv pip install --system -r requirements.txt --no-cache-dir && \ + python -c "import os; from sentence_transformers import SentenceTransformer; SentenceTransformer(os.environ['RAG_EMBEDDING_MODEL'], device='cpu')" && \ + python -c "import os; from faster_whisper import WhisperModel; WhisperModel(os.environ['WHISPER_MODEL'], device='cpu', compute_type='int8', download_root=os.environ['WHISPER_MODEL_DIR'])"; \ + python -c "import os; import tiktoken; tiktoken.get_encoding(os.environ['TIKTOKEN_ENCODING_NAME'])"; \ + else \ + pip3 install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu --no-cache-dir && \ + uv pip install --system -r requirements.txt --no-cache-dir && \ + python -c "import os; from sentence_transformers import SentenceTransformer; SentenceTransformer(os.environ['RAG_EMBEDDING_MODEL'], device='cpu')" && \ + python -c "import os; from faster_whisper import WhisperModel; WhisperModel(os.environ['WHISPER_MODEL'], device='cpu', compute_type='int8', download_root=os.environ['WHISPER_MODEL_DIR'])"; \ + python -c "import os; import tiktoken; tiktoken.get_encoding(os.environ['TIKTOKEN_ENCODING_NAME'])"; \ + fi; \ + chown -R $UID:$GID /app/backend/data/ + + + +# copy embedding weight from build +# RUN mkdir -p /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2 +# COPY --from=build /app/onnx /root/.cache/chroma/onnx_models/all-MiniLM-L6-v2/onnx + 
+# copy built frontend files +COPY --chown=$UID:$GID --from=build /app/build /app/build +COPY --chown=$UID:$GID --from=build /app/CHANGELOG.md /app/CHANGELOG.md +COPY --chown=$UID:$GID --from=build /app/package.json /app/package.json + +# copy backend files +# COPY --chown=$UID:$GID ./backend . + +EXPOSE 8080 + +HEALTHCHECK CMD curl --silent --fail http://localhost:${PORT:-8080}/health | jq -ne 'input.status == true' || exit 1 + +USER $UID:$GID + +ARG BUILD_HASH +ENV WEBUI_BUILD_VERSION=${BUILD_HASH} +ENV DOCKER=true + +CMD [ "bash", "start.sh"] From 5c7f5718eddc5f0975a7ba66601baa0e09109cc4 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Wed, 30 Apr 2025 11:09:21 +0800 Subject: [PATCH 025/217] Restore context in EdgeCraftRAG build.yaml. (#1895) Restore context in EdgeCraftRAG build.yaml to avoid the issue of can't find Dockerfiles. Signed-off-by: ZePan110 --- EdgeCraftRAG/docker_image_build/build.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/EdgeCraftRAG/docker_image_build/build.yaml b/EdgeCraftRAG/docker_image_build/build.yaml index de4a781d49..18ad867c75 100644 --- a/EdgeCraftRAG/docker_image_build/build.yaml +++ b/EdgeCraftRAG/docker_image_build/build.yaml @@ -14,16 +14,19 @@ services: image: ${REGISTRY:-opea}/edgecraftrag:${TAG:-latest} edgecraftrag-server: build: + context: ../ dockerfile: ./Dockerfile.server extends: edgecraftrag image: ${REGISTRY:-opea}/edgecraftrag-server:${TAG:-latest} edgecraftrag-ui: build: + context: ../ dockerfile: ./ui/docker/Dockerfile.ui extends: edgecraftrag image: ${REGISTRY:-opea}/edgecraftrag-ui:${TAG:-latest} edgecraftrag-ui-gradio: build: + context: ../ dockerfile: ./ui/docker/Dockerfile.gradio extends: edgecraftrag image: ${REGISTRY:-opea}/edgecraftrag-ui-gradio:${TAG:-latest} From 9259ba41a5d582138c7e7c9c84a0b2461a60fb86 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Wed, 30 Apr 2025 13:24:42 +0800 Subject: [PATCH 026/217] Remove invalid codeowner. 
(#1896) Signed-off-by: ZePan110 --- .github/CODEOWNERS | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 22a42faadf..4894db074f 100755 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -4,13 +4,13 @@ /AudioQnA/ sihan.chen@intel.com wenjiao.yue@intel.com /AvatarChatbot/ chun.tao@intel.com kaokao.lv@intel.com /ChatQnA/ liang1.lv@intel.com letong.han@intel.com -/CodeGen/ liang1.lv@intel.com xinyao.wang@intel.com -/CodeTrans/ sihan.chen@intel.com xinyao.wang@intel.com +/CodeGen/ liang1.lv@intel.com +/CodeTrans/ sihan.chen@intel.com /DBQnA/ supriya.krishnamurthi@intel.com liang1.lv@intel.com /DocIndexRetriever/ kaokao.lv@intel.com chendi.xue@intel.com -/DocSum/ letong.han@intel.com xinyao.wang@intel.com +/DocSum/ letong.han@intel.com /EdgeCraftRAG/ yongbo.zhu@intel.com mingyuan.qi@intel.com -/FaqGen/ yogesh.pandey@intel.com xinyao.wang@intel.com +/FaqGen/ yogesh.pandey@intel.com /GraphRAG/ rita.brugarolas.brufau@intel.com abolfazl.shahbazi@intel.com /InstructionTuning/ xinyu.ye@intel.com kaokao.lv@intel.com /MultimodalQnA/ melanie.h.buehler@intel.com tiep.le@intel.com @@ -19,5 +19,5 @@ /SearchQnA/ sihan.chen@intel.com letong.han@intel.com /Text2Image/ wenjiao.yue@intel.com xinyu.ye@intel.com /Translation/ liang1.lv@intel.com sihan.chen@intel.com -/VideoQnA/ huiling.bao@intel.com xinyao.wang@intel.com +/VideoQnA/ huiling.bao@intel.com /VisualQnA/ liang1.lv@intel.com sihan.chen@intel.com \ No newline at end of file From 40e44dfcd66a78f898ba96198d41d42e4ac7547b Mon Sep 17 00:00:00 2001 From: Ying Hu Date: Tue, 6 May 2025 13:21:31 +0800 Subject: [PATCH 027/217] Update README.md of ChatQnA for broken URL (#1907) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Neo Zhang Jianyu --- ChatQnA/README.md | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/ChatQnA/README.md b/ChatQnA/README.md index 
cedfd8637a..75de89d1b8 100644 --- a/ChatQnA/README.md +++ b/ChatQnA/README.md @@ -96,20 +96,21 @@ flowchart LR The table below lists currently available deployment options. They outline in detail the implementation of this example on selected hardware. -| Category | Deployment Option | Description | -| ----------------------- | ---------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| On-premise Deployments | Docker compose | [ChatQnA deployment on Xeon](./docker_compose/intel/cpu/xeon) | -| | | [ChatQnA deployment on AI PC](./docker_compose/intel/cpu/aipc) | -| | | [ChatQnA deployment on Gaudi](./docker_compose/intel/hpu/gaudi) | -| | | [ChatQnA deployment on Nvidia GPU](./docker_compose/nvidia/gpu) | -| | | [ChatQnA deployment on AMD ROCm](./docker_compose/amd/gpu/rocm) | -| | Kubernetes | [Helm Charts](./kubernetes/helm) | -| Cloud Service Providers | AWS | [Terraform deployment on 4th Gen Intel Xeon with Intel AMX using meta-llama/Meta-Llama-3-8B-Instruct ](https://github.com/intel/terraform-intel-aws-vm/tree/main/examples/gen-ai-xeon-opea-chatqna) | -| | | [Terraform deployment on 4th Gen Intel Xeon with Intel AMX using TII Falcon2-11B](https://github.com/intel/terraform-intel-aws-vm/tree/main/examples/gen-ai-xeon-opea-chatqna-falcon11B) | -| | GCP | [Terraform deployment on 5th Gen Intel Xeon with Intel AMX(support Confidential AI by using Intel® TDX](https://github.com/intel/terraform-intel-gcp-vm/tree/main/examples/gen-ai-xeon-opea-chatqna) | -| | Azure | [Terraform deployment on 4th/5th Gen Intel Xeon with Intel AMX & Intel TDX](https://github.com/intel/terraform-intel-azure-linux-vm/tree/main/examples/azure-gen-ai-xeon-opea-chatqna-tdx) | -| | Intel Tiber AI Cloud | Coming Soon | -| | Any Xeon based 
Ubuntu system | [ChatQnA Ansible Module for Ubuntu 20.04](https://github.com/intel/optimized-cloud-recipes/tree/main/recipes/ai-opea-chatqna-xeon) .Use this if you are not using Terraform and have provisioned your system either manually or with another tool, including directly on bare metal. | +| Category | Deployment Option | Description | +| ------------------------------------------------------------------------------------------------------------------------------ | ---------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| On-premise Deployments | Docker compose | [ChatQnA deployment on Xeon](./docker_compose/intel/cpu/xeon/README.md) | +| | | [ChatQnA deployment on AI PC](./docker_compose/intel/cpu/aipc/README.md) | +| | | [ChatQnA deployment on Gaudi](./docker_compose/intel/hpu/gaudi/README.md) | +| | | [ChatQnA deployment on Nvidia GPU](./docker_compose/nvidia/gpu/README.md) | +| | | [ChatQnA deployment on AMD ROCm](./docker_compose/amd/gpu/rocm/README.md) | +| Cloud Platforms Deployment on AWS, GCP, Azure, IBM Cloud,Oracle Cloud, [Intel® Tiber™ AI Cloud](https://ai.cloud.intel.com/) | Docker Compose | [Getting Started Guide: Deploy the ChatQnA application across multiple cloud platforms](https://github.com/opea-project/docs/tree/main/getting-started/README.md) | +| | Kubernetes | [Helm Charts](./kubernetes/helm/README.md) | +| Automated Terraform Deployment on Cloud Service Providers | AWS | [Terraform deployment on 4th Gen Intel Xeon with Intel AMX using meta-llama/Meta-Llama-3-8B-Instruct ](https://github.com/intel/terraform-intel-aws-vm/tree/main/examples/gen-ai-xeon-opea-chatqna) | +| | | [Terraform deployment on 4th Gen Intel Xeon with Intel AMX using TII 
Falcon2-11B](https://github.com/intel/terraform-intel-aws-vm/tree/main/examples/gen-ai-xeon-opea-chatqna-falcon11B) | +| | GCP | [Terraform deployment on 5th Gen Intel Xeon with Intel AMX(support Confidential AI by using Intel® TDX](https://github.com/intel/terraform-intel-gcp-vm/tree/main/examples/gen-ai-xeon-opea-chatqna) | +| | Azure | [Terraform deployment on 4th/5th Gen Intel Xeon with Intel AMX & Intel TDX](https://github.com/intel/terraform-intel-azure-linux-vm/tree/main/examples/azure-gen-ai-xeon-opea-chatqna-tdx) | +| | Intel Tiber AI Cloud | Coming Soon | +| | Any Xeon based Ubuntu system | [ChatQnA Ansible Module for Ubuntu 20.04](https://github.com/intel/optimized-cloud-recipes/tree/main/recipes/ai-opea-chatqna-xeon). Use this if you are not using Terraform and have provisioned your system either manually or with another tool, including directly on bare metal. | ## Monitor and Tracing From df3380094574e7a13ea1950d4c07db3f6efde8f6 Mon Sep 17 00:00:00 2001 From: Omar Khleif Date: Mon, 5 May 2025 22:41:21 -0700 Subject: [PATCH 028/217] CodeGen Gradio UI Enhancements (#1904) Signed-off-by: okhleif-IL --- CodeGen/ui/gradio/codegen_ui_gradio.py | 48 +++++++++++++++++++------- 1 file changed, 36 insertions(+), 12 deletions(-) diff --git a/CodeGen/ui/gradio/codegen_ui_gradio.py b/CodeGen/ui/gradio/codegen_ui_gradio.py index ad1c5b6c2b..b84269a577 100644 --- a/CodeGen/ui/gradio/codegen_ui_gradio.py +++ b/CodeGen/ui/gradio/codegen_ui_gradio.py @@ -46,13 +46,24 @@ # Define the functions that will be used in the app + + +def add_to_history(prompt, history): + history.append([prompt["text"], ""]) + return history, "" + + def conversation_history(prompt, index, use_agent, history): print(f"Generating code for prompt: {prompt} using index: {index} and use_agent is {use_agent}") - history.append([prompt, ""]) - response_generator = generate_code(prompt, index, use_agent) + history = add_to_history(prompt, history)[0] + response_generator = generate_code(prompt["text"], 
index, use_agent) for token in response_generator: history[-1][-1] += token - yield history + yield history, "" + + +def clear_history(): + return "" def upload_media(media, index=None, chunk_size=1500, chunk_overlap=100): @@ -287,19 +298,32 @@ def get_file_names(files): # Define UI components with gr.Blocks() as ui: with gr.Tab("Code Generation"): - gr.Markdown("### Generate Code from Natural Language") - chatbot = gr.Chatbot(label="Chat History") - prompt_input = gr.Textbox(label="Enter your query") - with gr.Column(): - with gr.Row(equal_height=True): + with gr.Row(): + with gr.Column(scale=2): database_dropdown = gr.Dropdown(choices=get_indices(), label="Select Index", value="None", scale=10) db_refresh_button = gr.Button("Refresh Dropdown", scale=0.1) db_refresh_button.click(update_indices_dropdown, outputs=database_dropdown) use_agent = gr.Checkbox(label="Use Agent", container=False) - generate_button = gr.Button("Generate Code") - generate_button.click( - conversation_history, inputs=[prompt_input, database_dropdown, use_agent, chatbot], outputs=chatbot + with gr.Column(scale=9): + gr.Markdown("### Generate Code from Natural Language") + chatbot = gr.Chatbot(label="Chat History") + with gr.Row(equal_height=True): + with gr.Column(scale=8): + prompt_input = gr.MultimodalTextbox( + show_label=False, interactive=True, placeholder="Enter your query", sources=[] + ) + with gr.Column(scale=1, min_width=150): + with gr.Row(elem_id="buttons") as button_row: + clear_btn = gr.Button(value="🗑️ Clear", interactive=True) + clear_btn.click(clear_history, None, chatbot) + + prompt_input.submit(add_to_history, inputs=[prompt_input, chatbot], outputs=[chatbot, prompt_input]) + + prompt_input.submit( + conversation_history, + inputs=[prompt_input, database_dropdown, use_agent, chatbot], + outputs=[chatbot, prompt_input], ) with gr.Tab("Resource Management"): @@ -315,7 +339,7 @@ def get_file_names(files): ) with gr.Column(scale=3): file_upload = gr.File(label="Upload Files", 
file_count="multiple") - url_input = gr.Textbox(label="Media to be ingested (Append URL's in a new line)") + url_input = gr.Textbox(label="Media to be ingested. Append URL's in a new line (Shift + Enter)") upload_button = gr.Button("Upload", variant="primary") upload_status = gr.Textbox(label="Upload Status") file_upload.change(get_file_names, inputs=file_upload, outputs=url_input) From 5375332fb394d2601e5a8ef9126c030e2f0c318b Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Tue, 6 May 2025 15:54:43 +0800 Subject: [PATCH 029/217] Fix security issues for helm test workflow (#1908) Signed-off-by: ZePan110 Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .github/workflows/_helm-e2e.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/_helm-e2e.yml b/.github/workflows/_helm-e2e.yml index a66b3204bd..edff2d7f36 100644 --- a/.github/workflows/_helm-e2e.yml +++ b/.github/workflows/_helm-e2e.yml @@ -2,7 +2,9 @@ # SPDX-License-Identifier: Apache-2.0 name: Helm Chart E2e Test For Call -permissions: read-all +permissions: + contents: read + on: workflow_call: inputs: From ff66600ab40d7ed91f5c289aa39726d83edeec36 Mon Sep 17 00:00:00 2001 From: lkk <33276950+lkk12014402@users.noreply.github.com> Date: Tue, 6 May 2025 16:34:16 +0800 Subject: [PATCH 030/217] Fix ui dockerfile. 
(#1909) Signed-off-by: lkk <33276950+lkk12014402@users.noreply.github.com> --- AgentQnA/ui/docker/Dockerfile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/AgentQnA/ui/docker/Dockerfile b/AgentQnA/ui/docker/Dockerfile index 1f5605a15b..380dffed7a 100644 --- a/AgentQnA/ui/docker/Dockerfile +++ b/AgentQnA/ui/docker/Dockerfile @@ -40,8 +40,7 @@ RUN git config --global user.name "opea" && \ WORKDIR /app/open-webui -RUN git checkout ${WEBUI_VERSION} && \ - if [ -d patches ] && [ "$(ls -A patches)" ]; then git am /app/patches/*.patch; fi +RUN git checkout ${WEBUI_VERSION} && git am /app/patches/*.patch WORKDIR /app From 505ec6d4b65c6d290b60d56b231420598caa50d0 Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Wed, 7 May 2025 11:28:04 +0800 Subject: [PATCH 031/217] update PR reviewers (#1913) Signed-off-by: chensuyue --- .github/CODEOWNERS | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 4894db074f..e57bd74544 100755 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,16 +1,18 @@ -* liang1.lv@intel.com feng.tian@intel.com suyue.chen@intel.com +# Code owners will review PRs within their respective folders. 
+ +* liang1.lv@intel.com feng.tian@intel.com suyue.chen@intel.com kaokao.lv@intel.com minmin.hou@intel.com rita.brugarolas.brufau@intel.com /.github/ suyue.chen@intel.com ze.pan@intel.com -/AgentQnA/ kaokao.lv@intel.com minmin.hou@intel.com +/AgentQnA/ abolfazl.shahbazi@intel.com kaokao.lv@intel.com minmin.hou@intel.com /AudioQnA/ sihan.chen@intel.com wenjiao.yue@intel.com /AvatarChatbot/ chun.tao@intel.com kaokao.lv@intel.com /ChatQnA/ liang1.lv@intel.com letong.han@intel.com /CodeGen/ liang1.lv@intel.com /CodeTrans/ sihan.chen@intel.com /DBQnA/ supriya.krishnamurthi@intel.com liang1.lv@intel.com -/DocIndexRetriever/ kaokao.lv@intel.com chendi.xue@intel.com +/DocIndexRetriever/ abolfazl.shahbazi@intel.com kaokao.lv@intel.com chendi.xue@intel.com /DocSum/ letong.han@intel.com /EdgeCraftRAG/ yongbo.zhu@intel.com mingyuan.qi@intel.com -/FaqGen/ yogesh.pandey@intel.com +/FinanceAgent/ abolfazl.shahbazi@intel.com kaokao.lv@intel.com minmin.hou@intel.com rita.brugarolas.brufau@intel.com /GraphRAG/ rita.brugarolas.brufau@intel.com abolfazl.shahbazi@intel.com /InstructionTuning/ xinyu.ye@intel.com kaokao.lv@intel.com /MultimodalQnA/ melanie.h.buehler@intel.com tiep.le@intel.com @@ -20,4 +22,5 @@ /Text2Image/ wenjiao.yue@intel.com xinyu.ye@intel.com /Translation/ liang1.lv@intel.com sihan.chen@intel.com /VideoQnA/ huiling.bao@intel.com -/VisualQnA/ liang1.lv@intel.com sihan.chen@intel.com \ No newline at end of file +/VisualQnA/ liang1.lv@intel.com sihan.chen@intel.com +/WorkflowExecAgent/ joshua.jian.ern.liew@intel.com kaokao.lv@intel.com \ No newline at end of file From f6013b867904244cbf49878775f8410495357fe6 Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Wed, 7 May 2025 11:35:37 +0800 Subject: [PATCH 032/217] Add exempt-issue-labels to stale check workflow (#1861) Signed-off-by: Sun, Xuehao --- .github/workflows/daily_check_issue_and_pr.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/daily_check_issue_and_pr.yml 
b/.github/workflows/daily_check_issue_and_pr.yml index 4664b250c0..21e1c1d835 100644 --- a/.github/workflows/daily_check_issue_and_pr.yml +++ b/.github/workflows/daily_check_issue_and_pr.yml @@ -26,3 +26,4 @@ jobs: close-pr-message: "This PR was closed because it has been stalled for 7 days with no activity." repo-token: ${{ secrets.ACTION_TOKEN }} start-date: "2025-03-01T00:00:00Z" + exempt-issue-labels: "Backlog" From 7bb05585b6b6ccd6154c68be4405893dc8146f25 Mon Sep 17 00:00:00 2001 From: Melanie Hart Buehler Date: Wed, 7 May 2025 18:05:30 -0700 Subject: [PATCH 033/217] Move file processing from UI to DocSum backend service (#1899) Signed-off-by: Melanie Buehler --- DocSum/docker_compose/amd/gpu/rocm/README.md | 24 +- .../docker_compose/intel/cpu/xeon/README.md | 26 ++- .../docker_compose/intel/hpu/gaudi/README.md | 24 +- DocSum/docsum.py | 44 ++-- DocSum/tests/test_compose_on_gaudi.sh | 28 +++ DocSum/tests/test_compose_on_xeon.sh | 28 +++ DocSum/tests/test_compose_tgi_on_gaudi.sh | 28 +++ DocSum/tests/test_compose_tgi_on_xeon.sh | 28 +++ DocSum/ui/gradio/docsum_ui_gradio.py | 220 ++++++++---------- 9 files changed, 301 insertions(+), 149 deletions(-) diff --git a/DocSum/docker_compose/amd/gpu/rocm/README.md b/DocSum/docker_compose/amd/gpu/rocm/README.md index fe37f39d57..da9d7d749f 100644 --- a/DocSum/docker_compose/amd/gpu/rocm/README.md +++ b/DocSum/docker_compose/amd/gpu/rocm/README.md @@ -239,13 +239,16 @@ curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -F "language=en" \ ``` +Note that the `-F "messages="` flag is required, even for file uploads. Multiple files can be uploaded in a single call with multiple `-F "files=@/path"` inputs. + ### Query with audio and video -> Audio and Video file uploads are not supported in docsum with curl request, please use the Gradio-UI. +> Audio and video can be passed as base64 strings or uploaded by providing a local file path. 
Audio: ```bash +# Send base64 string curl -X POST http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: application/json" \ -d '{"type": "audio", "messages": "UklGRigAAABXQVZFZm10IBIAAAABAAEARKwAAIhYAQACABAAAABkYXRhAgAAAAEA"}' @@ -257,11 +260,21 @@ curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -F "max_tokens=32" \ -F "language=en" \ -F "stream=True" + +# Upload file +curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ + -H "Content-Type: multipart/form-data" \ + -F "type=audio" \ + -F "messages=" \ + -F "files=@/path to your file (.mp3, .wav)" \ + -F "max_tokens=32" \ + -F "language=en" ``` Video: ```bash +# Send base64 string curl -X POST http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -H "Content-Type: application/json" \ -d '{"type": "video", "messages": "convert your video to base64 data type"}' @@ -273,6 +286,15 @@ curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ -F "max_tokens=32" \ -F "language=en" \ -F "stream=True" + +# Upload file +curl http://${HOST_IP}:${DOCSUM_BACKEND_SERVER_PORT}/v1/docsum \ + -H "Content-Type: multipart/form-data" \ + -F "type=video" \ + -F "messages=" \ + -F "files=@/path to your file (.mp4)" \ + -F "max_tokens=32" \ + -F "language=en" ``` ### Query with long context diff --git a/DocSum/docker_compose/intel/cpu/xeon/README.md b/DocSum/docker_compose/intel/cpu/xeon/README.md index 0930ab227e..06d3e4378d 100644 --- a/DocSum/docker_compose/intel/cpu/xeon/README.md +++ b/DocSum/docker_compose/intel/cpu/xeon/README.md @@ -156,16 +156,19 @@ curl http://${host_ip}:8888/v1/docsum \ -F "messages=" \ -F "files=@/path to your file (.txt, .docx, .pdf)" \ -F "max_tokens=32" \ - -F "language=en" \ + -F "language=en" ``` +Note that the `-F "messages="` flag is required, even for file uploads. Multiple files can be uploaded in a single call with multiple `-F "files=@/path"` inputs. 
+ ### Query with audio and video -> Audio and Video file uploads are not supported in docsum with curl request, please use the Gradio-UI. +> Audio and video can be passed as base64 strings or uploaded by providing a local file path. Audio: ```bash +# Send base64 string curl -X POST http://${host_ip}:8888/v1/docsum \ -H "Content-Type: application/json" \ -d '{"type": "audio", "messages": "UklGRigAAABXQVZFZm10IBIAAAABAAEARKwAAIhYAQACABAAAABkYXRhAgAAAAEA"}' @@ -177,11 +180,21 @@ curl http://${host_ip}:8888/v1/docsum \ -F "max_tokens=32" \ -F "language=en" \ -F "stream=True" + +# Upload file +curl http://${host_ip}:8888/v1/docsum \ + -H "Content-Type: multipart/form-data" \ + -F "type=audio" \ + -F "messages=" \ + -F "files=@/path to your file (.mp3, .wav)" \ + -F "max_tokens=32" \ + -F "language=en" ``` Video: ```bash +# Send base64 string curl -X POST http://${host_ip}:8888/v1/docsum \ -H "Content-Type: application/json" \ -d '{"type": "video", "messages": "convert your video to base64 data type"}' @@ -193,6 +206,15 @@ curl http://${host_ip}:8888/v1/docsum \ -F "max_tokens=32" \ -F "language=en" \ -F "stream=True" + +# Upload file +curl http://${host_ip}:8888/v1/docsum \ + -H "Content-Type: multipart/form-data" \ + -F "type=video" \ + -F "messages=" \ + -F "files=@/path to your file (.mp4)" \ + -F "max_tokens=32" \ + -F "language=en" ``` ### Query with long context diff --git a/DocSum/docker_compose/intel/hpu/gaudi/README.md b/DocSum/docker_compose/intel/hpu/gaudi/README.md index 7b552fd5b8..5cf9e77477 100644 --- a/DocSum/docker_compose/intel/hpu/gaudi/README.md +++ b/DocSum/docker_compose/intel/hpu/gaudi/README.md @@ -161,13 +161,16 @@ curl http://${host_ip}:8888/v1/docsum \ -F "language=en" \ ``` +Note that the `-F "messages="` flag is required, even for file uploads. Multiple files can be uploaded in a single call with multiple `-F "files=@/path"` inputs. 
+ ### Query with audio and video -> Audio and Video file uploads are not supported in docsum with curl request, please use the Gradio-UI. +> Audio and video can be passed as base64 strings or uploaded by providing a local file path. Audio: ```bash +# Send base64 string curl -X POST http://${host_ip}:8888/v1/docsum \ -H "Content-Type: application/json" \ -d '{"type": "audio", "messages": "UklGRigAAABXQVZFZm10IBIAAAABAAEARKwAAIhYAQACABAAAABkYXRhAgAAAAEA"}' @@ -179,11 +182,21 @@ curl http://${host_ip}:8888/v1/docsum \ -F "max_tokens=32" \ -F "language=en" \ -F "stream=True" + +# Upload file +curl http://${host_ip}:8888/v1/docsum \ + -H "Content-Type: multipart/form-data" \ + -F "type=audio" \ + -F "messages=" \ + -F "files=@/path to your file (.mp3, .wav)" \ + -F "max_tokens=32" \ + -F "language=en" ``` Video: ```bash +# Send base64 string curl -X POST http://${host_ip}:8888/v1/docsum \ -H "Content-Type: application/json" \ -d '{"type": "video", "messages": "convert your video to base64 data type"}' @@ -195,6 +208,15 @@ curl http://${host_ip}:8888/v1/docsum \ -F "max_tokens=32" \ -F "language=en" \ -F "stream=True" + +# Upload file +curl http://${host_ip}:8888/v1/docsum \ + -H "Content-Type: multipart/form-data" \ + -F "type=video" \ + -F "messages=" \ + -F "files=@/path to your file (.mp4)" \ + -F "max_tokens=32" \ + -F "language=en" ``` ### Query with long context diff --git a/DocSum/docsum.py b/DocSum/docsum.py index 34e58c1df0..786e48a264 100644 --- a/DocSum/docsum.py +++ b/DocSum/docsum.py @@ -63,6 +63,20 @@ def read_pdf(file): return docs +def encode_file_to_base64(file_path): + """Encode the content of a file to a base64 string. + + Args: + file_path (str): The path to the file to be encoded. + + Returns: + str: The base64 encoded string of the file content. 
+ """ + with open(file_path, "rb") as f: + base64_str = base64.b64encode(f.read()).decode("utf-8") + return base64_str + + def video2audio( video_base64: str, ) -> str: @@ -163,7 +177,6 @@ def add_remote_service(self): async def handle_request(self, request: Request, files: List[UploadFile] = File(default=None)): """Accept pure text, or files .txt/.pdf.docx, audio/video base64 string.""" - if "application/json" in request.headers.get("content-type"): data = await request.json() stream_opt = data.get("stream", True) @@ -193,25 +206,24 @@ async def handle_request(self, request: Request, files: List[UploadFile] = File( uid = str(uuid.uuid4()) file_path = f"/tmp/{uid}" - if data_type is not None and data_type in ["audio", "video"]: - raise ValueError( - "Audio and Video file uploads are not supported in docsum with curl request, \ - please use the UI or pass base64 string of the content directly." - ) - - else: - import aiofiles + import aiofiles - async with aiofiles.open(file_path, "wb") as f: - await f.write(await file.read()) + async with aiofiles.open(file_path, "wb") as f: + await f.write(await file.read()) + if data_type == "text": docs = read_text_from_file(file, file_path) - os.remove(file_path) + elif data_type in ["audio", "video"]: + docs = encode_file_to_base64(file_path) + else: + raise ValueError(f"Data type not recognized: {data_type}") + + os.remove(file_path) - if isinstance(docs, list): - file_summaries.extend(docs) - else: - file_summaries.append(docs) + if isinstance(docs, list): + file_summaries.extend(docs) + else: + file_summaries.append(docs) if file_summaries: prompt = handle_message(chat_request.messages) + "\n".join(file_summaries) diff --git a/DocSum/tests/test_compose_on_gaudi.sh b/DocSum/tests/test_compose_on_gaudi.sh index aecdc006c7..3c0f3d695b 100644 --- a/DocSum/tests/test_compose_on_gaudi.sh +++ b/DocSum/tests/test_compose_on_gaudi.sh @@ -237,6 +237,20 @@ function validate_megaservice_multimedia() { "language=en" \ "stream=False" + 
echo ">>> Checking audio data in form format, upload file" + validate_service \ + "${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" \ + "well" \ + "docsum-gaudi-backend-server" \ + "docsum-gaudi-backend-server" \ + "media" "" \ + "type=audio" \ + "messages=" \ + "files=@$ROOT_FOLDER/data/test.wav" \ + "max_tokens=32" \ + "language=en" \ + "stream=False" + echo ">>> Checking video data in json format" validate_service \ "${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" \ @@ -258,6 +272,20 @@ function validate_megaservice_multimedia() { "max_tokens=32" \ "language=en" \ "stream=False" + + echo ">>> Checking video data in form format, upload file" + validate_service \ + "${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" \ + "bye" \ + "docsum-gaudi-backend-server" \ + "docsum-gaudi-backend-server" \ + "media" "" \ + "type=video" \ + "messages=" \ + "files=@$ROOT_FOLDER/data/test.mp4" \ + "max_tokens=32" \ + "language=en" \ + "stream=False" } function validate_megaservice_long_text() { diff --git a/DocSum/tests/test_compose_on_xeon.sh b/DocSum/tests/test_compose_on_xeon.sh index 5ff7add6be..c231e7264e 100644 --- a/DocSum/tests/test_compose_on_xeon.sh +++ b/DocSum/tests/test_compose_on_xeon.sh @@ -237,6 +237,20 @@ function validate_megaservice_multimedia() { "language=en" \ "stream=False" + echo ">>> Checking audio data in form format, upload file" + validate_service \ + "${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" \ + "well" \ + "docsum-xeon-backend-server" \ + "docsum-xeon-backend-server" \ + "media" "" \ + "type=audio" \ + "messages=" \ + "files=@$ROOT_FOLDER/data/test.wav" \ + "max_tokens=32" \ + "language=en" \ + "stream=False" + echo ">>> Checking video data in json format" validate_service \ "${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" \ @@ -258,6 +272,20 @@ function validate_megaservice_multimedia() { "max_tokens=32" \ "language=en" \ "stream=False" + + echo ">>> Checking video data in form format, upload file" + validate_service \ + 
"${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" \ + "bye" \ + "docsum-xeon-backend-server" \ + "docsum-xeon-backend-server" \ + "media" "" \ + "type=video" \ + "messages=" \ + "files=@$ROOT_FOLDER/data/test.mp4" \ + "max_tokens=32" \ + "language=en" \ + "stream=False" } function validate_megaservice_long_text() { diff --git a/DocSum/tests/test_compose_tgi_on_gaudi.sh b/DocSum/tests/test_compose_tgi_on_gaudi.sh index 6859e5354a..06dd9b7292 100644 --- a/DocSum/tests/test_compose_tgi_on_gaudi.sh +++ b/DocSum/tests/test_compose_tgi_on_gaudi.sh @@ -229,6 +229,20 @@ function validate_megaservice_multimedia() { "language=en" \ "stream=False" + echo ">>> Checking audio data in form format, upload file" + validate_service \ + "${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" \ + "well" \ + "docsum-gaudi-backend-server" \ + "docsum-gaudi-backend-server" \ + "media" "" \ + "type=audio" \ + "messages=" \ + "files=@$ROOT_FOLDER/data/test.wav" \ + "max_tokens=32" \ + "language=en" \ + "stream=False" + echo ">>> Checking video data in json format" validate_service \ "${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" \ @@ -250,6 +264,20 @@ function validate_megaservice_multimedia() { "max_tokens=32" \ "language=en" \ "stream=False" + + echo ">>> Checking video data in form format, upload file" + validate_service \ + "${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" \ + "bye" \ + "docsum-gaudi-backend-server" \ + "docsum-gaudi-backend-server" \ + "media" "" \ + "type=video" \ + "messages=" \ + "files=@$ROOT_FOLDER/data/test.mp4" \ + "max_tokens=32" \ + "language=en" \ + "stream=False" } function validate_megaservice_long_text() { diff --git a/DocSum/tests/test_compose_tgi_on_xeon.sh b/DocSum/tests/test_compose_tgi_on_xeon.sh index f94eabf0c8..52edea31f8 100644 --- a/DocSum/tests/test_compose_tgi_on_xeon.sh +++ b/DocSum/tests/test_compose_tgi_on_xeon.sh @@ -229,6 +229,20 @@ function validate_megaservice_multimedia() { "language=en" \ "stream=False" + echo ">>> Checking audio data in form 
format, upload file" + validate_service \ + "${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" \ + "well" \ + "docsum-xeon-backend-server" \ + "docsum-xeon-backend-server" \ + "media" "" \ + "type=audio" \ + "messages=" \ + "files=@$ROOT_FOLDER/data/test.wav" \ + "max_tokens=32" \ + "language=en" \ + "stream=False" + echo ">>> Checking video data in json format" validate_service \ "${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" \ @@ -250,6 +264,20 @@ function validate_megaservice_multimedia() { "max_tokens=32" \ "language=en" \ "stream=False" + + echo ">>> Checking video data in form format, upload file" + validate_service \ + "${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" \ + "bye" \ + "docsum-xeon-backend-server" \ + "docsum-xeon-backend-server" \ + "media" "" \ + "type=video" \ + "messages=" \ + "files=@$ROOT_FOLDER/data/test.mp4" \ + "max_tokens=32" \ + "language=en" \ + "stream=False" } function validate_megaservice_long_text() { diff --git a/DocSum/ui/gradio/docsum_ui_gradio.py b/DocSum/ui/gradio/docsum_ui_gradio.py index 5bb9a7091c..8d8a440ce3 100644 --- a/DocSum/ui/gradio/docsum_ui_gradio.py +++ b/DocSum/ui/gradio/docsum_ui_gradio.py @@ -22,76 +22,12 @@ class DocSumUI: def __init__(self): """Initialize the DocSumUI class with accepted file types, headers, and backend service endpoint.""" - self.ACCEPTED_FILE_TYPES = ["pdf", "doc", "docx"] + self.ACCEPTED_TEXT_FILE_TYPES = [".pdf", ".doc", ".docx"] + self.ACCEPTED_AUDIO_FILE_TYPES = [".mp3", ".wav"] + self.ACCEPTED_VIDEO_FILE_TYPES = [".mp4"] self.HEADERS = {"Content-Type": "application/json"} self.BACKEND_SERVICE_ENDPOINT = os.getenv("BACKEND_SERVICE_ENDPOINT", "http://localhost:8888/v1/docsum") - def encode_file_to_base64(self, file_path): - """Encode the content of a file to a base64 string. - - Args: - file_path (str): The path to the file to be encoded. - - Returns: - str: The base64 encoded string of the file content. 
- """ - logger.info(">>> Encoding file to base64: %s", file_path) - with open(file_path, "rb") as f: - base64_str = base64.b64encode(f.read()).decode("utf-8") - return base64_str - - def read_file(self, file): - """Read and process the content of a file. - - Args: - file (file-like object): The file to be read. - - Returns: - str: The content of the file or an error message if the file type is unsupported. - """ - self.page_content = "" - self.pages = [] - - if file.name.endswith(".pdf"): - loader = PyPDFLoader(file) - elif file.name.endswith((".doc", ".docx")): - loader = Docx2txtLoader(file) - else: - msg = f"Unsupported file type '{file.name}'. Choose from {self.ACCEPTED_FILE_TYPES}" - logger.error(msg) - return msg - - for page in loader.lazy_load(): - self.page_content += page.page_content - - return self.page_content - - def read_audio_file(self, file): - """Read and process the content of an audio file. - - Args: - file (file-like object): The audio file to be read. - - Returns: - str: The base64 encoded content of the audio file. - """ - logger.info(">>> Reading audio file: %s", file.name) - base64_str = self.encode_file_to_base64(file) - return base64_str - - def read_video_file(self, file): - """Read and process the content of a video file. - - Args: - file (file-like object): The video file to be read. - - Returns: - str: The base64 encoded content of the video file. 
- """ - logger.info(">>> Reading video file: %s", file.name) - base64_str = self.encode_file_to_base64(file) - return base64_str - def is_valid_url(self, url): try: result = urlparse(url) @@ -128,78 +64,107 @@ def read_url(self, url): return self.page_content - def generate_summary(self, doc_content, document_type="text"): + def process_response(self, response): + if response.status_code == 200: + try: + # Check if the specific log path is in the response text + if "/logs/LLMChain/final_output" in response.text: + # Extract the relevant part of the response + temp = ast.literal_eval( + [ + i.split("data: ")[1] + for i in response.text.split("\n\n") + if "/logs/LLMChain/final_output" in i + ][0] + )["ops"] + + # Find the final output value + final_output = [i["value"] for i in temp if i["path"] == "/logs/LLMChain/final_output"][0] + return final_output["text"] + else: + # Perform string replacements to clean the response text + cleaned_text = response.text + replacements = [ + ("'\n\ndata: b'", ""), + ("data: b' ", ""), + ("'\n\ndata: [DONE]\n\n", ""), + ("\n\ndata: b", ""), + ("'\n\n", ""), + ("'\n", ""), + ('''\'"''', ""), + ] + for old, new in replacements: + cleaned_text = cleaned_text.replace(old, new) + return cleaned_text + except (IndexError, KeyError, ValueError) as e: + # Handle potential errors during parsing + logger.error("Error parsing response: %s", e) + return response.text + + def generate_summary(self, document, document_type="text"): """Generate a summary for the given document content. Args: - doc_content (str): The content of the document. + document (str): The content or path of the document. document_type (str): The type of the document (default is "text"). Returns: str: The generated summary or an error message. 
""" - logger.info(">>> BACKEND_SERVICE_ENDPOINT - %s", self.BACKEND_SERVICE_ENDPOINT) - data = {"max_tokens": 256, "type": document_type, "messages": doc_content} + data = {"max_tokens": 256, "type": document_type, "messages": ""} + + if os.path.exists(document): + file_header = "text/plain" + file_ext = os.path.splitext(document)[-1] + if file_ext == ".pdf": + file_header = "application/pdf" + elif file_ext in [".doc", ".docx"]: + file_header = "application/octet-stream" + elif file_ext in self.ACCEPTED_AUDIO_FILE_TYPES + self.ACCEPTED_VIDEO_FILE_TYPES: + file_header = f"{document_type}/{file_ext[-3:]}" + files = {"files": (os.path.basename(document), open(document, "rb"), file_header)} + try: + response = requests.post( + url=self.BACKEND_SERVICE_ENDPOINT, + headers={}, + files=files, + data=data, + proxies={"http_proxy": os.environ["http_proxy"], "https_proxy": os.environ["https_proxy"]}, + ) - try: - response = requests.post( - url=self.BACKEND_SERVICE_ENDPOINT, - headers=self.HEADERS, - data=json.dumps(data), - proxies={"http_proxy": os.environ["http_proxy"], "https_proxy": os.environ["https_proxy"]}, - ) + return self.process_response(response) + + except requests.exceptions.RequestException as e: + logger.error("Request exception: %s", e) + return str(e) + + else: + data["messages"] = document + try: + response = requests.post( + url=self.BACKEND_SERVICE_ENDPOINT, + headers=self.HEADERS, + data=json.dumps(data), + proxies={"http_proxy": os.environ["http_proxy"], "https_proxy": os.environ["https_proxy"]}, + ) + + return self.process_response(response) - if response.status_code == 200: - try: - # Check if the specific log path is in the response text - if "/logs/LLMChain/final_output" in response.text: - # Extract the relevant part of the response - temp = ast.literal_eval( - [ - i.split("data: ")[1] - for i in response.text.split("\n\n") - if "/logs/LLMChain/final_output" in i - ][0] - )["ops"] - - # Find the final output value - final_output = [i["value"] 
for i in temp if i["path"] == "/logs/LLMChain/final_output"][0] - return final_output["text"] - else: - # Perform string replacements to clean the response text - cleaned_text = response.text - replacements = [ - ("'\n\ndata: b'", ""), - ("data: b' ", ""), - ("'\n\ndata: [DONE]\n\n", ""), - ("\n\ndata: b", ""), - ("'\n\n", ""), - ("'\n", ""), - ('''\'"''', ""), - ] - for old, new in replacements: - cleaned_text = cleaned_text.replace(old, new) - return cleaned_text - except (IndexError, KeyError, ValueError) as e: - # Handle potential errors during parsing - logger.error("Error parsing response: %s", e) - return response.text - - except requests.exceptions.RequestException as e: - logger.error("Request exception: %s", e) - return str(e) + except requests.exceptions.RequestException as e: + logger.error("Request exception: %s", e) + return str(e) return str(response.status_code) - def create_upload_ui(self, label, file_types, process_function, document_type="text"): + def create_upload_ui(self, label, file_types, document_type="text"): """Create a Gradio UI for file uploads. Args: label (str): The label for the upload button. file_types (list): The list of accepted file types. - process_function (function): The function to process the uploaded file. + document_type (str): The document type (text, audio, or video). Default is text. Returns: gr.Blocks: The Gradio Blocks object representing the upload UI. 
@@ -214,7 +179,7 @@ def create_upload_ui(self, label, file_types, process_function, document_type="t label="Text Summary", placeholder="Summarized text will be displayed here" ) upload_btn.upload( - lambda file: self.generate_summary(process_function(file), document_type=document_type), + lambda file: self.generate_summary(file, document_type=document_type), upload_btn, generated_text, ) @@ -263,24 +228,21 @@ def render(self): # File Upload UI file_ui = self.create_upload_ui( - label="Please upload a document (.pdf, .doc, .docx)", - file_types=[".pdf", ".doc", ".docx"], - process_function=self.read_file, + label=f"Please upload a document ({', '.join(self.ACCEPTED_TEXT_FILE_TYPES)})", + file_types=self.ACCEPTED_TEXT_FILE_TYPES, ) # Audio Upload UI audio_ui = self.create_upload_ui( - label="Please upload audio file (.wav, .mp3)", - file_types=[".wav", ".mp3"], - process_function=self.read_audio_file, + label=f"Please upload audio file ({', '.join(self.ACCEPTED_AUDIO_FILE_TYPES)})", + file_types=self.ACCEPTED_AUDIO_FILE_TYPES, document_type="audio", ) # Video Upload UI video_ui = self.create_upload_ui( - label="Please upload Video file (.mp4)", - file_types=[".mp4"], - process_function=self.read_video_file, + label=f"Please upload video file ({', '.join(self.ACCEPTED_VIDEO_FILE_TYPES)})", + file_types=self.ACCEPTED_VIDEO_FILE_TYPES, document_type="video", ) From 05011ebaac7977d1d179927c6e30cc582f976f06 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Thu, 8 May 2025 09:14:44 +0800 Subject: [PATCH 034/217] Integrate AudioQnA set_env to ut scripts. 
(#1897) Signed-off-by: ZePan110 --- .../docker_compose/amd/gpu/rocm/set_env.sh | 4 +- .../amd/gpu/rocm/set_env_vllm.sh | 4 +- .../docker_compose/intel/cpu/xeon/set_env.sh | 3 +- AudioQnA/tests/README.md | 45 +++++++++++++++++++ .../tests/test_compose_multilang_on_xeon.sh | 15 +------ AudioQnA/tests/test_compose_on_gaudi.sh | 18 +------- AudioQnA/tests/test_compose_on_rocm.sh | 15 +------ AudioQnA/tests/test_compose_on_xeon.sh | 15 +------ AudioQnA/tests/test_compose_tgi_on_gaudi.sh | 15 +------ AudioQnA/tests/test_compose_tgi_on_xeon.sh | 15 +------ AudioQnA/tests/test_compose_vllm_on_rocm.sh | 22 +-------- 11 files changed, 58 insertions(+), 113 deletions(-) create mode 100644 AudioQnA/tests/README.md diff --git a/AudioQnA/docker_compose/amd/gpu/rocm/set_env.sh b/AudioQnA/docker_compose/amd/gpu/rocm/set_env.sh index 4ee4320b03..d4a0bda6d1 100644 --- a/AudioQnA/docker_compose/amd/gpu/rocm/set_env.sh +++ b/AudioQnA/docker_compose/amd/gpu/rocm/set_env.sh @@ -6,8 +6,8 @@ # export host_ip= # export host_ip=$(hostname -I | awk '{print $1}') -export host_ip="192.165.1.21" -export HUGGINGFACEHUB_API_TOKEN=${YOUR_HUGGINGFACEHUB_API_TOKEN} +export host_ip=${ip_address} +export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} # export LLM_MODEL_ID=Intel/neural-chat-7b-v3-3 diff --git a/AudioQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/AudioQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh index 2eb724dc6e..9cd8934f49 100644 --- a/AudioQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/AudioQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -6,8 +6,8 @@ # export host_ip= # export host_ip=$(hostname -I | awk '{print $1}') -export host_ip="" -export external_host_ip="" +export host_ip=${ip_address} +export external_host_ip=${ip_address} export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export HF_CACHE_DIR="./data" export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" diff --git a/AudioQnA/docker_compose/intel/cpu/xeon/set_env.sh 
b/AudioQnA/docker_compose/intel/cpu/xeon/set_env.sh index adc652f169..4a63ef65b3 100644 --- a/AudioQnA/docker_compose/intel/cpu/xeon/set_env.sh +++ b/AudioQnA/docker_compose/intel/cpu/xeon/set_env.sh @@ -14,7 +14,8 @@ export MEGA_SERVICE_HOST_IP=${host_ip} export WHISPER_SERVER_HOST_IP=${host_ip} export SPEECHT5_SERVER_HOST_IP=${host_ip} export LLM_SERVER_HOST_IP=${host_ip} - +export GPT_SOVITS_SERVER_HOST_IP=${host_ip} +export GPT_SOVITS_SERVER_PORT=9880 export WHISPER_SERVER_PORT=7066 export SPEECHT5_SERVER_PORT=7055 export LLM_SERVER_PORT=3006 diff --git a/AudioQnA/tests/README.md b/AudioQnA/tests/README.md new file mode 100644 index 0000000000..390c182447 --- /dev/null +++ b/AudioQnA/tests/README.md @@ -0,0 +1,45 @@ +# AudioQnA E2E test scripts + +## Set the required environment variable + +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +## Run test + +On Intel Xeon with TGI: + +```bash +bash test_compose_tgi_on_xeon.sh +``` + +On Intel Xeon with vLLM: + +```bash +bash test_compose_on_xeon.sh +``` + +On Intel Gaudi with TGI: + +```bash +bash test_compose_tgi_on_gaudi.sh +``` + +On Intel Gaudi with vLLM: + +```bash +bash test_compose_on_gaudi.sh +``` + +On AMD ROCm with TGI: + +```bash +bash test_compose_on_rocm.sh +``` + +On AMD ROCm with vLLM: + +```bash +bash test_compose_vllm_on_rocm.sh +``` diff --git a/AudioQnA/tests/test_compose_multilang_on_xeon.sh b/AudioQnA/tests/test_compose_multilang_on_xeon.sh index 2bf05b3529..f958c91c19 100644 --- a/AudioQnA/tests/test_compose_multilang_on_xeon.sh +++ b/AudioQnA/tests/test_compose_multilang_on_xeon.sh @@ -40,21 +40,8 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon/ - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export LLM_MODEL_ID=meta-llama/Meta-Llama-3-8B-Instruct - - export MEGA_SERVICE_HOST_IP=${ip_address} - export WHISPER_SERVER_HOST_IP=${ip_address} - export GPT_SOVITS_SERVER_HOST_IP=${ip_address} - 
export LLM_SERVER_HOST_IP=${ip_address} - - export WHISPER_SERVER_PORT=7066 - export GPT_SOVITS_SERVER_PORT=9880 - export LLM_SERVER_PORT=3006 - - export BACKEND_SERVICE_ENDPOINT=http://${ip_address}:3008/v1/audioqna export host_ip=${ip_address} - + source set_env.sh # sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env # Start Docker Containers diff --git a/AudioQnA/tests/test_compose_on_gaudi.sh b/AudioQnA/tests/test_compose_on_gaudi.sh index d999cf5183..e2d58b72e9 100644 --- a/AudioQnA/tests/test_compose_on_gaudi.sh +++ b/AudioQnA/tests/test_compose_on_gaudi.sh @@ -40,24 +40,8 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export LLM_MODEL_ID=meta-llama/Meta-Llama-3-8B-Instruct - export NUM_CARDS=1 - export BLOCK_SIZE=128 - export MAX_NUM_SEQS=256 - export MAX_SEQ_LEN_TO_CAPTURE=2048 - - export MEGA_SERVICE_HOST_IP=${ip_address} - export WHISPER_SERVER_HOST_IP=${ip_address} - export SPEECHT5_SERVER_HOST_IP=${ip_address} - export LLM_SERVER_HOST_IP=${ip_address} - - export WHISPER_SERVER_PORT=7066 - export SPEECHT5_SERVER_PORT=7055 - export LLM_SERVER_PORT=3006 - - export BACKEND_SERVICE_ENDPOINT=http://${ip_address}:3008/v1/audioqna export host_ip=${ip_address} + source set_env.sh # sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env # Start Docker Containers diff --git a/AudioQnA/tests/test_compose_on_rocm.sh b/AudioQnA/tests/test_compose_on_rocm.sh index 117e92971d..f30abe355f 100644 --- a/AudioQnA/tests/test_compose_on_rocm.sh +++ b/AudioQnA/tests/test_compose_on_rocm.sh @@ -35,20 +35,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/amd/gpu/rocm/ - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export LLM_MODEL_ID=Intel/neural-chat-7b-v3-3 - - export MEGA_SERVICE_HOST_IP=${ip_address} - export WHISPER_SERVER_HOST_IP=${ip_address} - export 
SPEECHT5_SERVER_HOST_IP=${ip_address} - export LLM_SERVER_HOST_IP=${ip_address} - - export WHISPER_SERVER_PORT=7066 - export SPEECHT5_SERVER_PORT=7055 - export LLM_SERVER_PORT=3006 - - export BACKEND_SERVICE_ENDPOINT=http://${ip_address}:3008/v1/audioqna - + source set_env.sh # Start Docker Containers docker compose up -d > ${LOG_PATH}/start_services_with_compose.log n=0 diff --git a/AudioQnA/tests/test_compose_on_xeon.sh b/AudioQnA/tests/test_compose_on_xeon.sh index 8abda55023..9803591bc9 100644 --- a/AudioQnA/tests/test_compose_on_xeon.sh +++ b/AudioQnA/tests/test_compose_on_xeon.sh @@ -40,21 +40,8 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon/ - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export LLM_MODEL_ID=meta-llama/Meta-Llama-3-8B-Instruct - - export MEGA_SERVICE_HOST_IP=${ip_address} - export WHISPER_SERVER_HOST_IP=${ip_address} - export SPEECHT5_SERVER_HOST_IP=${ip_address} - export LLM_SERVER_HOST_IP=${ip_address} - - export WHISPER_SERVER_PORT=7066 - export SPEECHT5_SERVER_PORT=7055 - export LLM_SERVER_PORT=3006 - - export BACKEND_SERVICE_ENDPOINT=http://${ip_address}:3008/v1/audioqna export host_ip=${ip_address} - + source set_env.sh # sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env # Start Docker Containers diff --git a/AudioQnA/tests/test_compose_tgi_on_gaudi.sh b/AudioQnA/tests/test_compose_tgi_on_gaudi.sh index 156dce92cd..dd68dfe770 100644 --- a/AudioQnA/tests/test_compose_tgi_on_gaudi.sh +++ b/AudioQnA/tests/test_compose_tgi_on_gaudi.sh @@ -34,21 +34,8 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export LLM_MODEL_ID=meta-llama/Meta-Llama-3-8B-Instruct - - export MEGA_SERVICE_HOST_IP=${ip_address} - export WHISPER_SERVER_HOST_IP=${ip_address} - export SPEECHT5_SERVER_HOST_IP=${ip_address} - export 
LLM_SERVER_HOST_IP=${ip_address} - - export WHISPER_SERVER_PORT=7066 - export SPEECHT5_SERVER_PORT=7055 - export LLM_SERVER_PORT=3006 - - export BACKEND_SERVICE_ENDPOINT=http://${ip_address}:3008/v1/audioqna export host_ip=${ip_address} - + source set_env.sh # Start Docker Containers docker compose -f compose_tgi.yaml up -d > ${LOG_PATH}/start_services_with_compose.log n=0 diff --git a/AudioQnA/tests/test_compose_tgi_on_xeon.sh b/AudioQnA/tests/test_compose_tgi_on_xeon.sh index 3190818124..0e0b2d571a 100644 --- a/AudioQnA/tests/test_compose_tgi_on_xeon.sh +++ b/AudioQnA/tests/test_compose_tgi_on_xeon.sh @@ -34,21 +34,8 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon/ - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export LLM_MODEL_ID=meta-llama/Meta-Llama-3-8B-Instruct - - export MEGA_SERVICE_HOST_IP=${ip_address} - export WHISPER_SERVER_HOST_IP=${ip_address} - export SPEECHT5_SERVER_HOST_IP=${ip_address} - export LLM_SERVER_HOST_IP=${ip_address} - - export WHISPER_SERVER_PORT=7066 - export SPEECHT5_SERVER_PORT=7055 - export LLM_SERVER_PORT=3006 - - export BACKEND_SERVICE_ENDPOINT=http://${ip_address}:3008/v1/audioqna export host_ip=${ip_address} - + source set_env.sh # Start Docker Containers docker compose -f compose_tgi.yaml up -d > ${LOG_PATH}/start_services_with_compose.log n=0 diff --git a/AudioQnA/tests/test_compose_vllm_on_rocm.sh b/AudioQnA/tests/test_compose_vllm_on_rocm.sh index 982bc74de9..924c8c25fa 100644 --- a/AudioQnA/tests/test_compose_vllm_on_rocm.sh +++ b/AudioQnA/tests/test_compose_vllm_on_rocm.sh @@ -33,27 +33,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/amd/gpu/rocm/ - - export host_ip=${ip_address} - export external_host_ip=${ip_address} - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export HF_CACHE_DIR="./data" - export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" - export VLLM_SERVICE_PORT="8081" - - 
export MEGA_SERVICE_HOST_IP=${host_ip} - export WHISPER_SERVER_HOST_IP=${host_ip} - export SPEECHT5_SERVER_HOST_IP=${host_ip} - export LLM_SERVER_HOST_IP=${host_ip} - - export WHISPER_SERVER_PORT=7066 - export SPEECHT5_SERVER_PORT=7055 - export LLM_SERVER_PORT=${VLLM_SERVICE_PORT} - export BACKEND_SERVICE_PORT=3008 - export FRONTEND_SERVICE_PORT=5173 - - export BACKEND_SERVICE_ENDPOINT=http://${external_host_ip}:${BACKEND_SERVICE_PORT}/v1/audioqna - + source set_env_vllm.sh sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env # Start Docker Containers From b467a13ec3b9e05d172ba886266a5cbb4dbdec29 Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Thu, 8 May 2025 10:34:36 +0800 Subject: [PATCH 035/217] daily update vLLM&vLLM-fork version (#1914) Signed-off-by: Sun, Xuehao --- .github/workflows/_build_image.yml | 6 +- .../workflows/daily-update-vllm-version.yml | 93 +++++++++++++++++++ AgentQnA/tests/step1_build_images.sh | 4 +- AudioQnA/tests/test_compose_on_gaudi.sh | 6 +- ChatQnA/tests/test_compose_faqgen_on_gaudi.sh | 4 +- .../tests/test_compose_guardrails_on_gaudi.sh | 4 +- ChatQnA/tests/test_compose_on_gaudi.sh | 4 +- .../test_compose_without_rerank_on_gaudi.sh | 4 +- CodeGen/tests/test_compose_on_gaudi.sh | 6 +- CodeTrans/tests/test_compose_on_gaudi.sh | 4 +- DocSum/tests/test_compose_on_gaudi.sh | 4 +- FinanceAgent/tests/test_compose_on_gaudi.sh | 6 +- 12 files changed, 119 insertions(+), 26 deletions(-) create mode 100644 .github/workflows/daily-update-vllm-version.yml diff --git a/.github/workflows/_build_image.yml b/.github/workflows/_build_image.yml index 4ce00db5ea..5d55d474a7 100644 --- a/.github/workflows/_build_image.yml +++ b/.github/workflows/_build_image.yml @@ -83,9 +83,9 @@ jobs: fi if [[ $(grep -c "vllm-gaudi:" ${docker_compose_path}) != 0 ]]; then git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork - VLLM_VER=v0.6.6.post1+Gaudi-1.20.0 - echo "Check out vLLM tag ${VLLM_VER}" - git checkout ${VLLM_VER} &> /dev/null && 
cd ../ + VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 + echo "Check out vLLM tag ${VLLM_FORK_VER}" + git checkout ${VLLM_FORK_VER} &> /dev/null && cd ../ fi git clone --depth 1 --branch ${{ inputs.opea_branch }} https://github.com/opea-project/GenAIComps.git cd GenAIComps && git rev-parse HEAD && cd ../ diff --git a/.github/workflows/daily-update-vllm-version.yml b/.github/workflows/daily-update-vllm-version.yml new file mode 100644 index 0000000000..eb09bf5a68 --- /dev/null +++ b/.github/workflows/daily-update-vllm-version.yml @@ -0,0 +1,93 @@ +# Copyright (C) 2025 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +name: Daily update vLLM & vLLM-fork version + +on: + schedule: + - cron: "30 22 * * *" + workflow_dispatch: + +env: + BRANCH_NAME: "update" + USER_NAME: "CICD-at-OPEA" + USER_EMAIL: "CICD@opea.dev" + +jobs: + freeze-tag: + runs-on: ubuntu-latest + strategy: + matrix: + include: + - repo: vLLM + repo_name: vllm-project/vllm + ver_name: VLLM_VER + - repo: vLLM-fork + repo_url: HabanaAI/vllm-fork + ver_name: VLLM_FORK_VER + permissions: + contents: write + pull-requests: write + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ github.ref }} + + - name: Set up Git + run: | + git config --global user.name ${{ env.USER_NAME }} + git config --global user.email ${{ env.USER_EMAIL }} + git remote set-url origin https://${{ env.USER_NAME }}:"${{ secrets.ACTION_TOKEN }}"@github.com/${{ github.repository }}.git + git fetch + + if git ls-remote https://github.com/${{ github.repository }}.git "refs/heads/${{ env.BRANCH_NAME }}_${{ matrix.repo }}" | grep -q "refs/heads/${{ env.BRANCH_NAME }}_${{ matrix.repo }}"; then + echo "branch ${{ env.BRANCH_NAME }}_${{ matrix.repo }} exists" + git checkout ${{ env.BRANCH_NAME }}_${{ matrix.repo }} + else + echo "branch ${{ env.BRANCH_NAME }}_${{ matrix.repo }} not exists" + git checkout -b ${{ env.BRANCH_NAME }}_${{ matrix.repo }} + git push origin ${{ env.BRANCH_NAME 
}}_${{ matrix.repo }} + echo "branch ${{ env.BRANCH_NAME }}_${{ matrix.repo }} created successfully" + fi + + - name: Run script + run: | + latest_vllm_ver=$(curl -s "https://api.github.com/repos/${{ matrix.repo_name }}/tags" | jq '.[0].name' -) + echo "latest_vllm_ver=${latest_vllm_ver}" >> "$GITHUB_ENV" + find . -type f \( -name "*.sh" -o -name "_build_image.yml" \) -exec sed -i "s/${{ matrix.ver_name }}=.*/${{ matrix.ver_name }}=${latest_vllm_ver}/" {} \; + + - name: Commit changes + run: | + git add . + if git diff-index --quiet HEAD --; then + echo "No changes detected, skipping commit." + exit 1 + else + git commit -s -m "Update ${{ matrix.repo }} version to ${latest_vllm_ver}" + git push + fi + + - name: Create Pull Request + run: | + pr_count=$(curl -H "Authorization: token ${{ secrets.ACTION_TOKEN }}" -s "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&head=${{ env.USER_NAME }}:${{ env.BRANCH_NAME }}_${{ matrix.repo }}" | jq '. | length') + if [ $pr_count -gt 0 ]; then + echo "Pull Request exists" + pr_number=$(curl -H "Authorization: token ${{ secrets.ACTION_TOKEN }}" -s "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&head=${{ env.USER_NAME }}:${{ env.BRANCH_NAME }}_${{ matrix.repo }}" | jq '.[0].number') + curl -X PATCH -H "Authorization: token ${{ secrets.ACTION_TOKEN }}" -d "{ + \"title\":\"Update ${{ matrix.repo }} version to ${latest_vllm_ver}\", + \"body\":\"Update ${{ matrix.repo }} version to ${latest_vllm_ver}\", + \"state\":\"open\" + }" "https://api.github.com/repos/${{ github.repository }}/pulls/${pr_number}" + echo "Pull Request updated successfully" + else + echo "Pull Request not exists..." 
+ curl -H "Authorization: token ${{ secrets.ACTION_TOKEN }}" -d "{ + \"title\":\"Update ${{ matrix.repo }} version to ${latest_vllm_ver}\", + \"body\":\"Update ${{ matrix.repo }} version to ${latest_vllm_ver}\", + \"head\":\"${{ env.USER_NAME }}:${{ env.BRANCH_NAME }}_${{ matrix.repo }}\", + \"base\":\"main\" + }" "https://api.github.com/repos/${{ github.repository }}/pulls" + echo "Pull Request created successfully" + fi diff --git a/AgentQnA/tests/step1_build_images.sh b/AgentQnA/tests/step1_build_images.sh index dfb20df229..8edd7b623a 100644 --- a/AgentQnA/tests/step1_build_images.sh +++ b/AgentQnA/tests/step1_build_images.sh @@ -37,8 +37,8 @@ function build_agent_docker_image_gaudi_vllm() { get_genai_comps git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork - VLLM_VER=v0.6.6.post1+Gaudi-1.20.0 - git checkout ${VLLM_VER} &> /dev/null && cd ../ + VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 + git checkout ${VLLM_FORK_VER} &> /dev/null && cd ../ echo "Build agent image with --no-cache..." service_list="agent agent-ui vllm-gaudi" diff --git a/AudioQnA/tests/test_compose_on_gaudi.sh b/AudioQnA/tests/test_compose_on_gaudi.sh index e2d58b72e9..c24f5ff82e 100644 --- a/AudioQnA/tests/test_compose_on_gaudi.sh +++ b/AudioQnA/tests/test_compose_on_gaudi.sh @@ -27,9 +27,9 @@ function build_docker_images() { git clone https://github.com/HabanaAI/vllm-fork.git cd vllm-fork/ - VLLM_VER=v0.6.6.post1+Gaudi-1.20.0 - echo "Check out vLLM tag ${VLLM_VER}" - git checkout ${VLLM_VER} &> /dev/null && cd ../ + VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 + echo "Check out vLLM tag ${VLLM_FORK_VER}" + git checkout ${VLLM_FORK_VER} &> /dev/null && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." 
service_list="audioqna audioqna-ui whisper-gaudi speecht5-gaudi vllm-gaudi" diff --git a/ChatQnA/tests/test_compose_faqgen_on_gaudi.sh b/ChatQnA/tests/test_compose_faqgen_on_gaudi.sh index be040bdbef..2a30dbb773 100644 --- a/ChatQnA/tests/test_compose_faqgen_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_faqgen_on_gaudi.sh @@ -24,8 +24,8 @@ function build_docker_images() { docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . popd && sleep 1s git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork - VLLM_VER=v0.6.6.post1+Gaudi-1.20.0 - git checkout ${VLLM_VER} &> /dev/null && cd ../ + VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 + git checkout ${VLLM_FORK_VER} &> /dev/null && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." service_list="chatqna chatqna-ui dataprep retriever llm-faqgen vllm-gaudi nginx" diff --git a/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh b/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh index 06b58bedc0..f9057f6ec0 100644 --- a/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh @@ -24,8 +24,8 @@ function build_docker_images() { docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . popd && sleep 1s git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork - VLLM_VER=v0.6.6.post1+Gaudi-1.20.0 - git checkout ${VLLM_VER} &> /dev/null && cd ../ + VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 + git checkout ${VLLM_FORK_VER} &> /dev/null && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." 
service_list="chatqna chatqna-ui dataprep retriever vllm-gaudi guardrails nginx" diff --git a/ChatQnA/tests/test_compose_on_gaudi.sh b/ChatQnA/tests/test_compose_on_gaudi.sh index 0fe3cf11a9..144f541907 100644 --- a/ChatQnA/tests/test_compose_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_on_gaudi.sh @@ -24,8 +24,8 @@ function build_docker_images() { docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . popd && sleep 1s git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork - VLLM_VER=v0.6.6.post1+Gaudi-1.20.0 - git checkout ${VLLM_VER} &> /dev/null && cd ../ + VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 + git checkout ${VLLM_FORK_VER} &> /dev/null && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." service_list="chatqna chatqna-ui dataprep retriever vllm-gaudi nginx" diff --git a/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh b/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh index 6e4782ba79..7d6837402f 100644 --- a/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh @@ -24,8 +24,8 @@ function build_docker_images() { docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . popd && sleep 1s git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork - VLLM_VER=v0.6.6.post1+Gaudi-1.20.0 - git checkout ${VLLM_VER} &> /dev/null && cd ../ + VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 + git checkout ${VLLM_FORK_VER} &> /dev/null && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." 
service_list="chatqna chatqna-ui dataprep retriever vllm-gaudi nginx" diff --git a/CodeGen/tests/test_compose_on_gaudi.sh b/CodeGen/tests/test_compose_on_gaudi.sh index fda73cd165..58eb0888db 100644 --- a/CodeGen/tests/test_compose_on_gaudi.sh +++ b/CodeGen/tests/test_compose_on_gaudi.sh @@ -43,9 +43,9 @@ function build_docker_images() { # Download Gaudi vllm of latest tag git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork - VLLM_VER=v0.6.6.post1+Gaudi-1.20.0 - echo "Check out vLLM tag ${VLLM_VER}" - git checkout ${VLLM_VER} &> /dev/null && cd ../ + VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 + echo "Check out vLLM tag ${VLLM_FORK_VER}" + git checkout ${VLLM_FORK_VER} &> /dev/null && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." service_list="codegen codegen-gradio-ui llm-textgen vllm-gaudi dataprep retriever embedding" diff --git a/CodeTrans/tests/test_compose_on_gaudi.sh b/CodeTrans/tests/test_compose_on_gaudi.sh index 5f287eb025..41472244ac 100644 --- a/CodeTrans/tests/test_compose_on_gaudi.sh +++ b/CodeTrans/tests/test_compose_on_gaudi.sh @@ -31,8 +31,8 @@ function build_docker_images() { cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork - VLLM_VER=v0.6.6.post1+Gaudi-1.20.0 - git checkout ${VLLM_VER} &> /dev/null && cd ../ + VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 + git checkout ${VLLM_FORK_VER} &> /dev/null && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." 
service_list="codetrans codetrans-ui llm-textgen vllm-gaudi nginx" diff --git a/DocSum/tests/test_compose_on_gaudi.sh b/DocSum/tests/test_compose_on_gaudi.sh index 3c0f3d695b..64d3063872 100644 --- a/DocSum/tests/test_compose_on_gaudi.sh +++ b/DocSum/tests/test_compose_on_gaudi.sh @@ -50,8 +50,8 @@ function build_docker_images() { popd && sleep 1s git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork - VLLM_VER=v0.6.6.post1+Gaudi-1.20.0 - git checkout ${VLLM_VER} &> /dev/null && cd ../ + VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 + git checkout ${VLLM_FORK_VER} &> /dev/null && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." service_list="docsum docsum-gradio-ui whisper llm-docsum vllm-gaudi" diff --git a/FinanceAgent/tests/test_compose_on_gaudi.sh b/FinanceAgent/tests/test_compose_on_gaudi.sh index 207dcc62f1..0f42813978 100644 --- a/FinanceAgent/tests/test_compose_on_gaudi.sh +++ b/FinanceAgent/tests/test_compose_on_gaudi.sh @@ -59,9 +59,9 @@ function build_vllm_docker_image() { git clone https://github.com/HabanaAI/vllm-fork.git fi cd ./vllm-fork - # VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)") - VLLM_VER=v0.6.6.post1+Gaudi-1.20.0 - git checkout ${VLLM_VER} &> /dev/null + + VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 + git checkout ${VLLM_FORK_VER} &> /dev/null docker build --no-cache -f Dockerfile.hpu -t $vllm_image --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy if [ $? 
-ne 0 ]; then echo "$vllm_image failed" From bfefdfad340c20e4993f6b1ea5544a931bcb0a7f Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Thu, 8 May 2025 16:36:37 +0800 Subject: [PATCH 036/217] Fix vllm version update workflow (#1919) Signed-off-by: Sun, Xuehao --- .github/env/_build_image.sh | 5 ++++ .github/workflows/_build_image.yml | 11 ++----- .../workflows/daily-update-vllm-version.yml | 30 +++++++++---------- 3 files changed, 23 insertions(+), 23 deletions(-) create mode 100644 .github/env/_build_image.sh diff --git a/.github/env/_build_image.sh b/.github/env/_build_image.sh new file mode 100644 index 0000000000..d559137fed --- /dev/null +++ b/.github/env/_build_image.sh @@ -0,0 +1,5 @@ +# Copyright (C) 2025 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +export VLLM_VER=v0.8.3 +export VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 diff --git a/.github/workflows/_build_image.yml b/.github/workflows/_build_image.yml index 5d55d474a7..79cab22216 100644 --- a/.github/workflows/_build_image.yml +++ b/.github/workflows/_build_image.yml @@ -75,17 +75,12 @@ jobs: run: | cd ${{ github.workspace }}/${{ inputs.example }}/docker_image_build docker_compose_path=${{ github.workspace }}/${{ inputs.example }}/docker_image_build/build.yaml + source ${{ github.workspace }}/.github/env/_build_image.sh if [[ $(grep -c "vllm:" ${docker_compose_path}) != 0 ]]; then - git clone https://github.com/vllm-project/vllm.git && cd vllm - VLLM_VER=v0.8.3 - echo "Check out vLLM tag ${VLLM_VER}" - git checkout ${VLLM_VER} &> /dev/null && cd ../ + git clone -b ${VLLM_VER} --single-branch https://github.com/vllm-project/vllm.git fi if [[ $(grep -c "vllm-gaudi:" ${docker_compose_path}) != 0 ]]; then - git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork - VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 - echo "Check out vLLM tag ${VLLM_FORK_VER}" - git checkout ${VLLM_FORK_VER} &> /dev/null && cd ../ + git clone -b ${VLLM_FORK_VER} --single-branch 
https://github.com/HabanaAI/vllm-fork.git fi git clone --depth 1 --branch ${{ inputs.opea_branch }} https://github.com/opea-project/GenAIComps.git cd GenAIComps && git rev-parse HEAD && cd ../ diff --git a/.github/workflows/daily-update-vllm-version.yml b/.github/workflows/daily-update-vllm-version.yml index eb09bf5a68..982fb130af 100644 --- a/.github/workflows/daily-update-vllm-version.yml +++ b/.github/workflows/daily-update-vllm-version.yml @@ -23,7 +23,7 @@ jobs: repo_name: vllm-project/vllm ver_name: VLLM_VER - repo: vLLM-fork - repo_url: HabanaAI/vllm-fork + repo_name: HabanaAI/vllm-fork ver_name: VLLM_FORK_VER permissions: contents: write @@ -55,8 +55,9 @@ jobs: - name: Run script run: | latest_vllm_ver=$(curl -s "https://api.github.com/repos/${{ matrix.repo_name }}/tags" | jq '.[0].name' -) + latest_vllm_ver=$(echo "$latest_vllm_ver" | sed 's/"//g') echo "latest_vllm_ver=${latest_vllm_ver}" >> "$GITHUB_ENV" - find . -type f \( -name "*.sh" -o -name "_build_image.yml" \) -exec sed -i "s/${{ matrix.ver_name }}=.*/${{ matrix.ver_name }}=${latest_vllm_ver}/" {} \; + find . -type f -name "*.sh" -exec sed -i "s/${{ matrix.ver_name }}=.*/${{ matrix.ver_name }}=${latest_vllm_ver}/" {} \; - name: Commit changes run: | @@ -66,28 +67,27 @@ jobs: exit 1 else git commit -s -m "Update ${{ matrix.repo }} version to ${latest_vllm_ver}" - git push + git push --set-upstream origin ${{ env.BRANCH_NAME }}_${{ matrix.repo }} fi - name: Create Pull Request + env: + GH_TOKEN: ${{ secrets.ACTION_TOKEN }} run: | pr_count=$(curl -H "Authorization: token ${{ secrets.ACTION_TOKEN }}" -s "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&head=${{ env.USER_NAME }}:${{ env.BRANCH_NAME }}_${{ matrix.repo }}" | jq '. 
| length') if [ $pr_count -gt 0 ]; then echo "Pull Request exists" pr_number=$(curl -H "Authorization: token ${{ secrets.ACTION_TOKEN }}" -s "https://api.github.com/repos/${{ github.repository }}/pulls?state=all&head=${{ env.USER_NAME }}:${{ env.BRANCH_NAME }}_${{ matrix.repo }}" | jq '.[0].number') - curl -X PATCH -H "Authorization: token ${{ secrets.ACTION_TOKEN }}" -d "{ - \"title\":\"Update ${{ matrix.repo }} version to ${latest_vllm_ver}\", - \"body\":\"Update ${{ matrix.repo }} version to ${latest_vllm_ver}\", - \"state\":\"open\" - }" "https://api.github.com/repos/${{ github.repository }}/pulls/${pr_number}" + gh pr edit ${pr_number} \ + --title "Update ${{ matrix.repo }} version to ${latest_vllm_ver}" \ + --body "Update ${{ matrix.repo }} version to ${latest_vllm_ver}" echo "Pull Request updated successfully" else - echo "Pull Request not exists..." - curl -H "Authorization: token ${{ secrets.ACTION_TOKEN }}" -d "{ - \"title\":\"Update ${{ matrix.repo }} version to ${latest_vllm_ver}\", - \"body\":\"Update ${{ matrix.repo }} version to ${latest_vllm_ver}\", - \"head\":\"${{ env.USER_NAME }}:${{ env.BRANCH_NAME }}_${{ matrix.repo }}\", - \"base\":\"main\" - }" "https://api.github.com/repos/${{ github.repository }}/pulls" + echo "Pull Request does not exists..." 
+ gh pr create \ + -B main \ + -H ${{ env.BRANCH_NAME }}_${{ matrix.repo }} \ + --title "Update ${{ matrix.repo }} version to ${latest_vllm_ver}" \ + --body "Update ${{ matrix.repo }} version to ${latest_vllm_ver}" echo "Pull Request created successfully" fi From ebb7c24ca845a35e6bd1efe245aaa06da775de3a Mon Sep 17 00:00:00 2001 From: Razvan Liviu Varzaru <45736827+RazvanLiviuVarzaru@users.noreply.github.com> Date: Fri, 9 May 2025 07:08:15 +0300 Subject: [PATCH 037/217] Add ChatQnA docker-compose example on Intel Xeon using MariaDB Vector (#1916) Signed-off-by: Razvan-Liviu Varzaru Co-authored-by: Liang Lv Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .../docker_compose/intel/cpu/xeon/README.md | 1 + .../intel/cpu/xeon/README_mariadb.md | 259 ++++++++++++++++++ .../intel/cpu/xeon/compose_mariadb.yaml | 185 +++++++++++++ .../intel/cpu/xeon/set_env_mariadb.sh | 25 ++ ChatQnA/tests/test_compose_mariadb_on_xeon.sh | 176 ++++++++++++ 5 files changed, 646 insertions(+) create mode 100644 ChatQnA/docker_compose/intel/cpu/xeon/README_mariadb.md create mode 100644 ChatQnA/docker_compose/intel/cpu/xeon/compose_mariadb.yaml create mode 100755 ChatQnA/docker_compose/intel/cpu/xeon/set_env_mariadb.sh create mode 100644 ChatQnA/tests/test_compose_mariadb_on_xeon.sh diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/README.md b/ChatQnA/docker_compose/intel/cpu/xeon/README.md index e2e2deaaa6..eea4c6132d 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/README.md +++ b/ChatQnA/docker_compose/intel/cpu/xeon/README.md @@ -156,6 +156,7 @@ In the context of deploying a ChatQnA pipeline on an Intel® Xeon® platform, we | [compose_faqgen_tgi.yaml](./compose_faqgen_tgi.yaml) | Enables FAQ generation using TGI as the LLM serving framework. For more details, refer to [README_faqgen.md](./README_faqgen.md). | | [compose.telemetry.yaml](./compose.telemetry.yaml) | Helper file for telemetry features for vllm. 
Can be used along with any compose files that serves vllm | | [compose_tgi.telemetry.yaml](./compose_tgi.telemetry.yaml) | Helper file for telemetry features for tgi. Can be used along with any compose files that serves tgi | +| [compose_mariadb.yaml](./compose_mariadb.yaml) | Uses MariaDB Server as the vector database. All other configurations remain the same as the default | ## ChatQnA with Conversational UI (Optional) diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/README_mariadb.md b/ChatQnA/docker_compose/intel/cpu/xeon/README_mariadb.md new file mode 100644 index 0000000000..4717e61109 --- /dev/null +++ b/ChatQnA/docker_compose/intel/cpu/xeon/README_mariadb.md @@ -0,0 +1,259 @@ +# Deploying ChatQnA with MariaDB Vector on Intel® Xeon® Processors + +This document outlines the deployment process for a ChatQnA application utilizing the [GenAIComps](https://github.com/opea-project/GenAIComps.git) microservice pipeline on Intel® Xeon® servers. The pipeline integrates **MariaDB Vector** as the vector database and includes microservices such as `embedding`, `retriever`, `rerank`, and `llm`. + +--- + +## Table of Contents + +1. [Build Docker Images](#build-docker-images) +2. [Validate Microservices](#validate-microservices) +3. [Launch the UI](#launch-the-ui) +4. [Launch the Conversational UI (Optional)](#launch-the-conversational-ui-optional) + +--- + +## Build Docker Images + +First of all, you need to build Docker Images locally and install the python package of it. + +```bash +git clone https://github.com/opea-project/GenAIComps.git +cd GenAIComps +``` + +### 1. Build Retriever Image + +```bash +docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile . +``` + +### 2. Build Dataprep Image + +```bash +docker build --no-cache -t opea/dataprep:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/dataprep/src/Dockerfile . +cd .. 
+``` + +### 3. Build MegaService Docker Image + +To construct the Mega Service, we utilize the [GenAIComps](https://github.com/opea-project/GenAIComps.git) microservice pipeline within the `chatqna.py` Python script. Build MegaService Docker image via below command: + +```bash +git clone https://github.com/opea-project/GenAIExamples.git +cd GenAIExamples/ChatQnA/ +docker build --no-cache -t opea/chatqna:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . +cd ../.. +``` + +### 4. Build UI Docker Image + +Build frontend Docker image via below command: + +```bash +cd GenAIExamples/ChatQnA/ui +docker build --no-cache -t opea/chatqna-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f ./docker/Dockerfile . +cd ../../.. +``` + +### 5. Build Conversational React UI Docker Image (Optional) + +Build frontend Docker image that enables Conversational experience with ChatQnA megaservice via below command: + +**Export the value of the public IP address of your Xeon server to the `host_ip` environment variable** + +```bash +cd GenAIExamples/ChatQnA/ui +export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8912/v1/chatqna" +export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6043/v1/dataprep/ingest" +docker build --no-cache -t opea/chatqna-conversation-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy --build-arg BACKEND_SERVICE_ENDPOINT=$BACKEND_SERVICE_ENDPOINT --build-arg DATAPREP_SERVICE_ENDPOINT=$DATAPREP_SERVICE_ENDPOINT -f ./docker/Dockerfile.react . +cd ../../.. +``` + +### 6. Build Nginx Docker Image + +```bash +cd GenAIComps +docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile . +``` + +Then run the command `docker images`, you will have the following 5 Docker Images: + +1. `opea/dataprep:latest` +2. `opea/retriever:latest` +3. `opea/chatqna:latest` +4. 
`opea/chatqna-ui:latest` +5. `opea/nginx:latest` + +## Start Microservices + +### Required Models + +By default, the embedding, reranking and LLM models are set to a default value as listed below: + +| Service | Model | +| --------- | ----------------------------------- | +| Embedding | BAAI/bge-base-en-v1.5 | +| Reranking | BAAI/bge-reranker-base | +| LLM | meta-llama/Meta-Llama-3-8B-Instruct | + +Change the `xxx_MODEL_ID` below for your needs. + +### Setup Environment Variables + +Since the `compose.yaml` will consume some environment variables, you need to set them up in advance as below. + +**Export the value of the public IP address of your Xeon server to the `host_ip` environment variable** + +> Change the External_Public_IP below with the actual IPV4 value + +```bash +export host_ip="External_Public_IP" +``` + +> Change to your actual Huggingface API Token value + +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +**Append the value of the public IP address to the no_proxy list if you are in a proxy environment** + +```bash +export no_proxy=${your_no_proxy},chatqna-xeon-ui-server,chatqna-xeon-backend-server,dataprep-mariadb-vector,tei-embedding-service,retriever,tei-reranking-service,tgi-service,vllm-service +``` + +```bash +export no_proxy=${your_no_proxy} +export http_proxy=${your_http_proxy} +export https_proxy=${your_http_proxy} +export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" +export RERANK_MODEL_ID="BAAI/bge-reranker-base" +export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" +export MARIADB_DATABASE="vectordb" +export MARIADB_USER="chatqna" +export MARIADB_PASSWORD="password" +``` + +Note: Please replace with `host_ip` with you external IP address, do not use localhost. 
+
+### Start all the services Docker Containers
+
+> Before running the docker compose command, you need to be in the folder that has the docker compose yaml file
+
+```bash
+cd GenAIExamples/ChatQnA/docker_compose/intel/cpu/xeon/
+docker compose -f compose_mariadb.yaml up -d
+```
+
+### Validate Microservices
+
+Follow the instructions to validate MicroServices.
+For details on how to verify the correctness of the response, refer to [how-to-validate_service](../../hpu/gaudi/how_to_validate_service.md).
+
+1. TEI Embedding Service
+
+   ```bash
+   curl ${host_ip}:6006/embed \
+     -X POST \
+     -d '{"inputs":"What is Deep Learning?"}' \
+     -H 'Content-Type: application/json'
+   ```
+
+2. Retriever Microservice
+
+   To consume the retriever microservice, you need to generate a mock embedding vector by Python script. The length of embedding vector
+   is determined by the embedding model.
+   Here we use the model `EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"`, whose vector size is 768.
+
+   Check the vector dimension of your embedding model, set `your_embedding` dimension equal to it.
+
+   ```bash
+   export your_embedding=$(python3 -c "import random; embedding = [random.uniform(-1, 1) for _ in range(768)]; print(embedding)")
+   curl http://${host_ip}:7000/v1/retrieval \
+     -X POST \
+     -d '{"text":"What is the revenue of Nike in 2023?","embedding":"'"${your_embedding}"'"}' \
+     -H 'Content-Type: application/json'
+   ```
+
+3. TEI Reranking Service
+
+   ```bash
+   curl http://${host_ip}:8808/rerank \
+     -X POST \
+     -d '{"query":"What is Deep Learning?", "texts": ["Deep Learning is not...", "Deep learning is..."]}' \
+     -H 'Content-Type: application/json'
+   ```
+
+4. LLM Backend Service
+
+   In the first startup, this service will take more time to download, load and warm up the model. After it's finished, the service will be ready.
+
+   Try the command below to check whether the LLM service is ready.
+
+   ```bash
+   docker logs vllm-service 2>&1 | grep complete
+   ```
+
+   If the service is ready, you will get the response like below.
+
+   ```text
+   INFO: Application startup complete.
+   ```
+
+   Then try the `cURL` command below to validate vLLM service.
+
+   ```bash
+   curl http://${host_ip}:9009/v1/chat/completions \
+     -X POST \
+     -d '{"model": "meta-llama/Meta-Llama-3-8B-Instruct", "messages": [{"role": "user", "content": "What is Deep Learning?"}], "max_tokens":17}' \
+     -H 'Content-Type: application/json'
+   ```
+
+5. MegaService
+
+   ```bash
+   curl http://${host_ip}:8888/v1/chatqna -H "Content-Type: application/json" -d '{
+        "messages": "What is the revenue of Nike in 2023?"
+        }'
+   ```
+
+6. Dataprep Microservice (Optional)
+
+   If you want to update the default knowledge base, you can use the following commands:
+
+   Update Knowledge Base via Local File Upload:
+
+   ```bash
+   curl -X POST "http://${host_ip}:6007/v1/dataprep/ingest" \
+     -H "Content-Type: multipart/form-data" \
+     -F "files=@./your_file.pdf"
+   ```
+
+   This command updates a knowledge base by uploading a local file for processing. Update the file path according to your environment.
+
+   Add Knowledge Base via HTTP Links:
+
+   ```bash
+   curl -X POST "http://${host_ip}:6007/v1/dataprep/ingest" \
+     -H "Content-Type: multipart/form-data" \
+     -F 'link_list=["https://opea.dev"]'
+   ```
+
+## Launch the UI
+
+To access the frontend, open the following URL in your browser: http://{host_ip}:5173. By default, the UI runs on port 5173 internally. If you prefer to use a different host port to access the frontend, you can modify the port mapping in the `compose_mariadb.yaml` file as shown below:
+
+```yaml
+  chatqna-xeon-ui-server:
+    image: opea/chatqna-ui:latest
+    ...
+ ports: + - "80:5173" +``` + +![project-screenshot](../../../../assets/img/chat_ui_init.png) + +Here is an example of running ChatQnA: + +![project-screenshot](../../../../assets/img/chat_ui_response.png) diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_mariadb.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_mariadb.yaml new file mode 100644 index 0000000000..9e109e6144 --- /dev/null +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_mariadb.yaml @@ -0,0 +1,185 @@ +# Copyright (C) 2025 MariaDB Foundation +# SPDX-License-Identifier: Apache-2.0 + +services: + mariadb-server: + image: mariadb:latest + container_name: mariadb-server + ports: + - "3306:3306" + environment: + - MARIADB_DATABASE=${MARIADB_DATABASE} + - MARIADB_USER=${MARIADB_USER} + - MARIADB_PASSWORD=${MARIADB_PASSWORD} + - MARIADB_RANDOM_ROOT_PASSWORD=1 + healthcheck: + test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"] + start_period: 10s + interval: 10s + timeout: 5s + retries: 3 + dataprep-mariadb-vector: + image: ${REGISTRY:-opea}/dataprep:${TAG:-latest} + container_name: dataprep-mariadb-vector + depends_on: + mariadb-server: + condition: service_healthy + tei-embedding-service: + condition: service_started + ports: + - "6007:5000" + environment: + no_proxy: ${no_proxy} + http_proxy: ${http_proxy} + https_proxy: ${https_proxy} + DATAPREP_COMPONENT_NAME: "OPEA_DATAPREP_MARIADBVECTOR" + MARIADB_CONNECTION_URL: mariadb+mariadbconnector://${MARIADB_USER}:${MARIADB_PASSWORD}@mariadb-server:3306/${MARIADB_DATABASE} + TEI_ENDPOINT: http://tei-embedding-service:80 + HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] + interval: 10s + timeout: 5s + retries: 50 + restart: unless-stopped + tei-embedding-service: + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 + container_name: tei-embedding-server + ports: + - "6006:80" + volumes: + - 
"${MODEL_CACHE:-./data}:/data" + shm_size: 1g + environment: + no_proxy: ${no_proxy} + http_proxy: ${http_proxy} + https_proxy: ${https_proxy} + command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate + retriever: + image: ${REGISTRY:-opea}/retriever:${TAG:-latest} + container_name: retriever-mariadb-vector + depends_on: + mariadb-server: + condition: service_healthy + ports: + - "7000:7000" + ipc: host + environment: + no_proxy: ${no_proxy} + http_proxy: ${http_proxy} + https_proxy: ${https_proxy} + MARIADB_CONNECTION_URL: mariadb+mariadbconnector://${MARIADB_USER}:${MARIADB_PASSWORD}@mariadb-server:3306/${MARIADB_DATABASE} + HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + LOGFLAG: ${LOGFLAG} + RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_MARIADBVECTOR" + restart: unless-stopped + tei-reranking-service: + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 + container_name: tei-reranking-server + ports: + - "8808:80" + volumes: + - "${MODEL_CACHE:-./data}:/data" + shm_size: 1g + environment: + no_proxy: ${no_proxy} + http_proxy: ${http_proxy} + https_proxy: ${https_proxy} + HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_HUB_DISABLE_PROGRESS_BARS: 1 + HF_HUB_ENABLE_HF_TRANSFER: 0 + command: --model-id ${RERANK_MODEL_ID} --auto-truncate + vllm-service: + image: ${REGISTRY:-opea}/vllm:${TAG:-latest} + container_name: vllm-service + ports: + - "9009:80" + volumes: + - "${MODEL_CACHE:-./data}:/root/.cache/huggingface/hub" + shm_size: 128g + environment: + no_proxy: ${no_proxy} + http_proxy: ${http_proxy} + https_proxy: ${https_proxy} + HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + LLM_MODEL_ID: ${LLM_MODEL_ID} + VLLM_TORCH_PROFILER_DIR: "/mnt" + VLLM_CPU_KVCACHE_SPACE: 40 + healthcheck: + test: ["CMD-SHELL", "curl -f http://$host_ip:9009/health || exit 1"] + interval: 10s + timeout: 10s + retries: 100 + command: --model $LLM_MODEL_ID --host 0.0.0.0 --port 80 + chatqna-xeon-backend-server: + image: ${REGISTRY:-opea}/chatqna:${TAG:-latest} + 
container_name: chatqna-xeon-backend-server + depends_on: + mariadb-server: + condition: service_healthy + dataprep-mariadb-vector: + condition: service_healthy + tei-embedding-service: + condition: service_started + retriever: + condition: service_started + tei-reranking-service: + condition: service_started + vllm-service: + condition: service_healthy + ports: + - "8888:8888" + environment: + - no_proxy=${no_proxy} + - https_proxy=${https_proxy} + - http_proxy=${http_proxy} + - MEGA_SERVICE_HOST_IP=chatqna-xeon-backend-server + - EMBEDDING_SERVER_HOST_IP=tei-embedding-service + - EMBEDDING_SERVER_PORT=${EMBEDDING_SERVER_PORT:-80} + - RETRIEVER_SERVICE_HOST_IP=retriever + - RERANK_SERVER_HOST_IP=tei-reranking-service + - RERANK_SERVER_PORT=${RERANK_SERVER_PORT:-80} + - LLM_SERVER_HOST_IP=vllm-service + - LLM_SERVER_PORT=80 + - LLM_MODEL=${LLM_MODEL_ID} + - LOGFLAG=${LOGFLAG} + ipc: host + restart: always + chatqna-xeon-ui-server: + image: ${REGISTRY:-opea}/chatqna-ui:${TAG:-latest} + container_name: chatqna-xeon-ui-server + depends_on: + - chatqna-xeon-backend-server + ports: + - "5173:5173" + environment: + - no_proxy=${no_proxy} + - https_proxy=${https_proxy} + - http_proxy=${http_proxy} + ipc: host + restart: always + chatqna-xeon-nginx-server: + image: ${REGISTRY:-opea}/nginx:${TAG:-latest} + container_name: chatqna-xeon-nginx-server + depends_on: + - chatqna-xeon-backend-server + - chatqna-xeon-ui-server + ports: + - "${NGINX_PORT:-80}:80" + environment: + - no_proxy=${no_proxy} + - https_proxy=${https_proxy} + - http_proxy=${http_proxy} + - FRONTEND_SERVICE_IP=chatqna-xeon-ui-server + - FRONTEND_SERVICE_PORT=5173 + - BACKEND_SERVICE_NAME=chatqna + - BACKEND_SERVICE_IP=chatqna-xeon-backend-server + - BACKEND_SERVICE_PORT=8888 + - DATAPREP_SERVICE_IP=dataprep-mariadb-vector + - DATAPREP_SERVICE_PORT=5000 + ipc: host + restart: always + +networks: + default: + driver: bridge diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/set_env_mariadb.sh 
b/ChatQnA/docker_compose/intel/cpu/xeon/set_env_mariadb.sh new file mode 100755 index 0000000000..88ae5c0eec --- /dev/null +++ b/ChatQnA/docker_compose/intel/cpu/xeon/set_env_mariadb.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +# Copyright (C) 2025 MariaDB Foundation +# SPDX-License-Identifier: Apache-2.0 + +pushd "../../../../../" > /dev/null +source .set_env.sh +popd > /dev/null + +if [ -z "${HUGGINGFACEHUB_API_TOKEN}" ]; then + echo "Error: HUGGINGFACEHUB_API_TOKEN is not set. Please set HUGGINGFACEHUB_API_TOKEN." +fi + +export host_ip=$(hostname -I | awk '{print $1}') +export MARIADB_DATABASE="vectordb" +export MARIADB_USER="chatqna" +export MARIADB_PASSWORD="password" +export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" +export RERANK_MODEL_ID="BAAI/bge-reranker-base" +export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" +export LOGFLAG="" +export no_proxy="$no_proxy,chatqna-xeon-ui-server,chatqna-xeon-backend-server,dataprep-redis-service,tei-embedding-service,retriever,tei-reranking-service,tgi-service,vllm-service,jaeger,prometheus,grafana,node-exporter" +export LLM_SERVER_PORT=9000 +export NGINX_PORT=80 diff --git a/ChatQnA/tests/test_compose_mariadb_on_xeon.sh b/ChatQnA/tests/test_compose_mariadb_on_xeon.sh new file mode 100644 index 0000000000..45f5f99e47 --- /dev/null +++ b/ChatQnA/tests/test_compose_mariadb_on_xeon.sh @@ -0,0 +1,176 @@ +#!/bin/bash +# Copyright (C) 2025 MariaDB Foundation +# SPDX-License-Identifier: Apache-2.0 + +set -e +IMAGE_REPO=${IMAGE_REPO:-"opea"} +IMAGE_TAG=${IMAGE_TAG:-"latest"} +echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" +echo "TAG=IMAGE_TAG=${IMAGE_TAG}" +export REGISTRY=${IMAGE_REPO} +export TAG=${IMAGE_TAG} +export MODEL_CACHE=${model_cache:-"./data"} + +WORKPATH=$(dirname "$PWD") +LOG_PATH="$WORKPATH/tests" +ip_address=$(hostname -I | awk '{print $1}') + +function build_docker_images() { + opea_branch=${opea_branch:-"main"} + cd $WORKPATH/docker_image_build + git 
clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s + git clone https://github.com/vllm-project/vllm.git && cd vllm + VLLM_VER="v0.8.3" + echo "Check out vLLM tag ${VLLM_VER}" + git checkout ${VLLM_VER} &> /dev/null + # make sure NOT change the pwd + cd ../ + + echo "Build all the images with --no-cache, check docker_image_build.log for details..." + service_list="chatqna chatqna-ui dataprep retriever vllm nginx" + docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log + + docker images && sleep 1s +} + +function start_services() { + cd $WORKPATH/docker_compose/intel/cpu/xeon + export MARIADB_DATABASE="vectordb" + export MARIADB_USER="chatqna" + export MARIADB_PASSWORD="test" + export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" + export RERANK_MODEL_ID="BAAI/bge-reranker-base" + export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" + export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} + export host_ip=${ip_address} + + # Start Docker Containers + docker compose -f compose_mariadb.yaml up -d > ${LOG_PATH}/start_services_with_compose.log + n=0 + until [[ "$n" -ge 100 ]]; do + docker logs vllm-service > ${LOG_PATH}/vllm_service_start.log 2>&1 + if grep -q complete ${LOG_PATH}/vllm_service_start.log; then + break + fi + sleep 5s + n=$((n+1)) + done +} + +function validate_service() { + local URL="$1" + local EXPECTED_RESULT="$2" + local SERVICE_NAME="$3" + local DOCKER_NAME="$4" + local INPUT_DATA="$5" + + local HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X POST -d "$INPUT_DATA" -H 'Content-Type: application/json' "$URL") + if [ "$HTTP_STATUS" -eq 200 ]; then + echo "[ $SERVICE_NAME ] HTTP status is 200. Checking content..." 
+ + local CONTENT=$(curl -s -X POST -d "$INPUT_DATA" -H 'Content-Type: application/json' "$URL" | tee ${LOG_PATH}/${SERVICE_NAME}.log) + + if echo "$CONTENT" | grep -q "$EXPECTED_RESULT"; then + echo "[ $SERVICE_NAME ] Content is as expected." + else + echo "[ $SERVICE_NAME ] Content does not match the expected result: $CONTENT" + docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log + exit 1 + fi + else + echo "[ $SERVICE_NAME ] HTTP status is not 200. Received status was $HTTP_STATUS" + docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log + exit 1 + fi + sleep 1s +} + +function validate_microservices() { + # Check if the microservices are running correctly. + sleep 3m + + # tei for embedding service + validate_service \ + "${ip_address}:6006/embed" \ + "\[\[" \ + "tei-embedding" \ + "tei-embedding-server" \ + '{"inputs":"What is Deep Learning?"}' + + # retrieval microservice + test_embedding=$(python3 -c "import random; embedding = [random.uniform(-1, 1) for _ in range(768)]; print(embedding)") + validate_service \ + "${ip_address}:7000/v1/retrieval" \ + " " \ + "retrieval" \ + "retriever-mariadb-vector" \ + "{\"text\":\"What is the revenue of Nike in 2023?\",\"embedding\":${test_embedding}}" + + # tei for rerank microservice + validate_service \ + "${ip_address}:8808/rerank" \ + '{"index":1,"score":' \ + "tei-rerank" \ + "tei-reranking-server" \ + '{"query":"What is Deep Learning?", "texts": ["Deep Learning is not...", "Deep learning is..."]}' + + # vllm for llm service + validate_service \ + "${ip_address}:9009/v1/chat/completions" \ + "content" \ + "vllm-llm" \ + "vllm-service" \ + '{"model": "meta-llama/Meta-Llama-3-8B-Instruct", "messages": [{"role": "user", "content": "What is Deep Learning?"}], "max_tokens": 17}' +} + +function validate_megaservice() { + # Curl the Mega Service + validate_service \ + "${ip_address}:8888/v1/chatqna" \ + "Nike" \ + "mega-chatqna" \ + "chatqna-xeon-backend-server" \ + '{"messages": "What is the revenue of 
Nike in 2023?"}' + +} + +function stop_docker() { + cd $WORKPATH/docker_compose/intel/cpu/xeon + docker compose down +} + +function main() { + + echo "::group::stop_docker" + stop_docker + echo "::endgroup::" + + echo "::group::build_docker_images" + if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" + start_services + echo "::endgroup::" + + echo "::group::validate_microservices" + validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" + validate_megaservice + echo "::endgroup::" + + echo "::group::stop_docker" + stop_docker + echo "::endgroup::" + + docker system prune -f + +} + +main From 4efb1e083344c9796202b253206d03d65d970d4a Mon Sep 17 00:00:00 2001 From: Eero Tamminen Date: Sat, 10 May 2025 16:57:52 +0300 Subject: [PATCH 038/217] Update paths to GenAIInfra scripts (#1923) Signed-off-by: Eero Tamminen --- DocSum/kubernetes/helm/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/DocSum/kubernetes/helm/README.md b/DocSum/kubernetes/helm/README.md index 64537cc06f..4ed4570754 100644 --- a/DocSum/kubernetes/helm/README.md +++ b/DocSum/kubernetes/helm/README.md @@ -147,7 +147,7 @@ nano ~/docsum-k8s-install/GenAIExamples/DocSum/kubernetes/helm/rocm-tgi-values.y #### If ROCm vLLM used ```bash cd ~/docsum-k8s-install/GenAIInfra/helm-charts -./update_dependency.sh +scripts/update_dependency.sh helm dependency update docsum helm upgrade --install docsum docsum \ --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} \ @@ -157,7 +157,7 @@ helm upgrade --install docsum docsum \ #### If ROCm TGI used ```bash cd ~/docsum-k8s-install/GenAIInfra/helm-charts -./update_dependency.sh +scripts/update_dependency.sh helm dependency update docsum helm upgrade --install docsum docsum \ --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} \ From 7590b055aa4cd36fcdab92a5aeee625cc9b385ee Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Mon, 12 May 2025 10:19:18 +0800 
Subject: [PATCH 039/217] Integrate DBQnA set_env to ut scripts and enhanced validation checks. (#1915) Integrate DBQnA set_env to ut scripts. Add README.md for ut scripts. Enhanced validation checks Signed-off-by: ZePan110 Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- DBQnA/docker_compose/amd/gpu/rocm/set_env.sh | 10 +++++++-- .../docker_compose/intel/cpu/xeon/set_env.sh | 19 ++++++----------- DBQnA/docker_compose/set_env.sh | 11 ---------- DBQnA/tests/README.md | 21 +++++++++++++++++++ DBQnA/tests/test_compose_on_rocm.sh | 18 ++++------------ DBQnA/tests/test_compose_on_xeon.sh | 13 +++--------- 6 files changed, 42 insertions(+), 50 deletions(-) mode change 100644 => 100755 DBQnA/docker_compose/intel/cpu/xeon/set_env.sh delete mode 100755 DBQnA/docker_compose/set_env.sh create mode 100644 DBQnA/tests/README.md diff --git a/DBQnA/docker_compose/amd/gpu/rocm/set_env.sh b/DBQnA/docker_compose/amd/gpu/rocm/set_env.sh index 6dd066fc7b..f744dbcc0f 100644 --- a/DBQnA/docker_compose/amd/gpu/rocm/set_env.sh +++ b/DBQnA/docker_compose/amd/gpu/rocm/set_env.sh @@ -3,8 +3,13 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -export host_ip="" -export DBQNA_HUGGINGFACEHUB_API_TOKEN="" +pushd "../../" > /dev/null +ls -l +source .set_env.sh +popd > /dev/null + +export host_ip=${ip_address} +export DBQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export DBQNA_TGI_SERVICE_PORT=8008 export DBQNA_TGI_LLM_ENDPOINT="http://${host_ip}:${DBQNA_TGI_SERVICE_PORT}" export DBQNA_LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" @@ -14,3 +19,4 @@ export POSTGRES_PASSWORD="testpwd" export POSTGRES_DB="chinook" export DBQNA_TEXT_TO_SQL_PORT=9090 export DBQNA_UI_PORT=5174 +export build_texttosql_url="${ip_address}:${DBQNA_TEXT_TO_SQL_PORT}/v1" diff --git a/DBQnA/docker_compose/intel/cpu/xeon/set_env.sh b/DBQnA/docker_compose/intel/cpu/xeon/set_env.sh old mode 100644 new mode 100755 index 
beae6d5bc9..f05e9c871c --- a/DBQnA/docker_compose/intel/cpu/xeon/set_env.sh +++ b/DBQnA/docker_compose/intel/cpu/xeon/set_env.sh @@ -2,26 +2,19 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 + pushd "../../../../../" > /dev/null source .set_env.sh popd > /dev/null -#export host_ip=$(hostname -I | awk '{print $1}') - -if [ -z "${HUGGINGFACEHUB_API_TOKEN}" ]; then - echo "Error: HUGGINGFACEHUB_API_TOKEN is not set. Please set HUGGINGFACEHUB_API_TOKEN." -fi - -if [ -z "${host_ip}" ]; then - echo "Error: host_ip is not set. Please set host_ip first." -fi +export host_ip=${ip_address} export no_proxy=$no_proxy,$host_ip,dbqna-xeon-react-ui-server,text2sql-service,tgi-service,postgres-container export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export TGI_PORT=8008 -export TGI_LLM_ENDPOINT="http://${host_ip}:${TGI_PORT}" -export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" +export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export POSTGRES_USER=postgres export POSTGRES_PASSWORD=testpwd export POSTGRES_DB=chinook +export TGI_PORT=8008 export TEXT2SQL_PORT=9090 -"set_env.sh" 27L, 974B +export TGI_LLM_ENDPOINT="http://${host_ip}:${TGI_PORT}" +export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" diff --git a/DBQnA/docker_compose/set_env.sh b/DBQnA/docker_compose/set_env.sh deleted file mode 100755 index 94ca2186a2..0000000000 --- a/DBQnA/docker_compose/set_env.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env bash - -# Copyright (C) 2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 -pushd "../../" > /dev/null -source .set_env.sh -popd > /dev/null - -export TGI_PORT=8008 -export TGI_LLM_ENDPOINT="http://${your_ip}:${TGI_PORT}" -export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" diff --git a/DBQnA/tests/README.md b/DBQnA/tests/README.md new file mode 100644 index 0000000000..5d6dc16a10 --- /dev/null +++ b/DBQnA/tests/README.md @@ -0,0 +1,21 @@ +# DBQnA E2E test scripts + +## Set the required environment variable + 
+```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +## Run test + +On Intel Xeon with TGI: + +```bash +bash test_compose_on_xeon.sh +``` + +On AMD ROCm with TGI: + +```bash +bash test_compose_on_rocm.sh +``` diff --git a/DBQnA/tests/test_compose_on_rocm.sh b/DBQnA/tests/test_compose_on_rocm.sh index de482b4eaa..df83d4fbec 100644 --- a/DBQnA/tests/test_compose_on_rocm.sh +++ b/DBQnA/tests/test_compose_on_rocm.sh @@ -7,21 +7,10 @@ set -xe WORKPATH=$(dirname "$PWD") LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') -tgi_port=8008 -tgi_volume=$WORKPATH/data export host_ip=${ip_address} -export DBQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export DBQNA_TGI_SERVICE_PORT=8008 -export DBQNA_TGI_LLM_ENDPOINT="http://${host_ip}:${DBQNA_TGI_SERVICE_PORT}" -export DBQNA_LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" -export MODEL_ID=${DBQNA_LLM_MODEL_ID} -export POSTGRES_USER="postgres" -export POSTGRES_PASSWORD="testpwd" -export POSTGRES_DB="chinook" -export DBQNA_TEXT_TO_SQL_PORT=9090 -export DBQNA_UI_PORT=5174 -export build_texttosql_url="${ip_address}:${DBQNA_TEXT_TO_SQL_PORT}/v1" +source $WORKPATH/docker_compose/amd/gpu/rocm/set_env.sh + export MODEL_CACHE=${model_cache:-"/var/lib/GenAI/data"} function build_docker_images() { @@ -57,7 +46,8 @@ function validate_microservice() { -d '{"input_text": "Find the total number of Albums.","conn_str": {"user": "'${POSTGRES_USER}'","password": "'${POSTGRES_PASSWORD}'","host": "'${ip_address}'", "port": "5442", "database": "'${POSTGRES_DB}'" }}' \ -H 'Content-Type: application/json') - if [[ $result == *"output"* ]]; then + if echo "$result" | jq -e '.result.output' > /dev/null 2>&1; then + # if [[ $result == *"output"* ]]; then echo $result echo "Result correct." 
else diff --git a/DBQnA/tests/test_compose_on_xeon.sh b/DBQnA/tests/test_compose_on_xeon.sh index da9fa1b71a..751d3ac24b 100755 --- a/DBQnA/tests/test_compose_on_xeon.sh +++ b/DBQnA/tests/test_compose_on_xeon.sh @@ -15,7 +15,6 @@ export MODEL_CACHE=${model_cache:-"./data"} WORKPATH=$(dirname "$PWD") LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') -tgi_port=8008 function build_docker_images() { cd $WORKPATH/docker_image_build @@ -30,14 +29,7 @@ function build_docker_images() { function start_service() { cd $WORKPATH/docker_compose/intel/cpu/xeon - export model="mistralai/Mistral-7B-Instruct-v0.3" - export LLM_MODEL_ID=${model} - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export POSTGRES_USER=postgres - export POSTGRES_PASSWORD=testpwd - export POSTGRES_DB=chinook - export TEXT2SQL_PORT=9090 - export TGI_LLM_ENDPOINT="http://${ip_address}:${tgi_port}" + source ./set_env.sh # Start Docker Containers docker compose -f compose.yaml up -d > ${LOG_PATH}/start_services_with_compose.log @@ -60,7 +52,8 @@ function validate_microservice() { -d '{"input_text": "Find the total number of Albums.","conn_str": {"user": "'${POSTGRES_USER}'","password": "'${POSTGRES_PASSWORD}'","host": "'${ip_address}'", "port": "5442", "database": "'${POSTGRES_DB}'" }}' \ -H 'Content-Type: application/json') - if [[ $result == *"output"* ]]; then + if echo "$result" | jq -e '.result.output' > /dev/null 2>&1; then + # if [[ $result == *"output"* ]]; then echo $result echo "Result correct." 
else From 7ffb4107e6d2c0583caf1189295012e7d6c12c77 Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Mon, 12 May 2025 11:30:29 +0800 Subject: [PATCH 040/217] set fail-fast to false in vLLM update actions (#1926) Signed-off-by: Sun, Xuehao --- .github/workflows/daily-update-vllm-version.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/daily-update-vllm-version.yml b/.github/workflows/daily-update-vllm-version.yml index 982fb130af..eb88b16bd7 100644 --- a/.github/workflows/daily-update-vllm-version.yml +++ b/.github/workflows/daily-update-vllm-version.yml @@ -25,6 +25,7 @@ jobs: - repo: vLLM-fork repo_name: HabanaAI/vllm-fork ver_name: VLLM_FORK_VER + fail-fast: false permissions: contents: write pull-requests: write From 2596671d3fa70eca50bd6016a65b02da9f77d0dd Mon Sep 17 00:00:00 2001 From: Ying Hu Date: Mon, 12 May 2025 11:40:33 +0800 Subject: [PATCH 041/217] Update README.md for remove the docker installer (#1927) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- ChatQnA/docker_compose/install_docker.sh | 35 ----------------------- GraphRAG/README.md | 2 +- GraphRAG/docker_compose/install_docker.sh | 35 ----------------------- 3 files changed, 1 insertion(+), 71 deletions(-) delete mode 100644 ChatQnA/docker_compose/install_docker.sh delete mode 100644 GraphRAG/docker_compose/install_docker.sh diff --git a/ChatQnA/docker_compose/install_docker.sh b/ChatQnA/docker_compose/install_docker.sh deleted file mode 100644 index 8774c5756d..0000000000 --- a/ChatQnA/docker_compose/install_docker.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash - -# Copyright (C) 2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -# Update the package index -sudo apt-get -y update - -# Install prerequisites -sudo apt-get -y install ca-certificates curl - -# Create the directory for the Docker GPG key -sudo install -m 0755 -d /etc/apt/keyrings - 
-# Add Docker's official GPG key -sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc - -# Set permissions for the GPG key -sudo chmod a+r /etc/apt/keyrings/docker.asc - -# Add Docker repository to the sources list -echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \ - $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null - -# Update the package index with Docker packages -sudo apt-get -y update - -# Install Docker packages -sudo apt-get -y install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin - -# add existing user -sudo usermod -aG docker $USER - -# Optional: Verify that Docker is installed correctly -sudo docker --version diff --git a/GraphRAG/README.md b/GraphRAG/README.md index 3c9de58d69..314e18a0ef 100644 --- a/GraphRAG/README.md +++ b/GraphRAG/README.md @@ -18,7 +18,7 @@ Quick Start Deployment Steps: 2. Run Docker Compose. 3. Consume the GraphRAG Service. 
-Note: If you do not have docker installed you can run this script to install docker : `bash docker_compose/install_docker.sh` +Note: If you do not have Docker installed you can [install Docker](https://docs.docker.com/engine/install/) first ### Quick Start: 1.Setup Environment Variable diff --git a/GraphRAG/docker_compose/install_docker.sh b/GraphRAG/docker_compose/install_docker.sh deleted file mode 100644 index d2a495bde3..0000000000 --- a/GraphRAG/docker_compose/install_docker.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env bash - -# Copyright (C) 2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -# Update the package index -sudo apt-get -y update - -# Install prerequisites -sudo apt-get -y install ca-certificates curl --no-install-recommends --fix-missing - -# Create the directory for the Docker GPG key -sudo install -m 0755 -d /etc/apt/keyrings - -# Add Docker's official GPG key -sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc - -# Set permissions for the GPG key -sudo chmod a+r /etc/apt/keyrings/docker.asc - -# Add Docker repository to the sources list -echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \ - $(. 
/etc/os-release && echo "$VERSION_CODENAME") stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null - -# Update the package index with Docker packages -sudo apt-get -y update - -# Install Docker packages -sudo apt-get -y install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin --no-install-recommends --fix-missing - -# add existing user -sudo usermod -aG docker $USER - -# Optional: Verify that Docker is installed correctly -sudo docker --version From 99f2f940b678ee5320d95a264dc3c0c34cb36228 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Mon, 12 May 2025 17:41:57 +0800 Subject: [PATCH 042/217] Fix input check for helm test workflow (#1938) Signed-off-by: ZePan110 --- .github/workflows/_helm-e2e.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/_helm-e2e.yml b/.github/workflows/_helm-e2e.yml index edff2d7f36..61a91ce583 100644 --- a/.github/workflows/_helm-e2e.yml +++ b/.github/workflows/_helm-e2e.yml @@ -137,7 +137,7 @@ jobs: env: example: ${{ inputs.example }} run: | - if [[ ! "$example" =~ ^[a-zA-Z]{1,20}$ ]] || [[ "$example" =~ \.\. ]] || [[ "$example" == -* || "$example" == *- ]]; then + if [[ ! "$example" =~ ^[a-zA-Z0-9]{1,20}$ ]] || [[ "$example" =~ \.\. 
]] || [[ "$example" == -* || "$example" == *- ]]; then echo "Error: Invalid input - only lowercase alphanumeric and internal hyphens allowed" exit 1 fi From a0bdf8eab25a777315d189910750b65d38bc2198 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Tue, 13 May 2025 13:34:31 +0800 Subject: [PATCH 043/217] Add opea/vllm-rocm README.md link in docker_images_list.md (#1925) Signed-off-by: ZePan110 --- .github/workflows/pr-link-path-scan.yml | 6 ++++-- docker_images_list.md | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pr-link-path-scan.yml b/.github/workflows/pr-link-path-scan.yml index d6aa98d499..cb1e4b116d 100644 --- a/.github/workflows/pr-link-path-scan.yml +++ b/.github/workflows/pr-link-path-scan.yml @@ -23,6 +23,7 @@ jobs: - name: Check the Validity of Hyperlinks run: | cd ${{github.workspace}} + delay=15 fail="FALSE" merged_commit=$(git log -1 --format='%H') changed_files="$(git diff --name-status --diff-filter=ARM ${{ github.event.pull_request.base.sha }} ${merged_commit} | awk '/\.md$/ {print $NF}')" @@ -35,14 +36,15 @@ jobs: # echo $url_line url=$(echo "$url_line"|cut -d '(' -f2 | cut -d ')' -f1|sed 's/\.git$//') path=$(echo "$url_line"|cut -d':' -f1 | cut -d'/' -f2-) + sleep $delay response=$(curl -L -s -o /dev/null -w "%{http_code}" "$url")|| true if [ "$response" -ne 200 ]; then - echo "**********Validation failed, try again**********" + echo "**********Validation failed ($response), try again**********" response_retry=$(curl -s -o /dev/null -w "%{http_code}" "$url") if [ "$response_retry" -eq 200 ]; then echo "*****Retry successfully*****" else - echo "Invalid link from ${{github.workspace}}/$path: $url" + echo "Invalid link ($response_retry) from ${{github.workspace}}/$path: $url" fail="TRUE" fi fi diff --git a/docker_images_list.md b/docker_images_list.md index 401cdc5872..d54eb87cf3 100644 --- a/docker_images_list.md +++ b/docker_images_list.md @@ -111,7 +111,7 @@ Take ChatQnA for example. 
ChatQnA is a chatbot application service based on the | [opea/vllm-arc](https://hub.docker.com/r/opea/vllm-arc) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/vllm/src/Dockerfile.intel_gpu) | Deploying and servicing VLLM models on Arc based on VLLM projects | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/vllm/README.md) | | [opea/vllm-gaudi](https://hub.docker.com/r/opea/vllm-gaudi) | [Link](https://github.com/HabanaAI/vllm-fork/blob/v0.6.6.post1%2BGaudi-1.20.0/Dockerfile.hpu) | Deploying and servicing VLLM models on Gaudi2 based on VLLM project | [Link](https://github.com/HabanaAI/vllm-fork/blob/habana_main/README.md) | | [opea/vllm-openvino](https://hub.docker.com/r/opea/vllm-openvino) | [Link](https://github.com/vllm-project/vllm/blob/v0.6.1/Dockerfile.openvino) | VLLM Model for Deploying and Serving Openvino Framework Based on VLLM Project | [Link](https://github.com/vllm-project/vllm/blob/main/README.md) | -| [opea/vllm-rocm](https://hub.docker.com/r/opea/vllm-rocm) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/vllm/src/Dockerfile.amd_gpu) | Deploying and servicing VLLM models on AMD Rocm based on VLLM project | | +| [opea/vllm-rocm](https://hub.docker.com/r/opea/vllm-rocm) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/vllm/src/Dockerfile.amd_gpu) | Deploying and servicing VLLM models on AMD Rocm based on VLLM project | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/vllm/README.md) | | [opea/wav2lip](https://hub.docker.com/r/opea/wav2lip) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/wav2lip/src/Dockerfile) | OPEA Generate lip movements from audio files microservice with Pathway for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/wav2lip/deployment/kubernetes/README.md) | | 
[opea/wav2lip-gaudi](https://hub.docker.com/r/opea/wav2lip-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/wav2lip/src/Dockerfile.intel_hpu) | OPEA Generate lip movements from audio files microservice with Pathway for GenAI application use on the Gaudi2 | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/wav2lip/deployment/kubernetes/README.md) | | [opea/web-retriever](https://hub.docker.com/r/opea/web-retriever)
| [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/web_retrievers/src/Dockerfile) | OPEA retrieval microservice based on chroma vectordb for GenAI application | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/web_retrievers/src/README.md) | From bd6726c53a2649468bb401ba2f42fc93fcb572a7 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Wed, 14 May 2025 10:57:16 +0800 Subject: [PATCH 044/217] Blocking link checks that require a login (#1946) Signed-off-by: ZePan110 Co-authored-by: chen, suyue Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .github/workflows/pr-link-path-scan.yml | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/.github/workflows/pr-link-path-scan.yml b/.github/workflows/pr-link-path-scan.yml index cb1e4b116d..30040bc8b0 100644 --- a/.github/workflows/pr-link-path-scan.yml +++ b/.github/workflows/pr-link-path-scan.yml @@ -36,16 +36,20 @@ jobs: # echo $url_line url=$(echo "$url_line"|cut -d '(' -f2 | cut -d ')' -f1|sed 's/\.git$//') path=$(echo "$url_line"|cut -d':' -f1 | cut -d'/' -f2-) - sleep $delay - response=$(curl -L -s -o /dev/null -w "%{http_code}" "$url")|| true - if [ "$response" -ne 200 ]; then - echo "**********Validation failed ($response), try again**********" - response_retry=$(curl -s -o /dev/null -w "%{http_code}" "$url") - if [ "$response_retry" -eq 200 ]; then - echo "*****Retry successfully*****" - else - echo "Invalid link ($response_retry) from ${{github.workspace}}/$path: $url" - fail="TRUE" + if [[ "$url" == "https://platform.openai.com/api-keys"* ]]; then + echo "Link "$url" from ${{github.workspace}}/$path needs to be verified by a real person." 
+ else + sleep $delay + response=$(curl -L -s -o /dev/null -w "%{http_code}" "$url")|| true + if [ "$response" -ne 200 ]; then + echo "**********Validation failed ($response), try again**********" + response_retry=$(curl -s -o /dev/null -w "%{http_code}" "$url") + if [ "$response_retry" -eq 200 ]; then + echo "*****Retry successfully*****" + else + echo "Invalid link ($response_retry) from ${{github.workspace}}/$path: $url" + fail="TRUE" + fi fi fi done From 26d07019d0c655bf36aeb837ea91b78f10a2205c Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Wed, 14 May 2025 11:11:54 +0800 Subject: [PATCH 045/217] [CICD enhance] CodeTrans run CI with latest base image, group logs in GHA outputs. (#1929) Signed-off-by: chensuyue --- CodeTrans/Dockerfile | 3 +- CodeTrans/docker_image_build/build.yaml | 3 ++ CodeTrans/tests/test_compose_on_gaudi.sh | 35 ++++++++++++++------ CodeTrans/tests/test_compose_on_rocm.sh | 34 +++++++++++++------ CodeTrans/tests/test_compose_on_xeon.sh | 35 ++++++++++++++------ CodeTrans/tests/test_compose_tgi_on_gaudi.sh | 34 +++++++++++++------ CodeTrans/tests/test_compose_tgi_on_xeon.sh | 34 +++++++++++++------ CodeTrans/tests/test_compose_vllm_on_rocm.sh | 34 +++++++++++++------ 8 files changed, 145 insertions(+), 67 deletions(-) diff --git a/CodeTrans/Dockerfile b/CodeTrans/Dockerfile index 786cbcd243..21f9b1633c 100644 --- a/CodeTrans/Dockerfile +++ b/CodeTrans/Dockerfile @@ -1,8 +1,9 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 +ARG IMAGE_REPO=opea ARG BASE_TAG=latest -FROM opea/comps-base:$BASE_TAG +FROM $IMAGE_REPO/comps-base:$BASE_TAG COPY ./code_translation.py $HOME/code_translation.py diff --git a/CodeTrans/docker_image_build/build.yaml b/CodeTrans/docker_image_build/build.yaml index 2ff45e3812..b230d1d4ec 100644 --- a/CodeTrans/docker_image_build/build.yaml +++ b/CodeTrans/docker_image_build/build.yaml @@ -5,6 +5,8 @@ services: codetrans: build: args: + IMAGE_REPO: ${REGISTRY:-opea} + BASE_TAG: 
${TAG:-latest} http_proxy: ${http_proxy} https_proxy: ${https_proxy} no_proxy: ${no_proxy} @@ -45,4 +47,5 @@ services: build: context: GenAIComps dockerfile: comps/third_parties/vllm/src/Dockerfile.amd_gpu + extends: codetrans image: ${REGISTRY:-opea}/vllm-rocm:${TAG:-latest} diff --git a/CodeTrans/tests/test_compose_on_gaudi.sh b/CodeTrans/tests/test_compose_on_gaudi.sh index 41472244ac..7b0baa6602 100644 --- a/CodeTrans/tests/test_compose_on_gaudi.sh +++ b/CodeTrans/tests/test_compose_on_gaudi.sh @@ -17,19 +17,14 @@ ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. - if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . -type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . 
+ popd && sleep 1s + git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 git checkout ${VLLM_FORK_VER} &> /dev/null && cd ../ @@ -160,17 +155,35 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + + echo "::group::validate_frontend" validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/CodeTrans/tests/test_compose_on_rocm.sh b/CodeTrans/tests/test_compose_on_rocm.sh index ef429636ba..ecc6a4fdfc 100644 --- a/CodeTrans/tests/test_compose_on_rocm.sh +++ b/CodeTrans/tests/test_compose_on_rocm.sh @@ -18,19 +18,13 @@ ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. - if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . 
-type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s echo "Build all the images with --no-cache, check docker_image_build.log for details..." service_list="codetrans codetrans-ui llm-textgen nginx" @@ -161,17 +155,35 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + + echo "::group::validate_frontend" validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/CodeTrans/tests/test_compose_on_xeon.sh b/CodeTrans/tests/test_compose_on_xeon.sh index 4deb89fe00..54ae5ee0ca 100644 --- a/CodeTrans/tests/test_compose_on_xeon.sh +++ b/CodeTrans/tests/test_compose_on_xeon.sh @@ -17,19 +17,14 @@ ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. 
- if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . -type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s + git clone https://github.com/vllm-project/vllm.git && cd vllm VLLM_VER="v0.8.3" echo "Check out vLLM tag ${VLLM_VER}" @@ -163,17 +158,35 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + + echo "::group::validate_frontend" validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/CodeTrans/tests/test_compose_tgi_on_gaudi.sh b/CodeTrans/tests/test_compose_tgi_on_gaudi.sh index cb4bb53659..5914dc29ce 100644 --- a/CodeTrans/tests/test_compose_tgi_on_gaudi.sh +++ b/CodeTrans/tests/test_compose_tgi_on_gaudi.sh @@ -17,19 +17,13 @@ ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, 
replace the git clone branch in Dockerfile. - if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . -type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s echo "Build all the images with --no-cache, check docker_image_build.log for details..." service_list="codetrans codetrans-ui llm-textgen nginx" @@ -167,17 +161,35 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + + echo "::group::validate_frontend" validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/CodeTrans/tests/test_compose_tgi_on_xeon.sh b/CodeTrans/tests/test_compose_tgi_on_xeon.sh index 57bd46348f..99a4f6a7d0 100644 --- a/CodeTrans/tests/test_compose_tgi_on_xeon.sh +++ b/CodeTrans/tests/test_compose_tgi_on_xeon.sh @@ -17,19 +17,13 @@ ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { 
opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. - if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . -type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s echo "Build all the images with --no-cache, check docker_image_build.log for details..." service_list="codetrans codetrans-ui llm-textgen nginx" @@ -167,17 +161,35 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + + echo "::group::validate_frontend" validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/CodeTrans/tests/test_compose_vllm_on_rocm.sh b/CodeTrans/tests/test_compose_vllm_on_rocm.sh index 558a3a02ad..2ef8709607 100644 --- a/CodeTrans/tests/test_compose_vllm_on_rocm.sh +++ b/CodeTrans/tests/test_compose_vllm_on_rocm.sh @@ -17,19 +17,13 @@ 
ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. - if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . -type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s echo "Build all the images with --no-cache, check docker_image_build.log for details..." 
service_list="vllm-rocm llm-textgen codetrans codetrans-ui nginx" @@ -160,17 +154,35 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + + echo "::group::validate_frontend" validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } From fb53c536a31cf4fb8e6dddbdc1e74ba50a4e9a09 Mon Sep 17 00:00:00 2001 From: alexsin368 <109180236+alexsin368@users.noreply.github.com> Date: Tue, 13 May 2025 20:12:57 -0700 Subject: [PATCH 046/217] AgentQnA - add support for remote server (#1900) Signed-off-by: alexsin368 Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: ZePan110 --- AgentQnA/README.md | 65 +++++++++++++++---- .../intel/cpu/xeon/compose_remote.yaml | 18 +++++ 2 files changed, 71 insertions(+), 12 deletions(-) create mode 100644 AgentQnA/docker_compose/intel/cpu/xeon/compose_remote.yaml diff --git a/AgentQnA/README.md b/AgentQnA/README.md index 6844f716e7..c78703d6fb 100644 --- a/AgentQnA/README.md +++ b/AgentQnA/README.md @@ -99,7 +99,7 @@ flowchart LR #### First, clone the `GenAIExamples` repo. 
-``` +```bash export WORKDIR= cd $WORKDIR git clone https://github.com/opea-project/GenAIExamples.git @@ -109,7 +109,7 @@ git clone https://github.com/opea-project/GenAIExamples.git ##### For proxy environments only -``` +```bash export http_proxy="Your_HTTP_Proxy" export https_proxy="Your_HTTPs_Proxy" # Example: no_proxy="localhost, 127.0.0.1, 192.168.1.1" @@ -118,14 +118,24 @@ export no_proxy="Your_No_Proxy" ##### For using open-source llms -``` +Set up a [HuggingFace](https://huggingface.co/) account and generate a [user access token](https://huggingface.co/docs/transformers.js/en/guides/private#step-1-generating-a-user-access-token). + +Then set an environment variable with the token and another for a directory to download the models: + +```bash export HUGGINGFACEHUB_API_TOKEN= -export HF_CACHE_DIR= #so that no need to redownload every time +export HF_CACHE_DIR= # to avoid redownloading models ``` -##### [Optional] OPANAI_API_KEY to use OpenAI models +##### [Optional] OPENAI_API_KEY to use OpenAI models or Intel® AI for Enterprise Inference -``` +To use OpenAI models, generate a key following these [instructions](https://platform.openai.com/api-keys). + +To use a remote server running Intel® AI for Enterprise Inference, contact the cloud service provider or owner of the on-prem machine for a key to access the desired model on the server. + +Then set the environment variable `OPENAI_API_KEY` with the key contents: + +```bash export OPENAI_API_KEY= ``` @@ -133,16 +143,18 @@ export OPENAI_API_KEY= ##### Gaudi -``` +```bash source $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/hpu/gaudi/set_env.sh ``` ##### Xeon -``` +```bash source $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/cpu/xeon/set_env.sh ``` +For running + ### 2. Launch the multi-agent system.
We make it convenient to launch the whole system with docker compose, which includes microservices for LLM, agents, UI, retrieval tool, vector database, dataprep, and telemetry. There are 3 docker compose files, which make it easy for users to pick and choose. Users can choose a different retrieval tool other than the `DocIndexRetriever` example provided in our GenAIExamples repo. Users can choose not to launch the telemetry containers. @@ -184,14 +196,37 @@ docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/ #### Launch on Xeon -On Xeon, only OpenAI models are supported. The command below will launch the multi-agent system with the `DocIndexRetriever` as the retrieval tool for the Worker RAG agent. +On Xeon, OpenAI models and models deployed on a remote server are supported. Both methods require an API key. ```bash export OPENAI_API_KEY= cd $WORKDIR/GenAIExamples/AgentQnA/docker_compose/intel/cpu/xeon +``` + +##### OpenAI Models + +The command below will launch the multi-agent system with the `DocIndexRetriever` as the retrieval tool for the Worker RAG agent. + +```bash docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml -f compose_openai.yaml up -d ``` +##### Models on Remote Server + +When models are deployed on a remote server with Intel® AI for Enterprise Inference, a base URL and an API key are required to access them. To run the Agent microservice on Xeon while using models deployed on a remote server, add `compose_remote.yaml` to the `docker compose` command and set additional environment variables. + +###### Notes + +- `OPENAI_API_KEY` is already set in a previous step. +- `model` is used to overwrite the value set for this environment variable in `set_env.sh`. +- `LLM_ENDPOINT_URL` is the base URL given from the owner of the on-prem machine or cloud service provider. It will follow this format: "https://". Here is an example: "https://api.inference.example.com". 
+ +```bash +export model= +export LLM_ENDPOINT_URL= +docker compose -f $WORKDIR/GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml -f compose_openai.yaml -f compose_remote.yaml up -d +``` + ### 3. Ingest Data into the vector database The `run_ingest_data.sh` script will use an example jsonl file to ingest example documents into a vector database. Other ways to ingest data and other types of documents supported can be found in the OPEA dataprep microservice located in the opea-project/GenAIComps repo. @@ -208,12 +243,18 @@ bash run_ingest_data.sh The UI microservice is launched in the previous step with the other microservices. To see the UI, open a web browser to `http://${ip_address}:5173` to access the UI. Note the `ip_address` here is the host IP of the UI microservice. -1. `create Admin Account` with a random value -2. add opea agent endpoint `http://$ip_address:9090/v1` which is a openai compatible api +1. Click on the arrow above `Get started`. Create an admin account with a name, email, and password. +2. Add an OpenAI-compatible API endpoint. In the upper right, click on the circle button with the user's initial, go to `Admin Settings`->`Connections`. Under `Manage OpenAI API Connections`, click on the `+` to add a connection. Fill in these fields: + +- **URL**: `http://${ip_address}:9090/v1`, do not forget the `v1` +- **Key**: any value +- **Model IDs**: any name i.e. `opea-agent`, then press `+` to add it + +Click "Save". ![opea-agent-setting](assets/img/opea-agent-setting.png) -3. test opea agent with ui +3. Test OPEA agent with UI. Return to `New Chat` and ensure the model (i.e. `opea-agent`) is selected near the upper left. Enter in any prompt to interact with the agent. 
![opea-agent-test](assets/img/opea-agent-test.png) diff --git a/AgentQnA/docker_compose/intel/cpu/xeon/compose_remote.yaml b/AgentQnA/docker_compose/intel/cpu/xeon/compose_remote.yaml new file mode 100644 index 0000000000..24536435a3 --- /dev/null +++ b/AgentQnA/docker_compose/intel/cpu/xeon/compose_remote.yaml @@ -0,0 +1,18 @@ +# Copyright (C) 2025 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +services: + worker-rag-agent: + environment: + llm_endpoint_url: ${LLM_ENDPOINT_URL} + api_key: ${OPENAI_API_KEY} + + worker-sql-agent: + environment: + llm_endpoint_url: ${LLM_ENDPOINT_URL} + api_key: ${OPENAI_API_KEY} + + supervisor-react-agent: + environment: + llm_endpoint_url: ${LLM_ENDPOINT_URL} + api_key: ${OPENAI_API_KEY} From f2c8e0b4ff83584aba3042c29f3d5f960338e219 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Wed, 14 May 2025 13:00:51 +0800 Subject: [PATCH 047/217] Integrate DocIndexRetriever set_env to ut scripts. (#1945) Signed-off-by: ZePan110 --- .../docker_compose/intel/cpu/xeon/set_env.sh | 22 +++++++++++++ .../docker_compose/intel/hpu/gaudi/set_env.sh | 23 +++++++++++++ DocIndexRetriever/tests/README.md | 33 +++++++++++++++++++ .../tests/test_compose_milvus_on_gaudi.sh | 17 +--------- .../tests/test_compose_milvus_on_xeon.sh | 17 +--------- .../tests/test_compose_on_gaudi.sh | 15 +-------- .../tests/test_compose_on_xeon.sh | 17 +--------- 7 files changed, 82 insertions(+), 62 deletions(-) create mode 100644 DocIndexRetriever/tests/README.md diff --git a/DocIndexRetriever/docker_compose/intel/cpu/xeon/set_env.sh b/DocIndexRetriever/docker_compose/intel/cpu/xeon/set_env.sh index e4f5c207ba..ca8818e065 100644 --- a/DocIndexRetriever/docker_compose/intel/cpu/xeon/set_env.sh +++ b/DocIndexRetriever/docker_compose/intel/cpu/xeon/set_env.sh @@ -5,3 +5,25 @@ pushd "../../../../../" > /dev/null source .set_env.sh popd > /dev/null + +ip_address=$(hostname -I | awk '{print $1}') +export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" +export 
RERANK_MODEL_ID="BAAI/bge-reranker-base" +export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:6006" +export TEI_RERANKING_ENDPOINT="http://${ip_address}:8808" +export TGI_LLM_ENDPOINT="http://${ip_address}:8008" +export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export MEGA_SERVICE_HOST_IP=${ip_address} +export EMBEDDING_SERVICE_HOST_IP=${ip_address} +export RETRIEVER_SERVICE_HOST_IP=${ip_address} +export RERANK_SERVICE_HOST_IP=${ip_address} +export LLM_SERVICE_HOST_IP=${ip_address} +export host_ip=${ip_address} +export RERANK_TYPE="tei" +export LOGFLAG=true + +export REDIS_URL="redis://${ip_address}:6379" +export INDEX_NAME="rag-redis" + +export MILVUS_HOST=${ip_address} +export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest" diff --git a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/set_env.sh b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/set_env.sh index e4f5c207ba..0c2b818df4 100644 --- a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/set_env.sh +++ b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/set_env.sh @@ -5,3 +5,26 @@ pushd "../../../../../" > /dev/null source .set_env.sh popd > /dev/null + +ip_address=$(hostname -I | awk '{print $1}') +export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" +export RERANK_MODEL_ID="BAAI/bge-reranker-base" +export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:8090" +export TEI_RERANKING_ENDPOINT="http://${ip_address}:8808" +export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export MEGA_SERVICE_HOST_IP=${ip_address} +export EMBEDDING_SERVICE_HOST_IP=${ip_address} +export RETRIEVER_SERVICE_HOST_IP=${ip_address} +export RERANK_SERVICE_HOST_IP=${ip_address} +export host_ip=${ip_address} +export RERANK_TYPE="tei" +export LOGFLAG=true + +export REDIS_URL="redis://${ip_address}:6379" +export INDEX_NAME="rag-redis" + + +export TGI_LLM_ENDPOINT="http://${ip_address}:8008" +export MILVUS_HOST=${ip_address} +export LLM_SERVICE_HOST_IP=${ip_address} +export 
DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest" diff --git a/DocIndexRetriever/tests/README.md b/DocIndexRetriever/tests/README.md new file mode 100644 index 0000000000..be057c4239 --- /dev/null +++ b/DocIndexRetriever/tests/README.md @@ -0,0 +1,33 @@ +# DocIndexRetriever E2E test scripts + +## Set the required environment variable + +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +## Run test + +On Intel Xeon with TGI: + +```bash +bash test_compose_on_xeon.sh +``` + +On Intel Gaudi with TGI: + +```bash +bash test_compose_on_gaudi.sh +``` + +On AMD ROCm with TGI: + +```bash +bash test_compose_on_rocm.sh +``` + +On AMD ROCm with vLLM: + +```bash +bash test_compose_vllm_on_rocm.sh +``` diff --git a/DocIndexRetriever/tests/test_compose_milvus_on_gaudi.sh b/DocIndexRetriever/tests/test_compose_milvus_on_gaudi.sh index 40633be8f4..e0fc2b01ee 100644 --- a/DocIndexRetriever/tests/test_compose_milvus_on_gaudi.sh +++ b/DocIndexRetriever/tests/test_compose_milvus_on_gaudi.sh @@ -36,22 +36,7 @@ function build_docker_images() { function start_services() { echo "Starting Docker Services...." 
cd $WORKPATH/docker_compose/intel/hpu/gaudi - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:8090" - export TEI_RERANKING_ENDPOINT="http://${ip_address}:8808" - export TGI_LLM_ENDPOINT="http://${ip_address}:8008" - export MILVUS_HOST=${ip_address} - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export MEGA_SERVICE_HOST_IP=${ip_address} - export EMBEDDING_SERVICE_HOST_IP=${ip_address} - export RETRIEVER_SERVICE_HOST_IP=${ip_address} - export RERANK_SERVICE_HOST_IP=${ip_address} - export LLM_SERVICE_HOST_IP=${ip_address} - export host_ip=${ip_address} - export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest" - export RERANK_TYPE="tei" - export LOGFLAG=true + source ./set_env.sh # Start Docker Containers docker compose -f compose_milvus.yaml up -d diff --git a/DocIndexRetriever/tests/test_compose_milvus_on_xeon.sh b/DocIndexRetriever/tests/test_compose_milvus_on_xeon.sh index 59b1c40aa0..f8942173c4 100755 --- a/DocIndexRetriever/tests/test_compose_milvus_on_xeon.sh +++ b/DocIndexRetriever/tests/test_compose_milvus_on_xeon.sh @@ -35,22 +35,7 @@ function build_docker_images() { function start_services() { echo "Starting Docker Services...." 
cd $WORKPATH/docker_compose/intel/cpu/xeon - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:6006" - export TEI_RERANKING_ENDPOINT="http://${ip_address}:8808" - export TGI_LLM_ENDPOINT="http://${ip_address}:8008" - export MILVUS_HOST=${ip_address} - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export MEGA_SERVICE_HOST_IP=${ip_address} - export EMBEDDING_SERVICE_HOST_IP=${ip_address} - export RETRIEVER_SERVICE_HOST_IP=${ip_address} - export RERANK_SERVICE_HOST_IP=${ip_address} - export LLM_SERVICE_HOST_IP=${ip_address} - export host_ip=${ip_address} - export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest" - export RERANK_TYPE="tei" - export LOGFLAG=true + source ./set_env.sh # Start Docker Containers docker compose -f compose_milvus.yaml up -d diff --git a/DocIndexRetriever/tests/test_compose_on_gaudi.sh b/DocIndexRetriever/tests/test_compose_on_gaudi.sh index 4b9de5a2df..596fbd8f4b 100644 --- a/DocIndexRetriever/tests/test_compose_on_gaudi.sh +++ b/DocIndexRetriever/tests/test_compose_on_gaudi.sh @@ -34,20 +34,7 @@ function build_docker_images() { function start_services() { echo "Starting Docker Services...." 
cd $WORKPATH/docker_compose/intel/hpu/gaudi - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:8090" - export TEI_RERANKING_ENDPOINT="http://${ip_address}:8808" - export REDIS_URL="redis://${ip_address}:6379" - export INDEX_NAME="rag-redis" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export MEGA_SERVICE_HOST_IP=${ip_address} - export EMBEDDING_SERVICE_HOST_IP=${ip_address} - export RETRIEVER_SERVICE_HOST_IP=${ip_address} - export RERANK_SERVICE_HOST_IP=${ip_address} - export host_ip=${ip_address} - export RERANK_TYPE="tei" - export LOGFLAG=true + source ./set_env.sh # Start Docker Containers docker compose up -d diff --git a/DocIndexRetriever/tests/test_compose_on_xeon.sh b/DocIndexRetriever/tests/test_compose_on_xeon.sh index 467411653c..aab0cb4c60 100644 --- a/DocIndexRetriever/tests/test_compose_on_xeon.sh +++ b/DocIndexRetriever/tests/test_compose_on_xeon.sh @@ -34,22 +34,7 @@ function build_docker_images() { function start_services() { echo "Starting Docker Services...." 
cd $WORKPATH/docker_compose/intel/cpu/xeon - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:6006" - export TEI_RERANKING_ENDPOINT="http://${ip_address}:8808" - export TGI_LLM_ENDPOINT="http://${ip_address}:8008" - export REDIS_URL="redis://${ip_address}:6379" - export INDEX_NAME="rag-redis" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export MEGA_SERVICE_HOST_IP=${ip_address} - export EMBEDDING_SERVICE_HOST_IP=${ip_address} - export RETRIEVER_SERVICE_HOST_IP=${ip_address} - export RERANK_SERVICE_HOST_IP=${ip_address} - export LLM_SERVICE_HOST_IP=${ip_address} - export host_ip=${ip_address} - export RERANK_TYPE="tei" - export LOGFLAG=true + source ./set_env.sh # Start Docker Containers docker compose up -d From 9f80a18cb5f257c5a79c1bef56a35baa60bc1eb5 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Wed, 14 May 2025 13:12:35 +0800 Subject: [PATCH 048/217] Integrate GraphRAG set_env to ut scripts. (#1943) Integrate GraphRAG set_env to ut scripts. Add README.md for UT scripts. 
Signed-off-by: ZePan110 --- .../docker_compose/intel/hpu/gaudi/set_env.sh | 5 +++- GraphRAG/tests/README.md | 15 +++++++++++ GraphRAG/tests/test_compose_on_gaudi.sh | 26 +------------------ 3 files changed, 20 insertions(+), 26 deletions(-) create mode 100644 GraphRAG/tests/README.md diff --git a/GraphRAG/docker_compose/intel/hpu/gaudi/set_env.sh b/GraphRAG/docker_compose/intel/hpu/gaudi/set_env.sh index a4fd8049b0..441ea183be 100644 --- a/GraphRAG/docker_compose/intel/hpu/gaudi/set_env.sh +++ b/GraphRAG/docker_compose/intel/hpu/gaudi/set_env.sh @@ -10,6 +10,9 @@ pushd "../../../../../" > /dev/null source .set_env.sh popd > /dev/null +host_ip=$(hostname -I | awk '{print $1}') +export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export TEI_EMBEDDER_PORT=11633 export LLM_ENDPOINT_PORT=11634 export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" @@ -17,7 +20,6 @@ export OPENAI_EMBEDDING_MODEL="text-embedding-3-small" export LLM_MODEL_ID="meta-llama/Meta-Llama-3.1-8B-Instruct" export OPENAI_LLM_MODEL="gpt-4o" export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:${TEI_EMBEDDER_PORT}" -export LLM_MODEL_ID="meta-llama/Meta-Llama-3.1-8B-Instruct" export TGI_LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}" export NEO4J_PORT1=11631 export NEO4J_PORT2=11632 @@ -32,3 +34,4 @@ export MAX_TOTAL_TOKENS=8192 export DATA_PATH="/mnt/nvme2n1/hf_cache" export DATAPREP_PORT=11103 export RETRIEVER_PORT=11635 +export MEGA_SERVICE_PORT=8888 diff --git a/GraphRAG/tests/README.md b/GraphRAG/tests/README.md new file mode 100644 index 0000000000..daf4788df2 --- /dev/null +++ b/GraphRAG/tests/README.md @@ -0,0 +1,15 @@ +# GraphRAG E2E test scripts + +## Set the required environment variable + +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +## Run test + +On Intel Gaudi: + +```bash +bash test_compose_on_gaudi.sh +``` diff --git a/GraphRAG/tests/test_compose_on_gaudi.sh b/GraphRAG/tests/test_compose_on_gaudi.sh index 
9d65aa5802..04fc3d8c9a 100755 --- a/GraphRAG/tests/test_compose_on_gaudi.sh +++ b/GraphRAG/tests/test_compose_on_gaudi.sh @@ -41,31 +41,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - - export TEI_EMBEDDER_PORT=11633 - export LLM_ENDPOINT_PORT=11634 - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export OPENAI_EMBEDDING_MODEL="text-embedding-3-small" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3.1-8B-Instruct" - export OPENAI_LLM_MODEL="gpt-4o" - export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:${TEI_EMBEDDER_PORT}" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3.1-8B-Instruct" - export TGI_LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}" - export NEO4J_PORT1=11631 - export NEO4J_PORT2=11632 - export NEO4J_URI="bolt://${host_ip}:${NEO4J_PORT2}" - export NEO4J_URL="bolt://${host_ip}:${NEO4J_PORT2}" - export NEO4J_USERNAME="neo4j" - export NEO4J_PASSWORD="neo4jtest" - export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:5000/v1/dataprep/ingest" - export LOGFLAG=True - export MAX_INPUT_TOKENS=4096 - export MAX_TOTAL_TOKENS=8192 - export DATAPREP_PORT=11103 - export RETRIEVER_PORT=11635 - export MEGA_SERVICE_PORT=8888 + source set_env.sh unset OPENAI_API_KEY # Start Docker Containers From 8eac02e58bfbcda79b3848f5758801e93da37ea4 Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Wed, 14 May 2025 17:12:09 +0800 Subject: [PATCH 049/217] [CICD enhance] DBQnA run CI with latest base image, group logs in GHA outputs. 
(#1931) Signed-off-by: chensuyue --- DBQnA/docker_image_build/build.yaml | 6 +++--- DBQnA/tests/test_compose_on_rocm.sh | 32 ++++++++++++++++++++++++----- DBQnA/tests/test_compose_on_xeon.sh | 23 +++++++++++++++++---- 3 files changed, 49 insertions(+), 12 deletions(-) diff --git a/DBQnA/docker_image_build/build.yaml b/DBQnA/docker_image_build/build.yaml index 4c4cc9e3f4..11d7f518b6 100644 --- a/DBQnA/docker_image_build/build.yaml +++ b/DBQnA/docker_image_build/build.yaml @@ -7,6 +7,8 @@ services: context: GenAIComps dockerfile: comps/text2sql/src/Dockerfile args: + IMAGE_REPO: ${REGISTRY:-opea} + BASE_TAG: ${TAG:-latest} http_proxy: ${http_proxy} https_proxy: ${https_proxy} no_proxy: ${no_proxy} @@ -16,8 +18,6 @@ services: context: ../ui dockerfile: ./docker/Dockerfile.react args: - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} - no_proxy: ${no_proxy} texttosql_url: ${build_texttosql_url} + extends: text2sql image: ${REGISTRY:-opea}/text2sql-react-ui:${TAG:-latest} diff --git a/DBQnA/tests/test_compose_on_rocm.sh b/DBQnA/tests/test_compose_on_rocm.sh index df83d4fbec..e2dc0b81d6 100644 --- a/DBQnA/tests/test_compose_on_rocm.sh +++ b/DBQnA/tests/test_compose_on_rocm.sh @@ -4,6 +4,13 @@ set -xe +IMAGE_REPO=${IMAGE_REPO:-"opea"} +IMAGE_TAG=${IMAGE_TAG:-"latest"} +echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" +echo "TAG=IMAGE_TAG=${IMAGE_TAG}" +export REGISTRY=${IMAGE_REPO} +export TAG=${IMAGE_TAG} + WORKPATH=$(dirname "$PWD") LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') @@ -25,7 +32,7 @@ function build_docker_images() { docker images && sleep 1s } -function start_service() { +function start_services() { cd "$WORKPATH"/docker_compose/amd/gpu/rocm # Start Docker Containers docker compose up -d > "${LOG_PATH}"/start_services_with_compose.log @@ -95,16 +102,31 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + + echo "::group::build_docker_images" + if [[ "$IMAGE_REPO" == "opea" 
]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" + start_services + echo "::endgroup::" - build_docker_images - start_service - sleep 10s + echo "::group::validate_microservice" validate_microservice + echo "::endgroup::" + + echo "::group::validate_frontend" validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/DBQnA/tests/test_compose_on_xeon.sh b/DBQnA/tests/test_compose_on_xeon.sh index 751d3ac24b..c410cc48f8 100755 --- a/DBQnA/tests/test_compose_on_xeon.sh +++ b/DBQnA/tests/test_compose_on_xeon.sh @@ -27,7 +27,7 @@ function build_docker_images() { docker images && sleep 1s } -function start_service() { +function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon source ./set_env.sh @@ -101,16 +101,31 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" - build_docker_images - start_service + echo "::group::build_docker_images" + if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + echo "::group::start_services" + start_services + echo "::endgroup::" + + echo "::group::validate_microservice" validate_microservice + echo "::endgroup::" + + echo "::group::validate_frontend" validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } From 410df80925bf82b6c36233a96479f5139303c97c Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Thu, 15 May 2025 11:22:49 +0800 Subject: [PATCH 050/217] [CICD enhance] AvatarChatbot run CI with latest base image, group logs in GHA outputs. 
(#1930) Signed-off-by: chensuyue --- AvatarChatbot/Dockerfile | 3 +- AvatarChatbot/docker_image_build/build.yaml | 2 ++ AvatarChatbot/tests/test_compose_on_gaudi.sh | 36 +++++++++++--------- AvatarChatbot/tests/test_compose_on_rocm.sh | 31 ++++++++++------- AvatarChatbot/tests/test_compose_on_xeon.sh | 34 ++++++++++-------- 5 files changed, 63 insertions(+), 43 deletions(-) diff --git a/AvatarChatbot/Dockerfile b/AvatarChatbot/Dockerfile index 86e5a9bc6c..a9ec9a2cb7 100644 --- a/AvatarChatbot/Dockerfile +++ b/AvatarChatbot/Dockerfile @@ -1,8 +1,9 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 +ARG IMAGE_REPO=opea ARG BASE_TAG=latest -FROM opea/comps-base:$BASE_TAG +FROM $IMAGE_REPO/comps-base:$BASE_TAG COPY ./avatarchatbot.py $HOME/avatarchatbot.py diff --git a/AvatarChatbot/docker_image_build/build.yaml b/AvatarChatbot/docker_image_build/build.yaml index 23d102a508..9d99a52a9d 100644 --- a/AvatarChatbot/docker_image_build/build.yaml +++ b/AvatarChatbot/docker_image_build/build.yaml @@ -5,6 +5,8 @@ services: avatarchatbot: build: args: + IMAGE_REPO: ${REGISTRY:-opea} + BASE_TAG: ${TAG:-latest} http_proxy: ${http_proxy} https_proxy: ${https_proxy} no_proxy: ${no_proxy} diff --git a/AvatarChatbot/tests/test_compose_on_gaudi.sh b/AvatarChatbot/tests/test_compose_on_gaudi.sh index 6c167c4467..faf156907d 100755 --- a/AvatarChatbot/tests/test_compose_on_gaudi.sh +++ b/AvatarChatbot/tests/test_compose_on_gaudi.sh @@ -24,19 +24,13 @@ ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. - if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . 
-type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s echo "Build all the images with --no-cache, check docker_image_build.log for details..." service_list="avatarchatbot whisper-gaudi speecht5-gaudi wav2lip-gaudi animation" @@ -128,19 +122,29 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker - echo y | docker builder prune --all - echo y | docker image prune + echo "::endgroup::" + docker builder prune --all -f + docker image prune -f + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services - # validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice - # validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker builder prune --all - echo y | docker image prune + echo "::endgroup::" + docker builder prune --all -f + docker image prune -f } diff --git a/AvatarChatbot/tests/test_compose_on_rocm.sh b/AvatarChatbot/tests/test_compose_on_rocm.sh index f0069ef913..514921f6e2 100644 --- a/AvatarChatbot/tests/test_compose_on_rocm.sh +++ b/AvatarChatbot/tests/test_compose_on_rocm.sh @@ -25,6 +25,10 @@ ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { cd $WORKPATH/docker_image_build git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../ + pushd GenAIComps + echo "GenAIComps test commit is 
$(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s echo "Build all the images with --no-cache, check docker_image_build.log for details..." service_list="avatarchatbot whisper asr speecht5 tts wav2lip animation" @@ -138,11 +142,6 @@ function validate_megaservice() { } -#function validate_frontend() { - -#} - - function stop_docker() { cd $WORKPATH/docker_compose/amd/gpu/rocm docker compose down && docker compose rm -f @@ -151,19 +150,27 @@ function stop_docker() { function main() { - echo $OPENAI_API_KEY - echo $OPENAI_KEY - + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services - # validate_microservices - sleep 30 + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice - # validate_frontend + echo "::endgroup::" + + echo "::group::stop_docker" stop_docker + echo "::endgroup::" - echo y | docker system prune + docker system prune -f } diff --git a/AvatarChatbot/tests/test_compose_on_xeon.sh b/AvatarChatbot/tests/test_compose_on_xeon.sh index ed7bc15699..8e9a04535b 100755 --- a/AvatarChatbot/tests/test_compose_on_xeon.sh +++ b/AvatarChatbot/tests/test_compose_on_xeon.sh @@ -24,19 +24,13 @@ ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. - if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . 
-type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s echo "Build all the images with --no-cache, check docker_image_build.log for details..." service_list="avatarchatbot whisper speecht5 wav2lip animation" @@ -127,16 +121,28 @@ function stop_docker() { function main() { + + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services - # validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice - # validate_frontend + echo "::endgroup::" + + echo "::group::stop_docker" stop_docker + echo "::endgroup::" - echo y | docker builder prune --all - echo y | docker image prune + docker system prune -f } From 3fb59a976988c63e77ce6a27d2239a39f6e1acae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20De=20Le=C3=B3n?= <111013930+daniel-de-leon-user293@users.noreply.github.com> Date: Thu, 15 May 2025 11:58:58 -0700 Subject: [PATCH 051/217] Update DocSum README and environment configuration (#1917) Signed-off-by: Daniel Deleon Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Abolfazl Shahbazi <12436063+ashahba@users.noreply.github.com> Co-authored-by: chen, suyue Co-authored-by: Eero Tamminen Co-authored-by: Zhenzhong Xu --- .../docker_compose/intel/cpu/xeon/README.md | 32 +++++++---------- .../docker_compose/intel/hpu/gaudi/README.md | 34 
++++++++----------- .../intel/hpu/gaudi/compose.yaml | 1 + DocSum/docker_compose/set_env.sh | 7 +++- 4 files changed, 34 insertions(+), 40 deletions(-) diff --git a/DocSum/docker_compose/intel/cpu/xeon/README.md b/DocSum/docker_compose/intel/cpu/xeon/README.md index 06d3e4378d..b06d6007e8 100644 --- a/DocSum/docker_compose/intel/cpu/xeon/README.md +++ b/DocSum/docker_compose/intel/cpu/xeon/README.md @@ -21,35 +21,29 @@ This section describes how to quickly deploy and test the DocSum service manuall 6. [Test the Pipeline](#test-the-pipeline) 7. [Cleanup the Deployment](#cleanup-the-deployment) -### Access the Code +### Access the Code and Set Up Environment Clone the GenAIExample repository and access the ChatQnA Intel Xeon platform Docker Compose files and supporting scripts: -``` +```bash git clone https://github.com/opea-project/GenAIExamples.git -cd GenAIExamples/DocSum/docker_compose/intel/cpu/xeon/ +cd GenAIExamples/DocSum/docker_compose +source set_env.sh +cd intel/cpu/xeon/ ``` -Checkout a released version, such as v1.2: +NOTE: by default vLLM does "warmup" at start, to optimize its performance for the specified model and the underlying platform, which can take long time. For development (and e.g. autoscaling) it can be skipped with `export VLLM_SKIP_WARMUP=true`. -``` -git checkout v1.2 +Checkout a released version, such as v1.3: + +```bash +git checkout v1.3 ``` ### Generate a HuggingFace Access Token Some HuggingFace resources, such as some models, are only accessible if you have an access token. If you do not already have a HuggingFace access token, you can create one by first creating an account by following the steps provided at [HuggingFace](https://huggingface.co/) and then generating a [user access token](https://huggingface.co/docs/transformers.js/en/guides/private#step-1-generating-a-user-access-token). 
-### Configure the Deployment Environment - -To set up environment variables for deploying DocSum services, source the _set_env.sh_ script in this directory: - -``` -source ./set_env.sh -``` - -The _set_env.sh_ script will prompt for required and optional environment variables used to configure the DocSum services. If a value is not entered, the script will use a default value for the same. It will also generate a _.env_ file defining the desired configuration. Consult the section on [DocSum Service configuration](#docsum-service-configuration) for information on how service specific configuration parameters affect deployments. - ### Deploy the Services Using Docker Compose To deploy the DocSum services, execute the `docker compose up` command with the appropriate arguments. For a default deployment, execute: @@ -78,13 +72,13 @@ Please refer to the table below to build different microservices from source: After running docker compose, check if all the containers launched via docker compose have started: -``` +```bash docker ps -a ``` For the default deployment, the following 5 containers should have started: -``` +```bash CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 748f577b3c78 opea/whisper:latest "python whisper_s…" 5 minutes ago Up About a minute 0.0.0.0:7066->7066/tcp, :::7066->7066/tcp docsum-xeon-whisper-server 4eq8b7034fd9 opea/docsum-gradio-ui:latest "docker-entrypoint.s…" 5 minutes ago Up About a minute 0.0.0.0:5173->5173/tcp, :::5173->5173/tcp docsum-xeon-ui-server @@ -109,7 +103,7 @@ curl -X POST http://${host_ip}:8888/v1/docsum \ To stop the containers associated with the deployment, execute the following command: -``` +```bash docker compose -f compose.yaml down ``` diff --git a/DocSum/docker_compose/intel/hpu/gaudi/README.md b/DocSum/docker_compose/intel/hpu/gaudi/README.md index 5cf9e77477..2edd6934fe 100644 --- a/DocSum/docker_compose/intel/hpu/gaudi/README.md +++ b/DocSum/docker_compose/intel/hpu/gaudi/README.md @@ -23,35 +23,29 @@ This 
section describes how to quickly deploy and test the DocSum service manuall 6. [Test the Pipeline](#test-the-pipeline) 7. [Cleanup the Deployment](#cleanup-the-deployment) -### Access the Code +### Access the Code and Set Up Environment -Clone the GenAIExample repository and access the ChatQnA Intel® Gaudi® platform Docker Compose files and supporting scripts: +Clone the GenAIExample repository and access the DocSum Intel® Gaudi® platform Docker Compose files and supporting scripts: -``` +```bash git clone https://github.com/opea-project/GenAIExamples.git -cd GenAIExamples/DocSum/docker_compose/intel/hpu/gaudi/ +cd GenAIExamples/DocSum/docker_compose +source set_env.sh +cd intel/hpu/gaudi/ ``` -Checkout a released version, such as v1.2: +NOTE: by default vLLM does "warmup" at start, to optimize its performance for the specified model and the underlying platform, which can take long time. For development (and e.g. autoscaling) it can be skipped with `export VLLM_SKIP_WARMUP=true`. -``` -git checkout v1.2 +Checkout a released version, such as v1.3: + +```bash +git checkout v1.3 ``` ### Generate a HuggingFace Access Token Some HuggingFace resources, such as some models, are only accessible if you have an access token. If you do not already have a HuggingFace access token, you can create one by first creating an account by following the steps provided at [HuggingFace](https://huggingface.co/) and then generating a [user access token](https://huggingface.co/docs/transformers.js/en/guides/private#step-1-generating-a-user-access-token). -### Configure the Deployment Environment - -To set up environment variables for deploying DocSum services, source the _set_env.sh_ script in this directory: - -``` -source ./set_env.sh -``` - -The _set_env.sh_ script will prompt for required and optional environment variables used to configure the DocSum services. If a value is not entered, the script will use a default value for the same. 
It will also generate a _.env_ file defining the desired configuration. Consult the section on [DocSum Service configuration](#docsum-service-configuration) for information on how service specific configuration parameters affect deployments. - ### Deploy the Services Using Docker Compose To deploy the DocSum services, execute the `docker compose up` command with the appropriate arguments. For a default deployment, execute: @@ -80,13 +74,13 @@ Please refer to the table below to build different microservices from source: After running docker compose, check if all the containers launched via docker compose have started: -``` +```bash docker ps -a ``` For the default deployment, the following 5 containers should have started: -``` +```bash CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 748f577b3c78 opea/whisper:latest "python whisper_s…" 5 minutes ago Up About a minute 0.0.0.0:7066->7066/tcp, :::7066->7066/tcp docsum-gaudi-whisper-server 4eq8b7034fd9 opea/docsum-gradio-ui:latest "docker-entrypoint.s…" 5 minutes ago Up About a minute 0.0.0.0:5173->5173/tcp, :::5173->5173/tcp docsum-gaudi-ui-server @@ -111,7 +105,7 @@ curl -X POST http://${host_ip}:8888/v1/docsum \ To stop the containers associated with the deployment, execute the following command: -``` +```bash docker compose -f compose.yaml down ``` diff --git a/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml b/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml index 58f427c549..f44d789a93 100644 --- a/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml @@ -18,6 +18,7 @@ services: OMPI_MCA_btl_vader_single_copy_mechanism: none LLM_MODEL_ID: ${LLM_MODEL_ID} NUM_CARDS: ${NUM_CARDS} + VLLM_SKIP_WARMUP: ${VLLM_SKIP_WARMUP:-false} VLLM_TORCH_PROFILER_DIR: "/mnt" healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"] diff --git a/DocSum/docker_compose/set_env.sh b/DocSum/docker_compose/set_env.sh index 6f5351479e..b31ceb5784 100644 
--- a/DocSum/docker_compose/set_env.sh +++ b/DocSum/docker_compose/set_env.sh @@ -6,10 +6,10 @@ pushd "../../" > /dev/null source .set_env.sh popd > /dev/null +export host_ip=$(hostname -I | awk '{print $1}') # Example: host_ip="192.168.1.1" export no_proxy="${no_proxy},${host_ip}" # Example: no_proxy="localhost, 127.0.0.1, 192.168.1.1" export http_proxy=$http_proxy export https_proxy=$https_proxy -export host_ip=$(hostname -I | awk '{print $1}') # Example: host_ip="192.168.1.1" export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export LLM_ENDPOINT_PORT=8008 @@ -29,3 +29,8 @@ export BACKEND_SERVICE_PORT=8888 export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:${BACKEND_SERVICE_PORT}/v1/docsum" export LOGFLAG=True + +export NUM_CARDS=1 +export BLOCK_SIZE=128 +export MAX_NUM_SEQS=256 +export MAX_SEQ_LEN_TO_CAPTURE=2048 From bb9ec6e5d2a810ac054e7dbfdbb5ad9601ba50f4 Mon Sep 17 00:00:00 2001 From: Zhu Yongbo Date: Fri, 16 May 2025 10:06:46 +0800 Subject: [PATCH 052/217] fix EdgeCraftRAG UI image build bug (#1964) Signed-off-by: Yongbozzz --- EdgeCraftRAG/ui/vue/components.d.ts | 3 --- EdgeCraftRAG/ui/vue/package.json | 5 ++++- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/EdgeCraftRAG/ui/vue/components.d.ts b/EdgeCraftRAG/ui/vue/components.d.ts index 64ca06335b..05af641f82 100644 --- a/EdgeCraftRAG/ui/vue/components.d.ts +++ b/EdgeCraftRAG/ui/vue/components.d.ts @@ -18,12 +18,10 @@ declare module 'vue' { AConfigProvider: typeof import('ant-design-vue/es')['ConfigProvider'] ADescriptions: typeof import('ant-design-vue/es')['Descriptions'] ADescriptionsItem: typeof import('ant-design-vue/es')['DescriptionsItem'] - ADivider: typeof import('ant-design-vue/es')['Divider'] ADrawer: typeof import('ant-design-vue/es')['Drawer'] AEmpty: typeof import('ant-design-vue/es')['Empty'] AForm: typeof import('ant-design-vue/es')['Form'] AFormItem: typeof import('ant-design-vue/es')['FormItem'] - AImage: typeof import('ant-design-vue/es')['Image'] AInput: typeof 
import('ant-design-vue/es')['Input'] AInputNumber: typeof import('ant-design-vue/es')['InputNumber'] ALayout: typeof import('ant-design-vue/es')['Layout'] @@ -31,7 +29,6 @@ declare module 'vue' { ALayoutHeader: typeof import('ant-design-vue/es')['LayoutHeader'] AModal: typeof import('ant-design-vue/es')['Modal'] APagination: typeof import('ant-design-vue/es')['Pagination'] - APopover: typeof import('ant-design-vue/es')['Popover'] ARadio: typeof import('ant-design-vue/es')['Radio'] ARadioGroup: typeof import('ant-design-vue/es')['RadioGroup'] ARow: typeof import('ant-design-vue/es')['Row'] diff --git a/EdgeCraftRAG/ui/vue/package.json b/EdgeCraftRAG/ui/vue/package.json index 0c3e32bc11..8a215ec138 100644 --- a/EdgeCraftRAG/ui/vue/package.json +++ b/EdgeCraftRAG/ui/vue/package.json @@ -12,13 +12,16 @@ "@vueuse/i18n": "^4.0.0-beta.12", "ant-design-vue": "^4.0.0-rc.6", "axios": "^1.7.9", + "clipboard": "^2.0.11", + "dayjs": "^1.11.13", "echarts": "^5.5.1", "event-source-polyfill": "^1.0.31", + "highlight.js": "^11.11.1", "http": "^0.0.1-security", "js-cookie": "^3.0.5", "lodash": "^4.17.21", "marked": "^15.0.6", - "pinia": "^2.3.0", + "pinia": "^3.0.2", "pinia-plugin-persistedstate": "^4.2.0", "qs": "^6.13.1", "socket.io-client": "^4.8.1", From 7f55b5a10009a34b18f6c8f2eb80a6877c704bc0 Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Fri, 16 May 2025 14:44:17 +0800 Subject: [PATCH 053/217] Specify image build list for VisualQnA (#1967) Signed-off-by: chensuyue --- .github/workflows/_run-docker-compose.yml | 10 +++++ ChatQnA/tests/test_compose_mariadb_on_xeon.sh | 2 +- VisualQnA/Dockerfile | 3 +- VisualQnA/docker_image_build/build.yaml | 15 +++++++ VisualQnA/tests/test_compose_on_gaudi.sh | 37 ++++++++++++----- VisualQnA/tests/test_compose_on_rocm.sh | 37 ++++++++++------- VisualQnA/tests/test_compose_on_xeon.sh | 40 ++++++++++++++----- VisualQnA/tests/test_compose_tgi_on_gaudi.sh | 39 ++++++++++-------- VisualQnA/tests/test_compose_tgi_on_xeon.sh | 39 
++++++++++-------- VisualQnA/tests/test_compose_vllm_on_rocm.sh | 36 ++++++++++------- 10 files changed, 176 insertions(+), 82 deletions(-) diff --git a/.github/workflows/_run-docker-compose.yml b/.github/workflows/_run-docker-compose.yml index 24879a9759..fa9b560c09 100644 --- a/.github/workflows/_run-docker-compose.yml +++ b/.github/workflows/_run-docker-compose.yml @@ -204,6 +204,10 @@ jobs: if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi echo "Cleaning up images ..." + df -h + sleep 1 + docker system df + sleep 1 if [[ "${{ inputs.hardware }}" == "xeon"* ]]; then docker system prune -a -f else @@ -213,7 +217,13 @@ jobs: docker images --filter reference="opea/comps-base" -q | xargs -r docker rmi && sleep 1s docker system prune -f fi + sleep 5 docker images + sleep 1 + df -h + sleep 1 + docker system df + sleep 1 - name: Publish pipeline artifact if: ${{ !cancelled() }} diff --git a/ChatQnA/tests/test_compose_mariadb_on_xeon.sh b/ChatQnA/tests/test_compose_mariadb_on_xeon.sh index 45f5f99e47..412e32626a 100644 --- a/ChatQnA/tests/test_compose_mariadb_on_xeon.sh +++ b/ChatQnA/tests/test_compose_mariadb_on_xeon.sh @@ -140,7 +140,7 @@ function validate_megaservice() { function stop_docker() { cd $WORKPATH/docker_compose/intel/cpu/xeon - docker compose down + docker compose -f compose_mariadb.yaml down } function main() { diff --git a/VisualQnA/Dockerfile b/VisualQnA/Dockerfile index 95936d9c03..988215be2e 100644 --- a/VisualQnA/Dockerfile +++ b/VisualQnA/Dockerfile @@ -1,8 +1,9 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 +ARG IMAGE_REPO=opea ARG BASE_TAG=latest -FROM opea/comps-base:$BASE_TAG +FROM $IMAGE_REPO/comps-base:$BASE_TAG COPY ./visualqna.py $HOME/visualqna.py diff --git a/VisualQnA/docker_image_build/build.yaml b/VisualQnA/docker_image_build/build.yaml index 11fab7106f..e8b1240040 100644 --- a/VisualQnA/docker_image_build/build.yaml +++ b/VisualQnA/docker_image_build/build.yaml @@ -5,6 +5,8 
@@ services: visualqna: build: args: + IMAGE_REPO: ${REGISTRY:-opea} + BASE_TAG: ${TAG:-latest} http_proxy: ${http_proxy} https_proxy: ${https_proxy} no_proxy: ${no_proxy} @@ -33,4 +35,17 @@ services: build: context: GenAIComps dockerfile: comps/third_parties/vllm/src/Dockerfile.amd_gpu + extends: visualqna image: ${REGISTRY:-opea}/vllm-rocm:${TAG:-latest} + vllm: + build: + context: vllm + dockerfile: docker/Dockerfile.cpu + extends: visualqna + image: ${REGISTRY:-opea}/vllm:${TAG:-latest} + vllm-gaudi: + build: + context: vllm-fork + dockerfile: Dockerfile.hpu + extends: visualqna + image: ${REGISTRY:-opea}/vllm-gaudi:${TAG:-latest} diff --git a/VisualQnA/tests/test_compose_on_gaudi.sh b/VisualQnA/tests/test_compose_on_gaudi.sh index 3fbc8e0adc..237ad3b7dd 100644 --- a/VisualQnA/tests/test_compose_on_gaudi.sh +++ b/VisualQnA/tests/test_compose_on_gaudi.sh @@ -18,15 +18,20 @@ LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { + opea_branch=${opea_branch:-"main"} cd $WORKPATH/docker_image_build - git clone --depth 1 --branch main https://github.com/opea-project/GenAIComps.git - docker compose -f build.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log + git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s - git clone https://github.com/HabanaAI/vllm-fork.git - cd ./vllm-fork/ - docker build -f Dockerfile.hpu -t opea/vllm-gaudi:${TAG} --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy - cd .. 
- rm -rf vllm-fork + git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork + VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 + git checkout ${VLLM_FORK_VER} &> /dev/null && cd ../ + + service_list="visualqna visualqna-ui lvm nginx vllm-gaudi" + docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log docker images && sleep 1s } @@ -186,17 +191,31 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice - #validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/VisualQnA/tests/test_compose_on_rocm.sh b/VisualQnA/tests/test_compose_on_rocm.sh index dcdf4e4719..ea53c0ab31 100644 --- a/VisualQnA/tests/test_compose_on_rocm.sh +++ b/VisualQnA/tests/test_compose_on_rocm.sh @@ -38,24 +38,17 @@ export MODEL_CACHE=${model_cache:-"/var/opea/multimodalqna-service/data"} function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. - if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . 
-type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi - cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s echo "Build all the images with --no-cache, check docker_image_build.log for details..." - docker compose -f build.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log + service_list="visualqna visualqna-ui lvm nginx" + docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-generation-inference:2.4.1-rocm docker images && sleep 1s } @@ -209,17 +202,31 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice - #validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/VisualQnA/tests/test_compose_on_xeon.sh b/VisualQnA/tests/test_compose_on_xeon.sh index 0e645c324b..bd5a9c5aa2 100644 --- a/VisualQnA/tests/test_compose_on_xeon.sh +++ b/VisualQnA/tests/test_compose_on_xeon.sh @@ -17,22 +17,28 @@ LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { + opea_branch=${opea_branch:-"main"} cd $WORKPATH/docker_image_build - git 
clone --depth 1 --branch main https://github.com/opea-project/GenAIComps.git - docker compose -f build.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log - - docker pull opea/vllm:latest - docker tag opea/vllm:latest opea/vllm:${TAG} + git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s + git clone https://github.com/vllm-project/vllm.git && cd vllm + VLLM_VER="v0.8.3" + echo "Check out vLLM tag ${VLLM_VER}" + git checkout ${VLLM_VER} &> /dev/null + cd ../ + + service_list="visualqna visualqna-ui lvm nginx vllm" + docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log docker images && sleep 1s } function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon/ - source ./set_env.sh - sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env - # Start Docker Containers docker compose up -d > ${LOG_PATH}/start_services_with_compose.log @@ -179,17 +185,31 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice - #validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/VisualQnA/tests/test_compose_tgi_on_gaudi.sh b/VisualQnA/tests/test_compose_tgi_on_gaudi.sh index 913d6ed527..495f764814 100644 --- 
a/VisualQnA/tests/test_compose_tgi_on_gaudi.sh +++ b/VisualQnA/tests/test_compose_tgi_on_gaudi.sh @@ -17,24 +17,17 @@ ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. - if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . -type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi - cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s echo "Build all the images with --no-cache, check docker_image_build.log for details..." 
- docker compose -f build.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log + service_list="visualqna visualqna-ui lvm nginx" + docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/tgi-gaudi:2.0.6 docker images && sleep 1s } @@ -200,22 +193,36 @@ function validate_frontend() { function stop_docker() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - docker compose stop && docker compose rm -f + docker compose -f compose_tgi.yaml down } function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice - # validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/VisualQnA/tests/test_compose_tgi_on_xeon.sh b/VisualQnA/tests/test_compose_tgi_on_xeon.sh index d6311719d0..a8cc02a23e 100644 --- a/VisualQnA/tests/test_compose_tgi_on_xeon.sh +++ b/VisualQnA/tests/test_compose_tgi_on_xeon.sh @@ -17,24 +17,17 @@ ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. - if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . 
-type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi - cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s echo "Build all the images with --no-cache, check docker_image_build.log for details..." - docker compose -f build.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log + service_list="visualqna visualqna-ui lvm nginx" + docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-generation-inference:2.4.0-intel-cpu docker images && sleep 1s } @@ -200,22 +193,36 @@ function validate_frontend() { function stop_docker() { cd $WORKPATH/docker_compose/intel/cpu/xeon/ - docker compose stop && docker compose rm -f + docker compose -f compose_tgi.yaml down } function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice - # validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/VisualQnA/tests/test_compose_vllm_on_rocm.sh b/VisualQnA/tests/test_compose_vllm_on_rocm.sh index abf014631c..606bb9e254 100644 --- a/VisualQnA/tests/test_compose_vllm_on_rocm.sh +++ b/VisualQnA/tests/test_compose_vllm_on_rocm.sh @@ -37,22 
+37,16 @@ export MODEL_CACHE=${model_cache:-"/var/opea/multimodalqna-service/data"} function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. - if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . -type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi - cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s echo "Build all the images with --no-cache, check docker_image_build.log for details..." 
- docker compose -f build.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log + service_list="visualqna visualqna-ui lvm nginx vllm-rocm" + docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log docker images && sleep 1s } @@ -207,17 +201,31 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice - #validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } From 11b04b38db1a28fbb5adcdb0f6d92c76fafa0f10 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Fri, 16 May 2025 15:09:07 +0800 Subject: [PATCH 054/217] Integrate SearchQnA set_env to ut scripts. (#1950) Integrate SearchQnA set_env to ut scripts. Add README.md for UT scripts. 
Signed-off-by: ZePan110 --- .../docker_compose/amd/gpu/rocm/set_env.sh | 8 ++--- .../amd/gpu/rocm/set_env_vllm.sh | 10 +++--- .../docker_compose/{ => intel}/set_env.sh | 6 ++-- SearchQnA/tests/README.md | 33 +++++++++++++++++++ SearchQnA/tests/test_compose_on_gaudi.sh | 27 ++------------- SearchQnA/tests/test_compose_on_rocm.sh | 31 +---------------- SearchQnA/tests/test_compose_on_xeon.sh | 27 ++------------- SearchQnA/tests/test_compose_vllm_on_rocm.sh | 32 ++---------------- 8 files changed, 55 insertions(+), 119 deletions(-) rename SearchQnA/docker_compose/{ => intel}/set_env.sh (82%) create mode 100644 SearchQnA/tests/README.md diff --git a/SearchQnA/docker_compose/amd/gpu/rocm/set_env.sh b/SearchQnA/docker_compose/amd/gpu/rocm/set_env.sh index faedeb3f54..3d84e01fcf 100644 --- a/SearchQnA/docker_compose/amd/gpu/rocm/set_env.sh +++ b/SearchQnA/docker_compose/amd/gpu/rocm/set_env.sh @@ -2,8 +2,8 @@ # Copyright (C) 2025 Advanced Micro Devices, Inc. -export HOST_IP='' -export EXTERNAL_HOST_IP='' +export HOST_IP=${ip_address} +export EXTERNAL_HOST_IP=${ip_address} export SEARCH_EMBEDDING_MODEL_ID='BAAI/bge-base-en-v1.5' export SEARCH_GOOGLE_API_KEY=${GOOGLE_API_KEY} @@ -12,9 +12,9 @@ export SEARCH_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export SEARCH_LLM_MODEL_ID='Intel/neural-chat-7b-v3-3' export SEARCH_RERANK_MODEL_ID='BAAI/bge-reranker-base' -export SEARCH_BACKEND_SERVICE_PORT=18142 +export SEARCH_BACKEND_SERVICE_PORT=3008 export SEARCH_EMBEDDING_SERVICE_PORT=3002 -export SEARCH_FRONTEND_SERVICE_PORT=18143 +export SEARCH_FRONTEND_SERVICE_PORT=5173 export SEARCH_LLM_SERVICE_PORT=3007 export SEARCH_RERANK_SERVICE_PORT=3005 export SEARCH_TEI_EMBEDDING_PORT=3001 diff --git a/SearchQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/SearchQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh index d59d242d38..a891cce2a0 100644 --- a/SearchQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/SearchQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -2,8 +2,8 @@ 
# Copyright (C) 2025 Advanced Micro Devices, Inc. -export HOST_IP='' -export EXTERNAL_HOST_IP='' +export HOST_IP=${ip_address} +export EXTERNAL_HOST_IP=${ip_address} export SEARCH_EMBEDDING_MODEL_ID='BAAI/bge-base-en-v1.5' export SEARCH_GOOGLE_API_KEY=${GOOGLE_API_KEY} @@ -12,11 +12,11 @@ export SEARCH_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export SEARCH_LLM_MODEL_ID='Intel/neural-chat-7b-v3-3' export SEARCH_RERANK_MODEL_ID='BAAI/bge-reranker-base' -export MODEL_PATH="./data" +export MODEL_CACHE="./data" -export SEARCH_BACKEND_SERVICE_PORT=18142 +export SEARCH_BACKEND_SERVICE_PORT=3008 export SEARCH_EMBEDDING_SERVICE_PORT=3002 -export SEARCH_FRONTEND_SERVICE_PORT=18143 +export SEARCH_FRONTEND_SERVICE_PORT=5173 export SEARCH_LLM_SERVICE_PORT=3007 export SEARCH_RERANK_SERVICE_PORT=3005 export SEARCH_TEI_EMBEDDING_PORT=3001 diff --git a/SearchQnA/docker_compose/set_env.sh b/SearchQnA/docker_compose/intel/set_env.sh similarity index 82% rename from SearchQnA/docker_compose/set_env.sh rename to SearchQnA/docker_compose/intel/set_env.sh index 232dcf7281..45aaa7eb48 100644 --- a/SearchQnA/docker_compose/set_env.sh +++ b/SearchQnA/docker_compose/intel/set_env.sh @@ -2,11 +2,13 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -pushd "../../" > /dev/null +pushd "../../../" > /dev/null source .set_env.sh popd > /dev/null - +export GOOGLE_CSE_ID=$GOOGLE_CSE_ID +export GOOGLE_API_KEY=$GOOGLE_API_KEY +export HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN export EMBEDDING_MODEL_ID=BAAI/bge-base-en-v1.5 export TEI_EMBEDDING_ENDPOINT=http://${host_ip}:3001 export RERANK_MODEL_ID=BAAI/bge-reranker-base diff --git a/SearchQnA/tests/README.md b/SearchQnA/tests/README.md new file mode 100644 index 0000000000..4dd235fbb6 --- /dev/null +++ b/SearchQnA/tests/README.md @@ -0,0 +1,33 @@ +# SearchQnA E2E test scripts + +## Set the required environment variable + +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +## 
Run test + +On Intel Xeon with TGI: + +```bash +bash test_compose_on_xeon.sh +``` + +On Intel Gaudi with TGI: + +```bash +bash test_compose_on_gaudi.sh +``` + +On AMD ROCm with TGI: + +```bash +bash test_compose_on_rocm.sh +``` + +On AMD ROCm with vLLM: + +```bash +bash test_compose_vllm_on_rocm.sh +``` diff --git a/SearchQnA/tests/test_compose_on_gaudi.sh b/SearchQnA/tests/test_compose_on_gaudi.sh index b3e5286299..0e4952fd4b 100644 --- a/SearchQnA/tests/test_compose_on_gaudi.sh +++ b/SearchQnA/tests/test_compose_on_gaudi.sh @@ -43,34 +43,13 @@ function build_docker_images() { function start_services() { - cd $WORKPATH/docker_compose/intel/hpu/gaudi - export GOOGLE_CSE_ID=$GOOGLE_CSE_ID - export GOOGLE_API_KEY=$GOOGLE_API_KEY - export HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN - - export EMBEDDING_MODEL_ID=BAAI/bge-base-en-v1.5 - export TEI_EMBEDDING_ENDPOINT=http://$ip_address:3001 - export RERANK_MODEL_ID=BAAI/bge-reranker-base + cd $WORKPATH/docker_compose/intel/ export RERANK_TYPE="tei" - export TEI_RERANKING_ENDPOINT=http://$ip_address:3004 - - export TGI_LLM_ENDPOINT=http://$ip_address:3006 - export LLM_MODEL_ID=Intel/neural-chat-7b-v3-3 - - export MEGA_SERVICE_HOST_IP=${ip_address} - export EMBEDDING_SERVICE_HOST_IP=${ip_address} - export WEB_RETRIEVER_SERVICE_HOST_IP=${ip_address} - export RERANK_SERVICE_HOST_IP=${ip_address} - export LLM_SERVICE_HOST_IP=${ip_address} - - export EMBEDDING_SERVICE_PORT=3002 - export WEB_RETRIEVER_SERVICE_PORT=3003 - export RERANK_SERVICE_PORT=3005 - export LLM_SERVICE_PORT=3007 export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:3008/v1/searchqna" export host_ip=${ip_address} export LOGFLAG=true - + source ./set_env.sh + cd hpu/gaudi sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/SearchQnA/tests/test_compose_on_rocm.sh b/SearchQnA/tests/test_compose_on_rocm.sh index 4ab67a6619..a822ff1823 100644 --- a/SearchQnA/tests/test_compose_on_rocm.sh +++ 
b/SearchQnA/tests/test_compose_on_rocm.sh @@ -30,36 +30,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/amd/gpu/rocm/ - - export HOST_IP=${ip_address} - export EXTERNAL_HOST_IP=${ip_address} - - export SEARCH_EMBEDDING_MODEL_ID='BAAI/bge-base-en-v1.5' - export SEARCH_GOOGLE_API_KEY=${GOOGLE_API_KEY} - export SEARCH_GOOGLE_CSE_ID=${GOOGLE_CSE_ID} - export SEARCH_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export SEARCH_LLM_MODEL_ID='Intel/neural-chat-7b-v3-3' - export SEARCH_RERANK_MODEL_ID='BAAI/bge-reranker-base' - - export SEARCH_BACKEND_SERVICE_PORT=3008 - export SEARCH_EMBEDDING_SERVICE_PORT=3002 - export SEARCH_FRONTEND_SERVICE_PORT=5173 - export SEARCH_LLM_SERVICE_PORT=3007 - export SEARCH_RERANK_SERVICE_PORT=3005 - export SEARCH_TEI_EMBEDDING_PORT=3001 - export SEARCH_TEI_RERANKING_PORT=3004 - export SEARCH_TGI_SERVICE_PORT=3006 - export SEARCH_WEB_RETRIEVER_SERVICE_PORT=3003 - - export SEARCH_BACKEND_SERVICE_ENDPOINT=http://${EXTERNAL_HOST_IP}:${SEARCH_BACKEND_SERVICE_PORT}/v1/searchqna - export SEARCH_EMBEDDING_SERVICE_HOST_IP=${HOST_IP} - export SEARCH_LLM_SERVICE_HOST_IP=${HOST_IP} - export SEARCH_MEGA_SERVICE_HOST_IP=${HOST_IP} - export SEARCH_RERANK_SERVICE_HOST_IP=${HOST_IP} - export SEARCH_TEI_EMBEDDING_ENDPOINT=http://${HOST_IP}:${SEARCH_TEI_EMBEDDING_PORT} - export SEARCH_TEI_RERANKING_ENDPOINT=http://${HOST_IP}:${SEARCH_TEI_RERANKING_PORT} - export SEARCH_TGI_LLM_ENDPOINT=http://${HOST_IP}:${SEARCH_TGI_SERVICE_PORT} - export SEARCH_WEB_RETRIEVER_SERVICE_HOST_IP=${HOST_IP} + source ./set_env.sh sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/SearchQnA/tests/test_compose_on_xeon.sh b/SearchQnA/tests/test_compose_on_xeon.sh index df5214b91e..408048060c 100644 --- a/SearchQnA/tests/test_compose_on_xeon.sh +++ b/SearchQnA/tests/test_compose_on_xeon.sh @@ -41,34 +41,13 @@ function build_docker_images() { } function start_services() { - cd 
$WORKPATH/docker_compose/intel/cpu/xeon/ - export GOOGLE_CSE_ID=$GOOGLE_CSE_ID - export GOOGLE_API_KEY=$GOOGLE_API_KEY - export HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN - - export EMBEDDING_MODEL_ID=BAAI/bge-base-en-v1.5 - export TEI_EMBEDDING_ENDPOINT=http://$ip_address:3001 - export RERANK_MODEL_ID=BAAI/bge-reranker-base + cd $WORKPATH/docker_compose/intel/ export RERANK_TYPE="tei" - export TEI_RERANKING_ENDPOINT=http://$ip_address:3004 - - export TGI_LLM_ENDPOINT=http://$ip_address:3006 - export LLM_MODEL_ID=Intel/neural-chat-7b-v3-3 - - export MEGA_SERVICE_HOST_IP=${ip_address} - export EMBEDDING_SERVICE_HOST_IP=${ip_address} - export WEB_RETRIEVER_SERVICE_HOST_IP=${ip_address} - export RERANK_SERVICE_HOST_IP=${ip_address} - export LLM_SERVICE_HOST_IP=${ip_address} - - export EMBEDDING_SERVICE_PORT=3002 - export WEB_RETRIEVER_SERVICE_PORT=3003 - export RERANK_SERVICE_PORT=3005 - export LLM_SERVICE_PORT=3007 export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:3008/v1/searchqna" export host_ip=${ip_address} export LOGFLAG=true - + source ./set_env.sh + cd cpu/xeon sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/SearchQnA/tests/test_compose_vllm_on_rocm.sh b/SearchQnA/tests/test_compose_vllm_on_rocm.sh index 530245cdcb..92de3f9e00 100644 --- a/SearchQnA/tests/test_compose_vllm_on_rocm.sh +++ b/SearchQnA/tests/test_compose_vllm_on_rocm.sh @@ -31,36 +31,8 @@ function start_services() { cd $WORKPATH/docker_compose/amd/gpu/rocm/ export HOST_IP=${ip_address} - export EXTERNAL_HOST_IP=${ip_address} - - export SEARCH_EMBEDDING_MODEL_ID='BAAI/bge-base-en-v1.5' - export SEARCH_GOOGLE_API_KEY=${GOOGLE_API_KEY} - export SEARCH_GOOGLE_CSE_ID=${GOOGLE_CSE_ID} - export SEARCH_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export SEARCH_LLM_MODEL_ID='Intel/neural-chat-7b-v3-3' - export SEARCH_RERANK_MODEL_ID='BAAI/bge-reranker-base' - - export MODEL_CACHE="./data" - - export SEARCH_BACKEND_SERVICE_PORT=3008 - export 
SEARCH_EMBEDDING_SERVICE_PORT=3002 - export SEARCH_FRONTEND_SERVICE_PORT=5173 - export SEARCH_LLM_SERVICE_PORT=3007 - export SEARCH_RERANK_SERVICE_PORT=3005 - export SEARCH_TEI_EMBEDDING_PORT=3001 - export SEARCH_TEI_RERANKING_PORT=3004 - export SEARCH_VLLM_SERVICE_PORT=3080 - export SEARCH_WEB_RETRIEVER_SERVICE_PORT=3003 - - export SEARCH_BACKEND_SERVICE_ENDPOINT=http://${EXTERNAL_HOST_IP}:${SEARCH_BACKEND_SERVICE_PORT}/v1/searchqna - export SEARCH_EMBEDDING_SERVICE_HOST_IP=${HOST_IP} - export SEARCH_LLM_ENDPOINT=http://${HOST_IP}:${SEARCH_VLLM_SERVICE_PORT} - export SEARCH_LLM_SERVICE_HOST_IP=${HOST_IP} - export SEARCH_MEGA_SERVICE_HOST_IP=${HOST_IP} - export SEARCH_RERANK_SERVICE_HOST_IP=${HOST_IP} - export SEARCH_TEI_EMBEDDING_ENDPOINT=http://${HOST_IP}:${SEARCH_TEI_EMBEDDING_PORT} - export SEARCH_TEI_RERANKING_ENDPOINT=http://${HOST_IP}:${SEARCH_TEI_RERANKING_PORT} - export SEARCH_WEB_RETRIEVER_SERVICE_HOST_IP=${HOST_IP} + # export SEARCH_BACKEND_SERVICE_PORT=3008 + source ./set_env_vllm.sh sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env From 7ee6f3657cc9ecfe73ab4539c4a5cbf8a6f923c2 Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Fri, 16 May 2025 15:16:02 +0800 Subject: [PATCH 055/217] [CICD enhance] DocIndexRetriever run CI with latest base image, group logs in GHA outputs. 
(#1932) Signed-off-by: chensuyue --- DocIndexRetriever/Dockerfile | 3 +- .../docker_image_build/build.yaml | 2 ++ .../tests/test_compose_milvus_on_gaudi.sh | 34 +++++++++++-------- .../tests/test_compose_milvus_on_xeon.sh | 33 +++++++++++------- .../tests/test_compose_on_gaudi.sh | 29 ++++++++++------ .../tests/test_compose_on_xeon.sh | 30 ++++++++++------ .../test_compose_without_rerank_on_xeon.sh | 27 +++++++++++---- 7 files changed, 102 insertions(+), 56 deletions(-) diff --git a/DocIndexRetriever/Dockerfile b/DocIndexRetriever/Dockerfile index 06fb1dc016..a9aa823f5c 100644 --- a/DocIndexRetriever/Dockerfile +++ b/DocIndexRetriever/Dockerfile @@ -1,8 +1,9 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 +ARG IMAGE_REPO=opea ARG BASE_TAG=latest -FROM opea/comps-base:$BASE_TAG +FROM $IMAGE_REPO/comps-base:$BASE_TAG COPY ./retrieval_tool.py $HOME/retrieval_tool.py diff --git a/DocIndexRetriever/docker_image_build/build.yaml b/DocIndexRetriever/docker_image_build/build.yaml index 80753e8946..bfaaf51d34 100644 --- a/DocIndexRetriever/docker_image_build/build.yaml +++ b/DocIndexRetriever/docker_image_build/build.yaml @@ -5,6 +5,8 @@ services: doc-index-retriever: build: args: + IMAGE_REPO: ${REGISTRY:-opea} + BASE_TAG: ${TAG:-latest} http_proxy: ${http_proxy} https_proxy: ${https_proxy} no_proxy: ${no_proxy} diff --git a/DocIndexRetriever/tests/test_compose_milvus_on_gaudi.sh b/DocIndexRetriever/tests/test_compose_milvus_on_gaudi.sh index e0fc2b01ee..e423c8b7f0 100644 --- a/DocIndexRetriever/tests/test_compose_milvus_on_gaudi.sh +++ b/DocIndexRetriever/tests/test_compose_milvus_on_gaudi.sh @@ -20,17 +20,15 @@ function build_docker_images() { if [ ! 
-d "GenAIComps" ] ; then git clone --single-branch --branch "${opea_branch:-"main"}" https://github.com/opea-project/GenAIComps.git fi + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s + service_list="dataprep embedding retriever reranking doc-index-retriever" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 - docker pull ghcr.io/huggingface/tei-gaudi:1.5.0 - docker pull quay.io/coreos/etcd:v3.5.5 - docker pull minio/minio:RELEASE.2023-03-20T20-16-18Z - docker pull milvusdb/milvus:v2.4.6 docker images && sleep 1s - - echo "Docker images built!" } function start_services() { @@ -112,19 +110,27 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi - echo "Dump current docker ps" - docker ps - start_time=$(date +%s) + echo "::endgroup::" + + echo "::group::start_services" start_services - end_time=$(date +%s) - duration=$((end_time-start_time)) - echo "Mega service start duration is $duration s" + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/DocIndexRetriever/tests/test_compose_milvus_on_xeon.sh b/DocIndexRetriever/tests/test_compose_milvus_on_xeon.sh index f8942173c4..b85e939fa5 100755 --- a/DocIndexRetriever/tests/test_compose_milvus_on_xeon.sh +++ b/DocIndexRetriever/tests/test_compose_milvus_on_xeon.sh @@ -20,16 +20,15 @@ function build_docker_images() { if [ ! 
-d "GenAIComps" ] ; then git clone --single-branch --branch "${opea_branch:-"main"}" https://github.com/opea-project/GenAIComps.git fi + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s + service_list="dataprep embedding retriever reranking doc-index-retriever" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 - docker pull quay.io/coreos/etcd:v3.5.5 - docker pull minio/minio:RELEASE.2023-03-20T20-16-18Z - docker pull milvusdb/milvus:v2.4.6 docker images && sleep 1s - - echo "Docker images built!" } function start_services() { @@ -111,19 +110,27 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi - echo "Dump current docker ps" - docker ps - start_time=$(date +%s) + echo "::endgroup::" + + echo "::group::start_services" start_services - end_time=$(date +%s) - duration=$((end_time-start_time)) - echo "Mega service start duration is $duration s" + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/DocIndexRetriever/tests/test_compose_on_gaudi.sh b/DocIndexRetriever/tests/test_compose_on_gaudi.sh index 596fbd8f4b..11541eca1e 100644 --- a/DocIndexRetriever/tests/test_compose_on_gaudi.sh +++ b/DocIndexRetriever/tests/test_compose_on_gaudi.sh @@ -21,14 +21,15 @@ function build_docker_images() { if [ ! 
-d "GenAIComps" ] ; then git clone --single-branch --branch "${opea_branch:-"main"}" https://github.com/opea-project/GenAIComps.git fi + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s echo "Build all the images with --no-cache, check docker_image_build.log for details..." docker compose -f build.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull redis/redis-stack:7.2.0-v9 - docker pull ghcr.io/huggingface/tei-gaudi:1.5.0 docker images && sleep 1s - echo "Docker images built!" } function start_services() { @@ -103,19 +104,27 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi - echo "Dump current docker ps" - docker ps - start_time=$(date +%s) + echo "::endgroup::" + + echo "::group::start_services" start_services - end_time=$(date +%s) - duration=$((end_time-start_time)) - echo "Mega service start duration is $duration s" + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/DocIndexRetriever/tests/test_compose_on_xeon.sh b/DocIndexRetriever/tests/test_compose_on_xeon.sh index aab0cb4c60..229e47efea 100644 --- a/DocIndexRetriever/tests/test_compose_on_xeon.sh +++ b/DocIndexRetriever/tests/test_compose_on_xeon.sh @@ -21,14 +21,15 @@ function build_docker_images() { if [ ! 
-d "GenAIComps" ] ; then git clone --single-branch --branch "${opea_branch:-"main"}" https://github.com/opea-project/GenAIComps.git fi + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s + service_list="dataprep embedding retriever reranking doc-index-retriever" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 - docker pull redis/redis-stack:7.2.0-v9 docker images && sleep 1s - - echo "Docker images built!" } function start_services() { @@ -110,20 +111,27 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi - echo "Dump current docker ps" - docker ps + echo "::endgroup::" - start_time=$(date +%s) + echo "::group::start_services" start_services - end_time=$(date +%s) - duration=$((end_time-start_time)) - echo "Mega service start duration is $duration s" + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } diff --git a/DocIndexRetriever/tests/test_compose_without_rerank_on_xeon.sh b/DocIndexRetriever/tests/test_compose_without_rerank_on_xeon.sh index c0e32c4e93..dde5d84ef9 100644 --- a/DocIndexRetriever/tests/test_compose_without_rerank_on_xeon.sh +++ b/DocIndexRetriever/tests/test_compose_without_rerank_on_xeon.sh @@ -21,6 +21,11 @@ function build_docker_images() { if [ ! 
-d "GenAIComps" ] ; then git clone --single-branch --branch "${opea_branch:-"main"}" https://github.com/opea-project/GenAIComps.git fi + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s + service_list="dataprep embedding retriever doc-index-retriever" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log @@ -114,19 +119,27 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi - echo "Dump current docker ps" - docker ps - start_time=$(date +%s) + echo "::endgroup::" + + echo "::group::start_services" start_services - end_time=$(date +%s) - duration=$((end_time-start_time)) - echo "Mega service start duration is $duration s" + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune + echo "::endgroup::" + + docker system prune -f } From c8abbc49587ffcdb11ea7a01429ad31607066822 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Fri, 16 May 2025 15:16:38 +0800 Subject: [PATCH 056/217] Integrate ProductivitySuite set_env to ut scripts and add README.md for UT scripts. 
(#1966) Signed-off-by: ZePan110 Co-authored-by: Ying Hu --- .../docker_compose/intel/cpu/xeon/README.md | 1 - .../docker_compose/intel/cpu/xeon/set_env.sh | 1 - ProductivitySuite/tests/README.md | 15 +++++++++++ .../tests/test_compose_on_xeon.sh | 25 ++----------------- 4 files changed, 17 insertions(+), 25 deletions(-) create mode 100644 ProductivitySuite/tests/README.md diff --git a/ProductivitySuite/docker_compose/intel/cpu/xeon/README.md b/ProductivitySuite/docker_compose/intel/cpu/xeon/README.md index af63141330..91921c8c23 100644 --- a/ProductivitySuite/docker_compose/intel/cpu/xeon/README.md +++ b/ProductivitySuite/docker_compose/intel/cpu/xeon/README.md @@ -137,7 +137,6 @@ export BACKEND_SERVICE_ENDPOINT_DOCSUM="http://${host_ip}:8890/v1/docsum" export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest" export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/get" export CHAT_HISTORY_CREATE_ENDPOINT="http://${host_ip}:6012/v1/chathistory/create" -export CHAT_HISTORY_CREATE_ENDPOINT="http://${host_ip}:6012/v1/chathistory/create" export CHAT_HISTORY_DELETE_ENDPOINT="http://${host_ip}:6012/v1/chathistory/delete" export CHAT_HISTORY_GET_ENDPOINT="http://${host_ip}:6012/v1/chathistory/get" export PROMPT_SERVICE_GET_ENDPOINT="http://${host_ip}:6018/v1/prompt/get" diff --git a/ProductivitySuite/docker_compose/intel/cpu/xeon/set_env.sh b/ProductivitySuite/docker_compose/intel/cpu/xeon/set_env.sh index 04d53e3639..a70561f28b 100755 --- a/ProductivitySuite/docker_compose/intel/cpu/xeon/set_env.sh +++ b/ProductivitySuite/docker_compose/intel/cpu/xeon/set_env.sh @@ -18,7 +18,6 @@ export BACKEND_SERVICE_ENDPOINT_DOCSUM="http://${host_ip}:8890/v1/docsum" export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/ingest" export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/get" export CHAT_HISTORY_CREATE_ENDPOINT="http://${host_ip}:6012/v1/chathistory/create" -export 
CHAT_HISTORY_CREATE_ENDPOINT="http://${host_ip}:6012/v1/chathistory/create" export CHAT_HISTORY_DELETE_ENDPOINT="http://${host_ip}:6012/v1/chathistory/delete" export CHAT_HISTORY_GET_ENDPOINT="http://${host_ip}:6012/v1/chathistory/get" export PROMPT_SERVICE_GET_ENDPOINT="http://${host_ip}:6018/v1/prompt/get" diff --git a/ProductivitySuite/tests/README.md b/ProductivitySuite/tests/README.md new file mode 100644 index 0000000000..a7bc0ab7ce --- /dev/null +++ b/ProductivitySuite/tests/README.md @@ -0,0 +1,15 @@ +# ProductivitySuite E2E test scripts + +## Set the required environment variable + +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +## Run test + +On Intel Xeon with TGI: + +```bash +bash test_compose_on_xeon.sh +``` diff --git a/ProductivitySuite/tests/test_compose_on_xeon.sh b/ProductivitySuite/tests/test_compose_on_xeon.sh index 34d9a96691..57f903ed95 100755 --- a/ProductivitySuite/tests/test_compose_on_xeon.sh +++ b/ProductivitySuite/tests/test_compose_on_xeon.sh @@ -29,33 +29,12 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon/ - - export DB_NAME="opea" - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" - export LLM_MODEL_ID_CODEGEN="Intel/neural-chat-7b-v3-3" - export INDEX_NAME="rag-redis" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export BACKEND_SERVICE_ENDPOINT_CHATQNA="http://${ip_address}:8888/v1/chatqna" - export DATAPREP_DELETE_FILE_ENDPOINT="http://${ip_address}:6007/v1/dataprep/delete" - export BACKEND_SERVICE_ENDPOINT_CODEGEN="http://${ip_address}:7778/v1/codegen" - export BACKEND_SERVICE_ENDPOINT_DOCSUM="http://${ip_address}:8890/v1/docsum" - export DATAPREP_SERVICE_ENDPOINT="http://${ip_address}:6007/v1/dataprep/ingest" - export DATAPREP_GET_FILE_ENDPOINT="http://${ip_address}:6007/v1/dataprep/get" - export 
CHAT_HISTORY_CREATE_ENDPOINT="http://${ip_address}:6012/v1/chathistory/create" - export CHAT_HISTORY_CREATE_ENDPOINT="http://${ip_address}:6012/v1/chathistory/create" - export CHAT_HISTORY_DELETE_ENDPOINT="http://${ip_address}:6012/v1/chathistory/delete" - export CHAT_HISTORY_GET_ENDPOINT="http://${ip_address}:6012/v1/chathistory/get" - export PROMPT_SERVICE_GET_ENDPOINT="http://${ip_address}:6018/v1/prompt/get" - export PROMPT_SERVICE_CREATE_ENDPOINT="http://${ip_address}:6018/v1/prompt/create" - export PROMPT_SERVICE_DELETE_ENDPOINT="http://${ip_address}:6018/v1/prompt/delete" - export KEYCLOAK_SERVICE_ENDPOINT="http://${ip_address}:8080" - export DocSum_COMPONENT_NAME="OpeaDocSumTgi" export host_ip=${ip_address} export LOGFLAG=True export no_proxy="$no_proxy,tgi_service_codegen,llm_codegen,tei-embedding-service,tei-reranking-service,chatqna-xeon-backend-server,retriever,tgi-service,redis-vector-db,whisper,llm-docsum-tgi,docsum-xeon-backend-server,mongo,codegen" + source set_env.sh + # Start Docker Containers docker compose up -d > ${LOG_PATH}/start_services_with_compose.log sleep 30s From 22174e68a5ad00ed00cdff7a57c38956644919e7 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Fri, 16 May 2025 15:18:06 +0800 Subject: [PATCH 057/217] Integrate AvatarChatbot set_env to ut scripts. 
(#1961) Signed-off-by: ZePan110 --- .../docker_compose/amd/gpu/rocm/set_env.sh | 4 +- .../docker_compose/intel/cpu/xeon/set_env.sh | 29 ++++++++++++ .../docker_compose/intel/hpu/gaudi/set_env.sh | 32 ++++++++++++++ AvatarChatbot/tests/README.md | 27 ++++++++++++ AvatarChatbot/tests/test_compose_on_gaudi.sh | 32 +------------- AvatarChatbot/tests/test_compose_on_rocm.sh | 44 +------------------ AvatarChatbot/tests/test_compose_on_xeon.sh | 32 +------------- 7 files changed, 94 insertions(+), 106 deletions(-) create mode 100644 AvatarChatbot/tests/README.md diff --git a/AvatarChatbot/docker_compose/amd/gpu/rocm/set_env.sh b/AvatarChatbot/docker_compose/amd/gpu/rocm/set_env.sh index e6a2af0984..38d54c38f7 100644 --- a/AvatarChatbot/docker_compose/amd/gpu/rocm/set_env.sh +++ b/AvatarChatbot/docker_compose/amd/gpu/rocm/set_env.sh @@ -41,7 +41,7 @@ export FACE="/home/user/comps/animation/src/assets/img/avatar5.png" # export AUDIO='assets/audio/eg3_ref.wav' # audio file path is optional, will use base64str in the post request as input if is 'None' export AUDIO='None' export FACESIZE=96 -export OUTFILE="/outputs/result.mp4" +export OUTFILE="./outputs/result.mp4" export GFPGAN_MODEL_VERSION=1.4 # latest version, can roll back to v1.3 if needed export UPSCALE_FACTOR=1 -export FPS=10 +export FPS=5 diff --git a/AvatarChatbot/docker_compose/intel/cpu/xeon/set_env.sh b/AvatarChatbot/docker_compose/intel/cpu/xeon/set_env.sh index e4f5c207ba..49c7e4cdd7 100644 --- a/AvatarChatbot/docker_compose/intel/cpu/xeon/set_env.sh +++ b/AvatarChatbot/docker_compose/intel/cpu/xeon/set_env.sh @@ -5,3 +5,32 @@ pushd "../../../../../" > /dev/null source .set_env.sh popd > /dev/null + +export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export host_ip=$(hostname -I | awk '{print $1}') +export LLM_MODEL_ID=Intel/neural-chat-7b-v3-3 +export WAV2LIP_ENDPOINT=http://$host_ip:7860 +export MEGA_SERVICE_HOST_IP=${host_ip} +export WHISPER_SERVER_HOST_IP=${host_ip} +export 
WHISPER_SERVER_PORT=7066 +export SPEECHT5_SERVER_HOST_IP=${host_ip} +export SPEECHT5_SERVER_PORT=7055 +export LLM_SERVER_HOST_IP=${host_ip} +export LLM_SERVER_PORT=3006 +export ANIMATION_SERVICE_HOST_IP=${host_ip} +export ANIMATION_SERVICE_PORT=3008 + +export MEGA_SERVICE_PORT=8888 + +export DEVICE="cpu" +export WAV2LIP_PORT=7860 +export INFERENCE_MODE='wav2lip+gfpgan' +export CHECKPOINT_PATH='/usr/local/lib/python3.11/site-packages/Wav2Lip/checkpoints/wav2lip_gan.pth' +export FACE="/home/user/comps/animation/src/assets/img/avatar5.png" +# export AUDIO='assets/audio/eg3_ref.wav' # audio file path is optional, will use base64str in the post request as input if is 'None' +export AUDIO='None' +export FACESIZE=96 +export OUTFILE="/outputs/result.mp4" +export GFPGAN_MODEL_VERSION=1.4 # latest version, can roll back to v1.3 if needed +export UPSCALE_FACTOR=1 +export FPS=10 diff --git a/AvatarChatbot/docker_compose/intel/hpu/gaudi/set_env.sh b/AvatarChatbot/docker_compose/intel/hpu/gaudi/set_env.sh index e4f5c207ba..a55f4b4f58 100644 --- a/AvatarChatbot/docker_compose/intel/hpu/gaudi/set_env.sh +++ b/AvatarChatbot/docker_compose/intel/hpu/gaudi/set_env.sh @@ -5,3 +5,35 @@ pushd "../../../../../" > /dev/null source .set_env.sh popd > /dev/null + +export HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN +export host_ip=$(hostname -I | awk '{print $1}') + +export LLM_MODEL_ID=Intel/neural-chat-7b-v3-3 + +export WAV2LIP_ENDPOINT=http://$host_ip:7860 + +export MEGA_SERVICE_HOST_IP=${host_ip} +export WHISPER_SERVER_HOST_IP=${host_ip} +export WHISPER_SERVER_PORT=7066 +export SPEECHT5_SERVER_HOST_IP=${host_ip} +export SPEECHT5_SERVER_PORT=7055 +export LLM_SERVER_HOST_IP=${host_ip} +export LLM_SERVER_PORT=3006 +export ANIMATION_SERVICE_HOST_IP=${host_ip} +export ANIMATION_SERVICE_PORT=3008 + +export MEGA_SERVICE_PORT=8888 + +export DEVICE="hpu" +export WAV2LIP_PORT=7860 +export INFERENCE_MODE='wav2lip+gfpgan' +export 
CHECKPOINT_PATH='/usr/local/lib/python3.10/dist-packages/Wav2Lip/checkpoints/wav2lip_gan.pth' +export FACE="/home/user/comps/animation/src/assets/img/avatar1.jpg" +# export AUDIO='assets/audio/eg3_ref.wav' # audio file path is optional, will use base64str in the post request as input if is 'None' +export AUDIO='None' +export FACESIZE=96 +export OUTFILE="/outputs/result.mp4" +export GFPGAN_MODEL_VERSION=1.4 # latest version, can roll back to v1.3 if needed +export UPSCALE_FACTOR=1 +export FPS=10 diff --git a/AvatarChatbot/tests/README.md b/AvatarChatbot/tests/README.md new file mode 100644 index 0000000000..411afc28b7 --- /dev/null +++ b/AvatarChatbot/tests/README.md @@ -0,0 +1,27 @@ +# AvatarChatbot E2E test scripts + +## Set the required environment variable + +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +## Run test + +On Intel Xeon with TGI: + +```bash +bash test_compose_on_xeon.sh +``` + +On Intel Gaudi with TGI: + +```bash +bash test_compose_on_gaudi.sh +``` + +On AMD ROCm with TGI: + +```bash +bash test_compose_on_rocm.sh +``` diff --git a/AvatarChatbot/tests/test_compose_on_gaudi.sh b/AvatarChatbot/tests/test_compose_on_gaudi.sh index faf156907d..c9d693c415 100755 --- a/AvatarChatbot/tests/test_compose_on_gaudi.sh +++ b/AvatarChatbot/tests/test_compose_on_gaudi.sh @@ -45,37 +45,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - export HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN - export host_ip=$(hostname -I | awk '{print $1}') - - export LLM_MODEL_ID=Intel/neural-chat-7b-v3-3 - - export WAV2LIP_ENDPOINT=http://$host_ip:7860 - - export MEGA_SERVICE_HOST_IP=${host_ip} - export WHISPER_SERVER_HOST_IP=${host_ip} - export WHISPER_SERVER_PORT=7066 - export SPEECHT5_SERVER_HOST_IP=${host_ip} - export SPEECHT5_SERVER_PORT=7055 - export LLM_SERVER_HOST_IP=${host_ip} - export LLM_SERVER_PORT=3006 - export ANIMATION_SERVICE_HOST_IP=${host_ip} - export 
ANIMATION_SERVICE_PORT=3008 - - export MEGA_SERVICE_PORT=8888 - - export DEVICE="hpu" - export WAV2LIP_PORT=7860 - export INFERENCE_MODE='wav2lip+gfpgan' - export CHECKPOINT_PATH='/usr/local/lib/python3.10/dist-packages/Wav2Lip/checkpoints/wav2lip_gan.pth' - export FACE="/home/user/comps/animation/src/assets/img/avatar1.jpg" - # export AUDIO='assets/audio/eg3_ref.wav' # audio file path is optional, will use base64str in the post request as input if is 'None' - export AUDIO='None' - export FACESIZE=96 - export OUTFILE="/outputs/result.mp4" - export GFPGAN_MODEL_VERSION=1.4 # latest version, can roll back to v1.3 if needed - export UPSCALE_FACTOR=1 - export FPS=10 + source set_env.sh # Start Docker Containers docker compose up -d > ${LOG_PATH}/start_services_with_compose.log diff --git a/AvatarChatbot/tests/test_compose_on_rocm.sh b/AvatarChatbot/tests/test_compose_on_rocm.sh index 514921f6e2..dab4564a2d 100644 --- a/AvatarChatbot/tests/test_compose_on_rocm.sh +++ b/AvatarChatbot/tests/test_compose_on_rocm.sh @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -set -e +set -xe IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" @@ -42,48 +42,8 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/amd/gpu/rocm - - export HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN export OPENAI_API_KEY=$OPENAI_API_KEY - export host_ip=${ip_address} - - export TGI_SERVICE_PORT=3006 - export TGI_LLM_ENDPOINT=http://${host_ip}:${TGI_SERVICE_PORT} - export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" - - export ASR_ENDPOINT=http://${host_ip}:7066 - export TTS_ENDPOINT=http://${host_ip}:7055 - export WAV2LIP_ENDPOINT=http://${host_ip}:7860 - - export MEGA_SERVICE_HOST_IP=${host_ip} - export ASR_SERVICE_HOST_IP=${host_ip} - export TTS_SERVICE_HOST_IP=${host_ip} - export LLM_SERVICE_HOST_IP=${host_ip} - export ANIMATION_SERVICE_HOST_IP=${host_ip} - export 
WHISPER_SERVER_HOST_IP=${host_ip} - export WHISPER_SERVER_PORT=7066 - - export SPEECHT5_SERVER_HOST_IP=${host_ip} - export SPEECHT5_SERVER_PORT=7055 - - export MEGA_SERVICE_PORT=8888 - export ASR_SERVICE_PORT=3001 - export TTS_SERVICE_PORT=3002 - export LLM_SERVICE_PORT=3006 - export ANIMATION_SERVICE_PORT=3008 - - export DEVICE="cpu" - export WAV2LIP_PORT=7860 - export INFERENCE_MODE='wav2lip+gfpgan' - export CHECKPOINT_PATH='/usr/local/lib/python3.11/site-packages/Wav2Lip/checkpoints/wav2lip_gan.pth' - export FACE="/home/user/comps/animation/src/assets/img/avatar5.png" - # export AUDIO='assets/audio/eg3_ref.wav' # audio file path is optional, will use base64str in the post request as input if is 'None' - export AUDIO='None' - export FACESIZE=96 - export OUTFILE="./outputs/result.mp4" - export GFPGAN_MODEL_VERSION=1.4 # latest version, can roll back to v1.3 if needed - export UPSCALE_FACTOR=1 - export FPS=5 + source set_env.sh # Start Docker Containers docker compose up -d --force-recreate diff --git a/AvatarChatbot/tests/test_compose_on_xeon.sh b/AvatarChatbot/tests/test_compose_on_xeon.sh index 8e9a04535b..b0013aa2af 100755 --- a/AvatarChatbot/tests/test_compose_on_xeon.sh +++ b/AvatarChatbot/tests/test_compose_on_xeon.sh @@ -45,37 +45,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon - export HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN - export host_ip=$(hostname -I | awk '{print $1}') - - export LLM_MODEL_ID=Intel/neural-chat-7b-v3-3 - - export WAV2LIP_ENDPOINT=http://$host_ip:7860 - - export MEGA_SERVICE_HOST_IP=${host_ip} - export WHISPER_SERVER_HOST_IP=${host_ip} - export WHISPER_SERVER_PORT=7066 - export SPEECHT5_SERVER_HOST_IP=${host_ip} - export SPEECHT5_SERVER_PORT=7055 - export LLM_SERVER_HOST_IP=${host_ip} - export LLM_SERVER_PORT=3006 - export ANIMATION_SERVICE_HOST_IP=${host_ip} - export ANIMATION_SERVICE_PORT=3008 - - export MEGA_SERVICE_PORT=8888 - - export DEVICE="cpu" - export 
WAV2LIP_PORT=7860 - export INFERENCE_MODE='wav2lip+gfpgan' - export CHECKPOINT_PATH='/usr/local/lib/python3.11/site-packages/Wav2Lip/checkpoints/wav2lip_gan.pth' - export FACE="/home/user/comps/animation/src/assets/img/avatar5.png" - # export AUDIO='assets/audio/eg3_ref.wav' # audio file path is optional, will use base64str in the post request as input if is 'None' - export AUDIO='None' - export FACESIZE=96 - export OUTFILE="/outputs/result.mp4" - export GFPGAN_MODEL_VERSION=1.4 # latest version, can roll back to v1.3 if needed - export UPSCALE_FACTOR=1 - export FPS=10 + source set_env.sh # Start Docker Containers docker compose up -d From 1c0b1731c5013c720966beeb86f3af3efa55f048 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Fri, 16 May 2025 15:19:07 +0800 Subject: [PATCH 058/217] Integrate VisualQnA set_env to ut scripts. (#1947) Signed-off-by: ZePan110 Co-authored-by: chen, suyue --- .../docker_compose/amd/gpu/rocm/set_env.sh | 6 +-- .../amd/gpu/rocm/set_env_vllm.sh | 6 +-- VisualQnA/tests/README.md | 45 +++++++++++++++++++ VisualQnA/tests/test_compose_on_gaudi.sh | 1 - VisualQnA/tests/test_compose_on_rocm.sh | 20 +-------- VisualQnA/tests/test_compose_tgi_on_gaudi.sh | 13 +----- VisualQnA/tests/test_compose_tgi_on_xeon.sh | 12 +---- VisualQnA/tests/test_compose_vllm_on_rocm.sh | 19 +------- 8 files changed, 58 insertions(+), 64 deletions(-) create mode 100644 VisualQnA/tests/README.md diff --git a/VisualQnA/docker_compose/amd/gpu/rocm/set_env.sh b/VisualQnA/docker_compose/amd/gpu/rocm/set_env.sh index eecc6d0575..1cdf88a262 100644 --- a/VisualQnA/docker_compose/amd/gpu/rocm/set_env.sh +++ b/VisualQnA/docker_compose/amd/gpu/rocm/set_env.sh @@ -6,7 +6,7 @@ export HOST_IP=${host_ip} export EXTERNAL_HOST_IP=${host_ip} export VISUALQNA_TGI_SERVICE_PORT="8399" -export VISUALQNA_HUGGINGFACEHUB_API_TOKEN=${Your_HUGGINGFACEHUB_API_TOKEN} +export VISUALQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export VISUALQNA_CARD_ID="card1" export 
VISUALQNA_RENDER_ID="renderD136" export LVM_MODEL_ID="Xkev/Llama-3.2V-11B-cot" @@ -16,8 +16,8 @@ export MEGA_SERVICE_HOST_IP=${HOST_IP} export LVM_SERVICE_HOST_IP=${HOST_IP} export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:${BACKEND_SERVICE_PORT}/v1/visualqna" export FRONTEND_SERVICE_IP=${HOST_IP} -export FRONTEND_SERVICE_PORT=18001 +export FRONTEND_SERVICE_PORT=5173 export BACKEND_SERVICE_NAME=visualqna export BACKEND_SERVICE_IP=${HOST_IP} -export BACKEND_SERVICE_PORT=18002 +export BACKEND_SERVICE_PORT=8888 export NGINX_PORT=18003 diff --git a/VisualQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/VisualQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh index 7786abe497..68a1bb0b9d 100644 --- a/VisualQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/VisualQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -6,7 +6,7 @@ export HOST_IP=${host_ip} export EXTERNAL_HOST_IP=${host_ip} export VISUALQNA_VLLM_SERVICE_PORT="8081" -export VISUALQNA_HUGGINGFACEHUB_API_TOKEN=${Your_HUGGINGFACEHUB_API_TOKEN} +export VISUALQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export VISUALQNA_CARD_ID="card1" export VISUALQNA_RENDER_ID="renderD136" export VISUALQNA_LVM_MODEL_ID="Xkev/Llama-3.2V-11B-cot" @@ -16,8 +16,8 @@ export MEGA_SERVICE_HOST_IP=${HOST_IP} export LVM_SERVICE_HOST_IP=${HOST_IP} export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:${BACKEND_SERVICE_PORT}/v1/visualqna" export FRONTEND_SERVICE_IP=${HOST_IP} -export FRONTEND_SERVICE_PORT=18001 +export FRONTEND_SERVICE_PORT=5173 export BACKEND_SERVICE_NAME=visualqna export BACKEND_SERVICE_IP=${HOST_IP} -export BACKEND_SERVICE_PORT=18002 +export BACKEND_SERVICE_PORT=8888 export NGINX_PORT=18003 diff --git a/VisualQnA/tests/README.md b/VisualQnA/tests/README.md new file mode 100644 index 0000000000..8d07371b51 --- /dev/null +++ b/VisualQnA/tests/README.md @@ -0,0 +1,45 @@ +# VisualQnA E2E test scripts + +## Set the required environment variable + +```bash +export 
HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +## Run test + +On Intel Xeon with Vllm: + +```bash +bash test_compose_on_xeon.sh +``` + +On Intel Gaudi with Vllm: + +```bash +bash test_compose_on_gaudi.sh +``` + +On Intel Xeon with TGI: + +```bash +bash test_compose_tgi_on_xeon.sh +``` + +On Intel Gaudi with TGI: + +```bash +bash test_compose_tgi_on_gaudi.sh +``` + +On AMD ROCm with TGI: + +```bash +bash test_compose_on_rocm.sh +``` + +On AMD ROCm with vLLM: + +```bash +bash test_compose_vllm_on_rocm.sh +``` diff --git a/VisualQnA/tests/test_compose_on_gaudi.sh b/VisualQnA/tests/test_compose_on_gaudi.sh index 237ad3b7dd..1df520183c 100644 --- a/VisualQnA/tests/test_compose_on_gaudi.sh +++ b/VisualQnA/tests/test_compose_on_gaudi.sh @@ -32,7 +32,6 @@ function build_docker_images() { service_list="visualqna visualqna-ui lvm nginx vllm-gaudi" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log - docker images && sleep 1s } diff --git a/VisualQnA/tests/test_compose_on_rocm.sh b/VisualQnA/tests/test_compose_on_rocm.sh index ea53c0ab31..e7034baedf 100644 --- a/VisualQnA/tests/test_compose_on_rocm.sh +++ b/VisualQnA/tests/test_compose_on_rocm.sh @@ -15,26 +15,10 @@ ip_address=$(hostname -I | awk '{print $1}') export MODEL_CACHE=${model_cache:-"/var/lib/GenAI/data"} export REGISTRY=${IMAGE_REPO} export TAG=${IMAGE_TAG} -export HOST_IP=${ip_address} -export VISUALQNA_TGI_SERVICE_PORT="8399" -export VISUALQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export VISUALQNA_CARD_ID="card1" -export VISUALQNA_RENDER_ID="renderD136" -export LVM_MODEL_ID="Xkev/Llama-3.2V-11B-cot" +export host_ip=${ip_address} export MODEL="llava-hf/llava-v1.6-mistral-7b-hf" -export LVM_ENDPOINT="http://${HOST_IP}:8399" -export LVM_SERVICE_PORT=9399 -export MEGA_SERVICE_HOST_IP=${HOST_IP} -export LVM_SERVICE_HOST_IP=${HOST_IP} -export BACKEND_SERVICE_ENDPOINT="http://${HOST_IP}:${BACKEND_SERVICE_PORT}/v1/visualqna" -export 
FRONTEND_SERVICE_IP=${HOST_IP} -export FRONTEND_SERVICE_PORT=5173 -export BACKEND_SERVICE_NAME=visualqna -export BACKEND_SERVICE_IP=${HOST_IP} -export BACKEND_SERVICE_PORT=8888 -export NGINX_PORT=18003 export PATH="~/miniconda3/bin:$PATH" -export MODEL_CACHE=${model_cache:-"/var/opea/multimodalqna-service/data"} +source $WORKPATH/docker_compose/amd/gpu/rocm/set_env.sh function build_docker_images() { opea_branch=${opea_branch:-"main"} diff --git a/VisualQnA/tests/test_compose_tgi_on_gaudi.sh b/VisualQnA/tests/test_compose_tgi_on_gaudi.sh index 495f764814..b469166a8e 100644 --- a/VisualQnA/tests/test_compose_tgi_on_gaudi.sh +++ b/VisualQnA/tests/test_compose_tgi_on_gaudi.sh @@ -34,20 +34,11 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - export LVM_MODEL_ID="llava-hf/llava-v1.6-mistral-7b-hf" - export LVM_ENDPOINT="http://${ip_address}:8399" export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export LVM_SERVICE_PORT=9399 - export MEGA_SERVICE_HOST_IP=${ip_address} - export LVM_SERVICE_HOST_IP=${ip_address} - export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:8888/v1/visualqna" - export FRONTEND_SERVICE_IP=${ip_address} - export FRONTEND_SERVICE_PORT=5173 - export BACKEND_SERVICE_NAME=visualqna - export BACKEND_SERVICE_IP=${ip_address} - export BACKEND_SERVICE_PORT=8888 export NGINX_PORT=80 export host_ip=${ip_address} + source ./set_env.sh + export LVM_MODEL_ID="llava-hf/llava-v1.6-mistral-7b-hf" sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/VisualQnA/tests/test_compose_tgi_on_xeon.sh b/VisualQnA/tests/test_compose_tgi_on_xeon.sh index a8cc02a23e..29a009904d 100644 --- a/VisualQnA/tests/test_compose_tgi_on_xeon.sh +++ b/VisualQnA/tests/test_compose_tgi_on_xeon.sh @@ -34,20 +34,10 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon/ - export LVM_MODEL_ID="llava-hf/llava-v1.6-mistral-7b-hf" - export 
LVM_ENDPOINT="http://${ip_address}:8399" export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export LVM_SERVICE_PORT=9399 - export MEGA_SERVICE_HOST_IP=${ip_address} - export LVM_SERVICE_HOST_IP=${ip_address} - export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:8888/v1/visualqna" - export FRONTEND_SERVICE_IP=${ip_address} - export FRONTEND_SERVICE_PORT=5173 - export BACKEND_SERVICE_NAME=visualqna - export BACKEND_SERVICE_IP=${ip_address} - export BACKEND_SERVICE_PORT=8888 export NGINX_PORT=80 export host_ip=${ip_address} + source ./set_env.sh sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/VisualQnA/tests/test_compose_vllm_on_rocm.sh b/VisualQnA/tests/test_compose_vllm_on_rocm.sh index 606bb9e254..06e3fec7e5 100644 --- a/VisualQnA/tests/test_compose_vllm_on_rocm.sh +++ b/VisualQnA/tests/test_compose_vllm_on_rocm.sh @@ -14,26 +14,11 @@ ip_address=$(hostname -I | awk '{print $1}') export REGISTRY=${IMAGE_REPO} export TAG=${IMAGE_TAG} -export HOST_IP=${ip_address} -export VISUALQNA_VLLM_SERVICE_PORT="8081" -export VISUALQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export VISUALQNA_CARD_ID="card1" -export VISUALQNA_RENDER_ID="renderD136" -export VISUALQNA_LVM_MODEL_ID="Xkev/Llama-3.2V-11B-cot" +export host_ip=${ip_address} export MODEL="llava-hf/llava-v1.6-mistral-7b-hf" -export LVM_ENDPOINT="http://${HOST_IP}:${VISUALQNA_VLLM_SERVICE_PORT}" -export LVM_SERVICE_PORT=9399 -export MEGA_SERVICE_HOST_IP=${HOST_IP} -export LVM_SERVICE_HOST_IP=${HOST_IP} -export BACKEND_SERVICE_ENDPOINT="http://${HOST_IP}:${BACKEND_SERVICE_PORT}/v1/visualqna" -export FRONTEND_SERVICE_IP=${HOST_IP} -export FRONTEND_SERVICE_PORT=5173 -export BACKEND_SERVICE_NAME=visualqna -export BACKEND_SERVICE_IP=${HOST_IP} -export BACKEND_SERVICE_PORT=8888 -export NGINX_PORT=18003 export PATH="~/miniconda3/bin:$PATH" export MODEL_CACHE=${model_cache:-"/var/opea/multimodalqna-service/data"} +source $WORKPATH/docker_compose/amd/gpu/rocm/set_env_vllm.sh 
function build_docker_images() { opea_branch=${opea_branch:-"main"} From ed918bcef103172e18c9ff8553dabbe564eb1450 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Fri, 16 May 2025 15:19:36 +0800 Subject: [PATCH 059/217] Integrate EdgeCraftRAG set_env to ut scripts and add README.md for UT scripts. (#1963) Signed-off-by: ZePan110 --- .../docker_compose/intel/gpu/arc/set_env.sh | 10 +++++++++ EdgeCraftRAG/tests/README.md | 21 +++++++++++++++++++ EdgeCraftRAG/tests/test_compose_on_arc.sh | 10 +-------- .../tests/test_compose_vllm_on_arc.sh | 11 +--------- 4 files changed, 33 insertions(+), 19 deletions(-) create mode 100644 EdgeCraftRAG/tests/README.md diff --git a/EdgeCraftRAG/docker_compose/intel/gpu/arc/set_env.sh b/EdgeCraftRAG/docker_compose/intel/gpu/arc/set_env.sh index e4f5c207ba..eef0ebd201 100644 --- a/EdgeCraftRAG/docker_compose/intel/gpu/arc/set_env.sh +++ b/EdgeCraftRAG/docker_compose/intel/gpu/arc/set_env.sh @@ -5,3 +5,13 @@ pushd "../../../../../" > /dev/null source .set_env.sh popd > /dev/null + +export MODEL_PATH=${MODEL_PATH} +export DOC_PATH=${DOC_PATH} +export UI_TMPFILE_PATH=${UI_TMPFILE_PATH} +export HOST_IP=${HOST_IP} +export LLM_MODEL=${LLM_MODEL} +export HF_ENDPOINT=${HF_ENDPOINT} +export vLLM_ENDPOINT=${vLLM_ENDPOINT} +export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export no_proxy="localhost, 127.0.0.1, 192.168.1.1" diff --git a/EdgeCraftRAG/tests/README.md b/EdgeCraftRAG/tests/README.md new file mode 100644 index 0000000000..3b2f72e0c1 --- /dev/null +++ b/EdgeCraftRAG/tests/README.md @@ -0,0 +1,21 @@ +# EdgeCraftRAG E2E test scripts + +## Set the required environment variable + +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +## Run test + +On Intel ARC with TGI: + +```bash +bash test_compose_on_arc.sh +``` + +On Intel ARC with vLLM: + +```bash +bash test_compose_vllm_on_arc.sh +``` diff --git a/EdgeCraftRAG/tests/test_compose_on_arc.sh b/EdgeCraftRAG/tests/test_compose_on_arc.sh index 
cdf445b1f4..509f6a7cf6 100755 --- a/EdgeCraftRAG/tests/test_compose_on_arc.sh +++ b/EdgeCraftRAG/tests/test_compose_on_arc.sh @@ -46,18 +46,10 @@ function build_docker_images() { } function start_services() { - export MODEL_PATH=${MODEL_PATH} - export DOC_PATH=${DOC_PATH} export UI_UPLOAD_PATH=${UI_UPLOAD_PATH} - export HOST_IP=${HOST_IP} - export LLM_MODEL=${LLM_MODEL} - export HF_ENDPOINT=${HF_ENDPOINT} - export vLLM_ENDPOINT=${vLLM_ENDPOINT} - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export no_proxy="localhost, 127.0.0.1, 192.168.1.1" cd $WORKPATH/docker_compose/intel/gpu/arc - + source set_env.sh # Start Docker Containers docker compose -f $COMPOSE_FILE up -d > ${LOG_PATH}/start_services_with_compose.log sleep 20 diff --git a/EdgeCraftRAG/tests/test_compose_vllm_on_arc.sh b/EdgeCraftRAG/tests/test_compose_vllm_on_arc.sh index 4c471c2450..43df736fd2 100755 --- a/EdgeCraftRAG/tests/test_compose_vllm_on_arc.sh +++ b/EdgeCraftRAG/tests/test_compose_vllm_on_arc.sh @@ -53,17 +53,8 @@ function build_docker_images() { } function start_services() { - export MODEL_PATH=${MODEL_PATH} - export DOC_PATH=${DOC_PATH} - export UI_TMPFILE_PATH=${UI_TMPFILE_PATH} - export HOST_IP=${HOST_IP} - export LLM_MODEL=${LLM_MODEL} - export HF_ENDPOINT=${HF_ENDPOINT} - export vLLM_ENDPOINT=${vLLM_ENDPOINT} - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export no_proxy="localhost, 127.0.0.1, 192.168.1.1" - cd $WORKPATH/docker_compose/intel/gpu/arc + source set_env.sh # Start Docker Containers docker compose -f $COMPOSE_FILE up -d > ${LOG_PATH}/start_services_with_compose.log From 09d93ecce65b8817fd8a6faa2adcfb4ded45e465 Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Mon, 19 May 2025 09:31:56 +0800 Subject: [PATCH 060/217] [CICD enhance] CodeGen run CI with latest base image, group logs in GHA outputs. 
(#1928) Signed-off-by: chensuyue --- CodeGen/Dockerfile | 3 +- CodeGen/docker_image_build/build.yaml | 3 ++ CodeGen/tests/test_compose_on_gaudi.sh | 30 ++++++++++-------- CodeGen/tests/test_compose_on_rocm.sh | 35 +++++++++++++-------- CodeGen/tests/test_compose_on_xeon.sh | 27 +++++++++------- CodeGen/tests/test_compose_vllm_on_rocm.sh | 36 ++++++++++++++-------- 6 files changed, 86 insertions(+), 48 deletions(-) diff --git a/CodeGen/Dockerfile b/CodeGen/Dockerfile index 5305a9d89f..74d8bd98f1 100644 --- a/CodeGen/Dockerfile +++ b/CodeGen/Dockerfile @@ -1,8 +1,9 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 +ARG IMAGE_REPO=opea ARG BASE_TAG=latest -FROM opea/comps-base:$BASE_TAG +FROM $IMAGE_REPO/comps-base:$BASE_TAG COPY ./codegen.py $HOME/codegen.py diff --git a/CodeGen/docker_image_build/build.yaml b/CodeGen/docker_image_build/build.yaml index 38b28ef80b..282c29766c 100644 --- a/CodeGen/docker_image_build/build.yaml +++ b/CodeGen/docker_image_build/build.yaml @@ -5,6 +5,8 @@ services: codegen: build: args: + IMAGE_REPO: ${REGISTRY} + BASE_TAG: ${TAG} http_proxy: ${http_proxy} https_proxy: ${https_proxy} no_proxy: ${no_proxy} @@ -39,6 +41,7 @@ services: build: context: GenAIComps dockerfile: comps/third_parties/vllm/src/Dockerfile.amd_gpu + extends: codegen image: ${REGISTRY:-opea}/vllm-rocm:${TAG:-latest} vllm: build: diff --git a/CodeGen/tests/test_compose_on_gaudi.sh b/CodeGen/tests/test_compose_on_gaudi.sh index 58eb0888db..413ce53808 100644 --- a/CodeGen/tests/test_compose_on_gaudi.sh +++ b/CodeGen/tests/test_compose_on_gaudi.sh @@ -27,19 +27,13 @@ export no_proxy=${no_proxy},${ip_address} function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. 
- if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . -type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s # Download Gaudi vllm of latest tag git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork @@ -250,24 +244,36 @@ function main() { stop_docker "${docker_compose_profiles[${i}]}" done - # build docker images + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" # loop all profiles for ((i = 0; i < len_profiles; i++)); do echo "Process [${i}]: ${docker_compose_profiles[$i]}, ${docker_llm_container_names[${i}]}" + + echo "::group::start_services" start_services "${docker_compose_profiles[${i}]}" "${docker_llm_container_names[${i}]}" + echo "::endgroup::" docker ps -a + echo "::group::validate_microservices" validate_microservices "${docker_llm_container_names[${i}]}" + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + + echo "::group::validate_gradio" validate_gradio + echo "::endgroup::" stop_docker "${docker_compose_profiles[${i}]}" sleep 5s done - echo y | docker system prune + docker system prune -f } main diff --git a/CodeGen/tests/test_compose_on_rocm.sh b/CodeGen/tests/test_compose_on_rocm.sh index d1a6f57309..94f006e358 100644 --- 
a/CodeGen/tests/test_compose_on_rocm.sh +++ b/CodeGen/tests/test_compose_on_rocm.sh @@ -17,19 +17,13 @@ ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. - if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . -type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s echo "Build all the images with --no-cache, check docker_image_build.log for details..." 
service_list="codegen codegen-ui llm-textgen" @@ -164,18 +158,35 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + + echo "::group::validate_frontend" validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune - cd $WORKPATH + echo "::endgroup::" + + docker system prune -f } diff --git a/CodeGen/tests/test_compose_on_xeon.sh b/CodeGen/tests/test_compose_on_xeon.sh index 249f735c8a..4aaa180ec3 100644 --- a/CodeGen/tests/test_compose_on_xeon.sh +++ b/CodeGen/tests/test_compose_on_xeon.sh @@ -27,19 +27,13 @@ export no_proxy=${no_proxy},${ip_address} function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. - if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . -type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . 
+ popd && sleep 1s git clone https://github.com/vllm-project/vllm.git && cd vllm VLLM_VER="v0.8.3" @@ -256,17 +250,28 @@ function main() { for ((i = 0; i < len_profiles; i++)); do echo "Process [${i}]: ${docker_compose_profiles[$i]}, ${docker_llm_container_names[${i}]}" docker ps -a + + echo "::group::start_services" start_services "${docker_compose_profiles[${i}]}" "${docker_llm_container_names[${i}]}" + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices "${docker_llm_container_names[${i}]}" + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + + echo "::group::validate_gradio" validate_gradio + echo "::endgroup::" stop_docker "${docker_compose_profiles[${i}]}" sleep 5s done - echo y | docker system prune + docker system prune -f } main diff --git a/CodeGen/tests/test_compose_vllm_on_rocm.sh b/CodeGen/tests/test_compose_vllm_on_rocm.sh index bb75bdafa8..1d78f2a0d7 100644 --- a/CodeGen/tests/test_compose_vllm_on_rocm.sh +++ b/CodeGen/tests/test_compose_vllm_on_rocm.sh @@ -17,19 +17,13 @@ ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { opea_branch=${opea_branch:-"main"} - # If the opea_branch isn't main, replace the git clone branch in Dockerfile. - if [[ "${opea_branch}" != "main" ]]; then - cd $WORKPATH - OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git" - NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git" - find . 
-type f -name "Dockerfile*" | while read -r file; do - echo "Processing file: $file" - sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file" - done - fi cd $WORKPATH/docker_image_build git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . + popd && sleep 1s echo "Build all the images with --no-cache, check docker_image_build.log for details..." service_list="vllm-rocm llm-textgen codegen codegen-ui" @@ -164,17 +158,35 @@ function stop_docker() { function main() { + echo "::group::stop_docker" stop_docker + echo "::endgroup::" + + echo "::group::build_docker_images" if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + echo "::endgroup::" + + echo "::group::start_services" start_services + echo "::endgroup::" + echo "::group::validate_microservices" validate_microservices + echo "::endgroup::" + + echo "::group::validate_megaservice" validate_megaservice + echo "::endgroup::" + + echo "::group::validate_frontend" validate_frontend + echo "::endgroup::" + echo "::group::stop_docker" stop_docker - echo y | docker system prune - cd $WORKPATH + echo "::endgroup::" + + docker system prune -f } From 8a9f3f43510f23fb8cb1f666b9e1ec39a6d07d5e Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Tue, 20 May 2025 10:05:00 +0800 Subject: [PATCH 061/217] Organize set_env.sh paths and update README.md (#1920) Signed-off-by: ZePan110 Co-authored-by: chen, suyue Co-authored-by: Ying Hu --- .github/workflows/pr-link-path-scan.yml | 6 ++++-- CodeTrans/docker_compose/intel/cpu/xeon/README.md | 2 +- CodeTrans/docker_compose/intel/hpu/gaudi/README.md | 2 +- CodeTrans/docker_compose/{ => intel}/set_env.sh | 2 +- CodeTrans/tests/test_compose_on_gaudi.sh | 4 ++-- CodeTrans/tests/test_compose_on_xeon.sh | 4 ++-- 
CodeTrans/tests/test_compose_tgi_on_gaudi.sh | 4 ++-- CodeTrans/tests/test_compose_tgi_on_xeon.sh | 4 ++-- DocSum/docker_compose/intel/cpu/xeon/README.md | 5 +++-- DocSum/docker_compose/intel/hpu/gaudi/README.md | 5 +++-- DocSum/docker_compose/{ => intel}/set_env.sh | 0 DocSum/tests/test_compose_on_gaudi.sh | 2 +- DocSum/tests/test_compose_on_xeon.sh | 2 +- DocSum/tests/test_compose_tgi_on_gaudi.sh | 2 +- DocSum/tests/test_compose_tgi_on_xeon.sh | 2 +- Translation/docker_compose/intel/cpu/xeon/README.md | 4 ++-- Translation/docker_compose/intel/hpu/gaudi/README.md | 4 ++-- Translation/docker_compose/{ => intel}/set_env.sh | 2 +- Translation/tests/test_compose_on_gaudi.sh | 4 ++-- Translation/tests/test_compose_on_xeon.sh | 4 ++-- 20 files changed, 34 insertions(+), 30 deletions(-) rename CodeTrans/docker_compose/{ => intel}/set_env.sh (95%) rename DocSum/docker_compose/{ => intel}/set_env.sh (100%) rename Translation/docker_compose/{ => intel}/set_env.sh (95%) diff --git a/.github/workflows/pr-link-path-scan.yml b/.github/workflows/pr-link-path-scan.yml index 30040bc8b0..3b147af241 100644 --- a/.github/workflows/pr-link-path-scan.yml +++ b/.github/workflows/pr-link-path-scan.yml @@ -80,6 +80,7 @@ jobs: - name: Checking Relative Path Validity run: | cd ${{github.workspace}} + delay=15 fail="FALSE" repo_name=${{ github.event.pull_request.head.repo.full_name }} branch="https://github.com/$repo_name/blob/${{ github.event.pull_request.head.ref }}" @@ -111,14 +112,15 @@ jobs: if [[ "$png_line" == *#* ]]; then if [ -n "changed_files" ] && echo "$changed_files" | grep -q "^${refer_path}$"; then url_dev=$branch$(echo "$real_path" | sed 's|.*/GenAIExamples||')$png_path + sleep $delay response=$(curl -I -L -s -o /dev/null -w "%{http_code}" "$url_dev") if [ "$response" -ne 200 ]; then - echo "**********Validation failed, try again**********" + echo "**********Validation failed ($response), try again**********" response_retry=$(curl -s -o /dev/null -w "%{http_code}" "$url_dev") 
if [ "$response_retry" -eq 200 ]; then echo "*****Retry successfully*****" else - echo "Invalid path from ${{github.workspace}}/$refer_path: $png_path" + echo "Invalid path ($response_retry) from ${{github.workspace}}/$refer_path: $png_path" fail="TRUE" fi else diff --git a/CodeTrans/docker_compose/intel/cpu/xeon/README.md b/CodeTrans/docker_compose/intel/cpu/xeon/README.md index 2e2cb44c0d..a4061fd42c 100755 --- a/CodeTrans/docker_compose/intel/cpu/xeon/README.md +++ b/CodeTrans/docker_compose/intel/cpu/xeon/README.md @@ -46,7 +46,7 @@ export http_proxy="Your_HTTP_Proxy" # http proxy if any export https_proxy="Your_HTTPs_Proxy" # https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed export NGINX_PORT=${your_nginx_port} # your usable port for nginx, 80 for example -source ./set_env.sh +source docker_compose/intel/set_env.sh ``` Consult the section on [CodeTrans Service configuration](#codetrans-configuration) for information on how service specific configuration parameters affect deployments. diff --git a/CodeTrans/docker_compose/intel/hpu/gaudi/README.md b/CodeTrans/docker_compose/intel/hpu/gaudi/README.md index d43f12bcfb..9d6bc8ae44 100755 --- a/CodeTrans/docker_compose/intel/hpu/gaudi/README.md +++ b/CodeTrans/docker_compose/intel/hpu/gaudi/README.md @@ -46,7 +46,7 @@ export http_proxy="Your_HTTP_Proxy" # http proxy if any export https_proxy="Your_HTTPs_Proxy" # https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed export NGINX_PORT=${your_nginx_port} # your usable port for nginx, 80 for example -source ./set_env.sh +source docker_compose/intel/set_env.sh ``` Consult the section on [CodeTrans Service configuration](#codetrans-configuration) for information on how service specific configuration parameters affect deployments. 
diff --git a/CodeTrans/docker_compose/set_env.sh b/CodeTrans/docker_compose/intel/set_env.sh similarity index 95% rename from CodeTrans/docker_compose/set_env.sh rename to CodeTrans/docker_compose/intel/set_env.sh index d24bc1c20b..075b62e89f 100644 --- a/CodeTrans/docker_compose/set_env.sh +++ b/CodeTrans/docker_compose/intel/set_env.sh @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -pushd "../../" > /dev/null +pushd "../../../" > /dev/null source .set_env.sh popd > /dev/null diff --git a/CodeTrans/tests/test_compose_on_gaudi.sh b/CodeTrans/tests/test_compose_on_gaudi.sh index 7b0baa6602..600c20a0c3 100644 --- a/CodeTrans/tests/test_compose_on_gaudi.sh +++ b/CodeTrans/tests/test_compose_on_gaudi.sh @@ -37,12 +37,12 @@ function build_docker_images() { } function start_services() { - cd $WORKPATH/docker_compose + cd $WORKPATH/docker_compose/intel export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export NGINX_PORT=80 export host_ip=${ip_address} source set_env.sh - cd intel/hpu/gaudi + cd hpu/gaudi sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/CodeTrans/tests/test_compose_on_xeon.sh b/CodeTrans/tests/test_compose_on_xeon.sh index 54ae5ee0ca..42f80469e0 100644 --- a/CodeTrans/tests/test_compose_on_xeon.sh +++ b/CodeTrans/tests/test_compose_on_xeon.sh @@ -39,13 +39,13 @@ function build_docker_images() { } function start_services() { - cd $WORKPATH/docker_compose + cd $WORKPATH/docker_compose/intel export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export NGINX_PORT=80 export host_ip=${ip_address} source set_env.sh - cd intel/cpu/xeon/ + cd cpu/xeon/ sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/CodeTrans/tests/test_compose_tgi_on_gaudi.sh b/CodeTrans/tests/test_compose_tgi_on_gaudi.sh index 5914dc29ce..051afce9d4 100644 --- a/CodeTrans/tests/test_compose_tgi_on_gaudi.sh +++ b/CodeTrans/tests/test_compose_tgi_on_gaudi.sh @@ -34,13 +34,13 @@ 
function build_docker_images() { } function start_services() { - cd $WORKPATH/docker_compose + cd $WORKPATH/docker_compose/intel export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export NGINX_PORT=80 export host_ip=${ip_address} source set_env.sh - cd intel/hpu/gaudi/ + cd hpu/gaudi/ sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/CodeTrans/tests/test_compose_tgi_on_xeon.sh b/CodeTrans/tests/test_compose_tgi_on_xeon.sh index 99a4f6a7d0..00da9bde73 100644 --- a/CodeTrans/tests/test_compose_tgi_on_xeon.sh +++ b/CodeTrans/tests/test_compose_tgi_on_xeon.sh @@ -34,13 +34,13 @@ function build_docker_images() { } function start_services() { - cd $WORKPATH/docker_compose + cd $WORKPATH/docker_compose/intel export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export NGINX_PORT=80 export host_ip=${ip_address} source set_env.sh - cd intel/cpu/xeon/ + cd cpu/xeon/ sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/DocSum/docker_compose/intel/cpu/xeon/README.md b/DocSum/docker_compose/intel/cpu/xeon/README.md index b06d6007e8..f62a62c1e8 100644 --- a/DocSum/docker_compose/intel/cpu/xeon/README.md +++ b/DocSum/docker_compose/intel/cpu/xeon/README.md @@ -27,9 +27,9 @@ Clone the GenAIExample repository and access the ChatQnA Intel Xeon platform Doc ```bash git clone https://github.com/opea-project/GenAIExamples.git -cd GenAIExamples/DocSum/docker_compose +cd GenAIExamples/DocSum/docker_compose/intel source set_env.sh -cd intel/cpu/xeon/ +cd cpu/xeon/ ``` NOTE: by default vLLM does "warmup" at start, to optimize its performance for the specified model and the underlying platform, which can take long time. For development (and e.g. autoscaling) it can be skipped with `export VLLM_SKIP_WARMUP=true`. @@ -49,6 +49,7 @@ Some HuggingFace resources, such as some models, are only accessible if you have To deploy the DocSum services, execute the `docker compose up` command with the appropriate arguments. 
For a default deployment, execute: ```bash +cd cpu/xeon/ docker compose up -d ``` diff --git a/DocSum/docker_compose/intel/hpu/gaudi/README.md b/DocSum/docker_compose/intel/hpu/gaudi/README.md index 2edd6934fe..98a0b132ac 100644 --- a/DocSum/docker_compose/intel/hpu/gaudi/README.md +++ b/DocSum/docker_compose/intel/hpu/gaudi/README.md @@ -29,9 +29,9 @@ Clone the GenAIExample repository and access the DocSum Intel® Gaudi® platform ```bash git clone https://github.com/opea-project/GenAIExamples.git -cd GenAIExamples/DocSum/docker_compose +cd GenAIExamples/DocSum/docker_compose/intel source set_env.sh -cd intel/hpu/gaudi/ +cd hpu/gaudi/ ``` NOTE: by default vLLM does "warmup" at start, to optimize its performance for the specified model and the underlying platform, which can take long time. For development (and e.g. autoscaling) it can be skipped with `export VLLM_SKIP_WARMUP=true`. @@ -51,6 +51,7 @@ Some HuggingFace resources, such as some models, are only accessible if you have To deploy the DocSum services, execute the `docker compose up` command with the appropriate arguments. 
For a default deployment, execute: ```bash +cd hpu/gaudi/ docker compose up -d ``` diff --git a/DocSum/docker_compose/set_env.sh b/DocSum/docker_compose/intel/set_env.sh similarity index 100% rename from DocSum/docker_compose/set_env.sh rename to DocSum/docker_compose/intel/set_env.sh diff --git a/DocSum/tests/test_compose_on_gaudi.sh b/DocSum/tests/test_compose_on_gaudi.sh index 64d3063872..a75a16c5d4 100644 --- a/DocSum/tests/test_compose_on_gaudi.sh +++ b/DocSum/tests/test_compose_on_gaudi.sh @@ -16,7 +16,7 @@ echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" echo "TAG=IMAGE_TAG=${IMAGE_TAG}" export REGISTRY=${IMAGE_REPO} export TAG=${IMAGE_TAG} -source $WORKPATH/docker_compose/set_env.sh +source $WORKPATH/docker_compose/intel/set_env.sh export MODEL_CACHE=${model_cache:-"./data"} diff --git a/DocSum/tests/test_compose_on_xeon.sh b/DocSum/tests/test_compose_on_xeon.sh index c231e7264e..ee2ed30251 100644 --- a/DocSum/tests/test_compose_on_xeon.sh +++ b/DocSum/tests/test_compose_on_xeon.sh @@ -17,7 +17,7 @@ echo "TAG=IMAGE_TAG=${IMAGE_TAG}" export REGISTRY=${IMAGE_REPO} export TAG=${IMAGE_TAG} -source $WORKPATH/docker_compose/set_env.sh +source $WORKPATH/docker_compose/intel/set_env.sh export MODEL_CACHE=${model_cache:-"./data"} export MAX_INPUT_TOKENS=2048 diff --git a/DocSum/tests/test_compose_tgi_on_gaudi.sh b/DocSum/tests/test_compose_tgi_on_gaudi.sh index 06dd9b7292..301a6f2342 100644 --- a/DocSum/tests/test_compose_tgi_on_gaudi.sh +++ b/DocSum/tests/test_compose_tgi_on_gaudi.sh @@ -16,7 +16,7 @@ echo "TAG=IMAGE_TAG=${IMAGE_TAG}" export REGISTRY=${IMAGE_REPO} export TAG=${IMAGE_TAG} -source $WORKPATH/docker_compose/set_env.sh +source $WORKPATH/docker_compose/intel/set_env.sh export MODEL_CACHE=${model_cache:-"./data"} export MAX_INPUT_TOKENS=2048 diff --git a/DocSum/tests/test_compose_tgi_on_xeon.sh b/DocSum/tests/test_compose_tgi_on_xeon.sh index 52edea31f8..4ac895d7a0 100644 --- a/DocSum/tests/test_compose_tgi_on_xeon.sh +++ 
b/DocSum/tests/test_compose_tgi_on_xeon.sh @@ -16,7 +16,7 @@ echo "TAG=IMAGE_TAG=${IMAGE_TAG}" export REGISTRY=${IMAGE_REPO} export TAG=${IMAGE_TAG} -source $WORKPATH/docker_compose/set_env.sh +source $WORKPATH/docker_compose/intel/set_env.sh export MODEL_CACHE=${model_cache:-"./data"} export MAX_INPUT_TOKENS=2048 diff --git a/Translation/docker_compose/intel/cpu/xeon/README.md b/Translation/docker_compose/intel/cpu/xeon/README.md index 1af360be83..095ca54c38 100644 --- a/Translation/docker_compose/intel/cpu/xeon/README.md +++ b/Translation/docker_compose/intel/cpu/xeon/README.md @@ -42,9 +42,9 @@ Some HuggingFace resources, such as some models, are only accessible if you have To set up environment variables for deploying Translation service, source the set_env.sh script in this directory: ``` -cd ../../../ +cd ../../ source set_env.sh -cd intel/cpu/xeon +cd cpu/xeon ``` The set_env.sh script will prompt for required and optional environment variables used to configure the Translation service. If a value is not entered, the script will use a default value for the same. It will also generate a env file defining the desired configuration. Consult the section on [Translation Service configuration](#translation-service-configuration) for information on how service specific configuration parameters affect deployments. 
diff --git a/Translation/docker_compose/intel/hpu/gaudi/README.md b/Translation/docker_compose/intel/hpu/gaudi/README.md index 005504a1a3..097cb42c81 100644 --- a/Translation/docker_compose/intel/hpu/gaudi/README.md +++ b/Translation/docker_compose/intel/hpu/gaudi/README.md @@ -42,9 +42,9 @@ Some HuggingFace resources, such as some models, are only accessible if you have To set up environment variables for deploying Translation service, source the _set_env.sh_ script in this directory: ``` -cd ../../../ +cd ../../ source set_env.sh -cd intel/hpu/gaudi/ +cd hpu/gaudi/ ``` The set_env.sh script will prompt for required and optional environment variables used to configure the Translation service. If a value is not entered, the script will use a default value for the same. It will also generate a env file defining the desired configuration. Consult the section on [Translation Service configuration](#translation-service-configuration) for information on how service specific configuration parameters affect deployments. 
diff --git a/Translation/docker_compose/set_env.sh b/Translation/docker_compose/intel/set_env.sh similarity index 95% rename from Translation/docker_compose/set_env.sh rename to Translation/docker_compose/intel/set_env.sh index 25edfd4022..37762fbd50 100644 --- a/Translation/docker_compose/set_env.sh +++ b/Translation/docker_compose/intel/set_env.sh @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -pushd "../../" > /dev/null +pushd "../../../" > /dev/null source .set_env.sh popd > /dev/null diff --git a/Translation/tests/test_compose_on_gaudi.sh b/Translation/tests/test_compose_on_gaudi.sh index 2f39c36282..afd33981a0 100644 --- a/Translation/tests/test_compose_on_gaudi.sh +++ b/Translation/tests/test_compose_on_gaudi.sh @@ -40,10 +40,10 @@ function build_docker_images() { } function start_services() { - cd $WORKPATH/docker_compose + cd $WORKPATH/docker_compose/intel export host_ip=${ip_address} source set_env.sh - cd intel/hpu/gaudi + cd hpu/gaudi sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/Translation/tests/test_compose_on_xeon.sh b/Translation/tests/test_compose_on_xeon.sh index 6b06eacdba..7eeec8c7a0 100644 --- a/Translation/tests/test_compose_on_xeon.sh +++ b/Translation/tests/test_compose_on_xeon.sh @@ -40,10 +40,10 @@ function build_docker_images() { } function start_services() { - cd $WORKPATH/docker_compose + cd $WORKPATH/docker_compose/intel export host_ip=${ip_address} source set_env.sh - cd intel/cpu/xeon + cd cpu/xeon sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env From 581e954a8d6db533cc47bda49844bd6b884447d2 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Tue, 20 May 2025 13:42:18 +0800 Subject: [PATCH 062/217] Integrate ChatQnA set_env to ut scripts and add README.md for UT scripts. 
(#1971) Signed-off-by: ZePan110 --- .../docker_compose/amd/gpu/rocm/compose.yaml | 2 +- .../amd/gpu/rocm/compose_faqgen.yaml | 2 +- .../amd/gpu/rocm/compose_faqgen_vllm.yaml | 2 +- .../amd/gpu/rocm/compose_vllm.yaml | 2 +- .../docker_compose/amd/gpu/rocm/set_env.sh | 10 +- .../amd/gpu/rocm/set_env_faqgen.sh | 10 +- .../amd/gpu/rocm/set_env_faqgen_vllm.sh | 10 +- .../amd/gpu/rocm/set_env_vllm.sh | 10 +- .../dashboards/download_opea_dashboard.sh | 4 +- .../docker_compose/intel/cpu/xeon/set_env.sh | 3 + .../docker_compose/intel/hpu/gaudi/README.md | 2 +- .../docker_compose/intel/hpu/gaudi/set_env.sh | 58 +++++---- .../intel/hpu/gaudi/set_env_faqgen.sh | 32 +++++ ChatQnA/tests/README.md | 123 ++++++++++++++++++ ChatQnA/tests/test_compose_faqgen_on_gaudi.sh | 22 +--- ChatQnA/tests/test_compose_faqgen_on_rocm.sh | 39 +----- ChatQnA/tests/test_compose_faqgen_on_xeon.sh | 12 +- .../tests/test_compose_faqgen_tgi_on_gaudi.sh | 19 +-- .../tests/test_compose_faqgen_tgi_on_xeon.sh | 11 +- .../tests/test_compose_faqgen_vllm_on_rocm.sh | 36 +---- .../tests/test_compose_guardrails_on_gaudi.sh | 10 +- ChatQnA/tests/test_compose_mariadb_on_xeon.sh | 10 +- ChatQnA/tests/test_compose_milvus_on_xeon.sh | 7 +- ChatQnA/tests/test_compose_on_gaudi.sh | 14 +- ChatQnA/tests/test_compose_on_rocm.sh | 36 +---- ChatQnA/tests/test_compose_on_xeon.sh | 12 +- .../tests/test_compose_pinecone_on_xeon.sh | 7 +- ChatQnA/tests/test_compose_qdrant_on_xeon.sh | 7 +- ChatQnA/tests/test_compose_tgi_on_gaudi.sh | 15 +-- ChatQnA/tests/test_compose_tgi_on_xeon.sh | 11 +- ChatQnA/tests/test_compose_vllm_on_rocm.sh | 39 +----- .../test_compose_without_rerank_on_gaudi.sh | 9 +- .../test_compose_without_rerank_on_xeon.sh | 7 +- 33 files changed, 259 insertions(+), 334 deletions(-) create mode 100755 ChatQnA/docker_compose/intel/hpu/gaudi/set_env_faqgen.sh create mode 100644 ChatQnA/tests/README.md diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/compose.yaml b/ChatQnA/docker_compose/amd/gpu/rocm/compose.yaml 
index a71fcc830a..14f2eb3312 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/compose.yaml +++ b/ChatQnA/docker_compose/amd/gpu/rocm/compose.yaml @@ -16,7 +16,7 @@ services: - chatqna-redis-vector-db - chatqna-tei-embedding-service ports: - - "${CHATQNA_REDIS_DATAPREP_PORT}:5000" + - "${CHATQNA_REDIS_DATAPREP_PORT:-18103}:5000" environment: no_proxy: ${no_proxy} http_proxy: ${http_proxy} diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen.yaml b/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen.yaml index 161bb4589f..df2a9a42a3 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen.yaml +++ b/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen.yaml @@ -16,7 +16,7 @@ services: - chatqna-redis-vector-db - chatqna-tei-embedding-service ports: - - "${CHATQNA_REDIS_DATAPREP_PORT}:5000" + - "${CHATQNA_REDIS_DATAPREP_PORT:-18103}:5000" environment: no_proxy: ${no_proxy} http_proxy: ${http_proxy} diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen_vllm.yaml b/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen_vllm.yaml index b89b367e29..fa1f70d74f 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen_vllm.yaml +++ b/ChatQnA/docker_compose/amd/gpu/rocm/compose_faqgen_vllm.yaml @@ -16,7 +16,7 @@ services: - chatqna-redis-vector-db - chatqna-tei-embedding-service ports: - - "${CHATQNA_REDIS_DATAPREP_PORT}:5000" + - "${CHATQNA_REDIS_DATAPREP_PORT:-18103}:5000" environment: no_proxy: ${no_proxy} http_proxy: ${http_proxy} diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml b/ChatQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml index d95ec39e92..0dbbfab0d5 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml +++ b/ChatQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml @@ -16,7 +16,7 @@ services: - chatqna-redis-vector-db - chatqna-tei-embedding-service ports: - - "${CHATQNA_REDIS_DATAPREP_PORT:-5000}:5000" + - "${CHATQNA_REDIS_DATAPREP_PORT:-18103}:5000" environment: no_proxy: ${no_proxy} http_proxy: 
${http_proxy} diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/set_env.sh b/ChatQnA/docker_compose/amd/gpu/rocm/set_env.sh index 5691d8fa48..5fcdad0a06 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/set_env.sh +++ b/ChatQnA/docker_compose/amd/gpu/rocm/set_env.sh @@ -2,17 +2,17 @@ # Copyright (C) 2025 Advanced Micro Devices, Inc. -export HOST_IP='' -export HOST_IP_EXTERNAL='' +export HOST_IP=${ip_address} +export HOST_IP_EXTERNAL=${ip_address} export CHATQNA_EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export CHATQNA_LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" export CHATQNA_RERANK_MODEL_ID="BAAI/bge-reranker-base" -export CHATQNA_BACKEND_SERVICE_PORT=18102 -export CHATQNA_FRONTEND_SERVICE_PORT=18101 -export CHATQNA_NGINX_PORT=18104 +export CHATQNA_BACKEND_SERVICE_PORT=8888 +export CHATQNA_FRONTEND_SERVICE_PORT=5173 +export CHATQNA_NGINX_PORT=80 export CHATQNA_REDIS_DATAPREP_PORT=18103 export CHATQNA_REDIS_RETRIEVER_PORT=7000 export CHATQNA_REDIS_VECTOR_INSIGHT_PORT=8001 diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen.sh b/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen.sh index 6361f5a9fd..543119eadc 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen.sh +++ b/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen.sh @@ -2,18 +2,18 @@ # Copyright (C) 2025 Advanced Micro Devices, Inc. 
-export HOST_IP='' -export HOST_IP_EXTERNAL='' +export HOST_IP=${ip_address} +export HOST_IP_EXTERNAL=${ip_address} export CHATQNA_EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export CHATQNA_LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" export CHATQNA_RERANK_MODEL_ID="BAAI/bge-reranker-base" -export CHATQNA_BACKEND_SERVICE_PORT=18102 -export CHATQNA_FRONTEND_SERVICE_PORT=18101 +export CHATQNA_BACKEND_SERVICE_PORT=8888 +export CHATQNA_FRONTEND_SERVICE_PORT=5173 export CHATQNA_LLM_FAQGEN_PORT=18011 -export CHATQNA_NGINX_PORT=18104 +export CHATQNA_NGINX_PORT=80 export CHATQNA_REDIS_DATAPREP_PORT=18103 export CHATQNA_REDIS_RETRIEVER_PORT=7000 export CHATQNA_REDIS_VECTOR_INSIGHT_PORT=8001 diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen_vllm.sh b/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen_vllm.sh index 20dd880b2d..d2462d2646 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen_vllm.sh +++ b/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen_vllm.sh @@ -2,18 +2,18 @@ # Copyright (C) 2025 Advanced Micro Devices, Inc. 
-export HOST_IP='' -export HOST_IP_EXTERNAL='' +export HOST_IP=${ip_address} +export HOST_IP_EXTERNAL=${ip_address} export CHATQNA_EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export CHATQNA_LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" export CHATQNA_RERANK_MODEL_ID="BAAI/bge-reranker-base" -export CHATQNA_BACKEND_SERVICE_PORT=18102 -export CHATQNA_FRONTEND_SERVICE_PORT=18101 +export CHATQNA_BACKEND_SERVICE_PORT=8888 +export CHATQNA_FRONTEND_SERVICE_PORT=5173 export CHATQNA_LLM_FAQGEN_PORT=18011 -export CHATQNA_NGINX_PORT=18104 +export CHATQNA_NGINX_PORT=80 export CHATQNA_REDIS_DATAPREP_PORT=18103 export CHATQNA_REDIS_RETRIEVER_PORT=7000 export CHATQNA_REDIS_VECTOR_INSIGHT_PORT=8001 diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/ChatQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh index 2d1c3920fd..0000b233e1 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/ChatQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -2,17 +2,17 @@ # Copyright (C) 2025 Advanced Micro Devices, Inc. 
-export HOST_IP='' -export HOST_IP_EXTERNAL='' +export HOST_IP=${ip_address} +export HOST_IP_EXTERNAL=${ip_address} export CHATQNA_EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export CHATQNA_LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" export CHATQNA_RERANK_MODEL_ID="BAAI/bge-reranker-base" -export CHATQNA_BACKEND_SERVICE_PORT=18102 -export CHATQNA_FRONTEND_SERVICE_PORT=18101 -export CHATQNA_NGINX_PORT=18104 +export CHATQNA_BACKEND_SERVICE_PORT=8888 +export CHATQNA_FRONTEND_SERVICE_PORT=5173 +export CHATQNA_NGINX_PORT=80 export CHATQNA_REDIS_DATAPREP_PORT=18103 export CHATQNA_REDIS_RETRIEVER_PORT=7000 export CHATQNA_REDIS_VECTOR_INSIGHT_PORT=8001 diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/grafana/dashboards/download_opea_dashboard.sh b/ChatQnA/docker_compose/intel/cpu/xeon/grafana/dashboards/download_opea_dashboard.sh index 079cabf6a4..c3739ec705 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/grafana/dashboards/download_opea_dashboard.sh +++ b/ChatQnA/docker_compose/intel/cpu/xeon/grafana/dashboards/download_opea_dashboard.sh @@ -1,6 +1,8 @@ # Copyright (C) 2025 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -rm *.json +if ls *.json 1> /dev/null 2>&1; then + rm *.json +fi wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/chatqna_megaservice_grafana.json wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/qdrant_grafana.json wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/milvus_grafana.json diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/set_env.sh b/ChatQnA/docker_compose/intel/cpu/xeon/set_env.sh index 711e27e058..2959f94321 100755 --- a/ChatQnA/docker_compose/intel/cpu/xeon/set_env.sh +++ b/ChatQnA/docker_compose/intel/cpu/xeon/set_env.sh @@ -7,6 +7,9 @@ pushd "../../../../../" > /dev/null source .set_env.sh popd > 
/dev/null +export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} +export host_ip=${ip_address} export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" export RERANK_MODEL_ID="BAAI/bge-reranker-base" export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/README.md b/ChatQnA/docker_compose/intel/hpu/gaudi/README.md index 6dea162563..4de795be43 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/README.md +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/README.md @@ -43,7 +43,7 @@ Some HuggingFace resources, such as some models, are only accessible if you have ### Configure the Deployment Environment -To set up environment variables for deploying ChatQnA services, source the _setup_env.sh_ script in this directory: +To set up environment variables for deploying ChatQnA services, source the _set_env.sh_ script in this directory (if using faqgen or guardrails, source _set_env_faqgen.sh_ instead): ``` source ./set_env.sh diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh b/ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh index 0b55559f5e..fe847e6036 100755 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh @@ -4,12 +4,20 @@ # SPDX-License-Identifier: Apache-2.0 # Function to prompt for input and set environment variables +NON_INTERACTIVE=${NON_INTERACTIVE:-false} + prompt_for_env_var() { local var_name="$1" local prompt_message="$2" local default_value="$3" local mandatory="$4" + if [[ "$NON_INTERACTIVE" == "true" ]]; then + echo "Non-interactive environment detected. 
Setting $var_name to default: $default_value" + export "$var_name"="$default_value" + return + fi + if [[ "$mandatory" == "true" ]]; then while [[ -z "$value" ]]; do read -p "$prompt_message [default: \"${default_value}\"]: " value @@ -34,7 +42,7 @@ popd > /dev/null # Prompt the user for each required environment variable prompt_for_env_var "EMBEDDING_MODEL_ID" "Enter the EMBEDDING_MODEL_ID" "BAAI/bge-base-en-v1.5" false -prompt_for_env_var "HUGGINGFACEHUB_API_TOKEN" "Enter the HUGGINGFACEHUB_API_TOKEN" "" true +prompt_for_env_var "HUGGINGFACEHUB_API_TOKEN" "Enter the HUGGINGFACEHUB_API_TOKEN" "${HF_TOKEN}" true prompt_for_env_var "RERANK_MODEL_ID" "Enter the RERANK_MODEL_ID" "BAAI/bge-reranker-base" false prompt_for_env_var "LLM_MODEL_ID" "Enter the LLM_MODEL_ID" "meta-llama/Meta-Llama-3-8B-Instruct" false prompt_for_env_var "INDEX_NAME" "Enter the INDEX_NAME" "rag-redis" false @@ -42,34 +50,40 @@ prompt_for_env_var "NUM_CARDS" "Enter the number of Gaudi devices" "1" false prompt_for_env_var "host_ip" "Enter the host_ip" "$(curl ifconfig.me)" false #Query for enabling http_proxy -prompt_for_env_var "http_proxy" "Enter the http_proxy." "" false +prompt_for_env_var "http_proxy" "Enter the http_proxy." "${http_proxy}" false #Query for enabling https_proxy -prompt_for_env_var "https_proxy" "Enter the https_proxy." "" false +prompt_for_env_var "https_proxy" "Enter the https_proxy." "${https_proxy}" false #Query for enabling no_proxy -prompt_for_env_var "no_proxy" "Enter the no_proxy." "" false +prompt_for_env_var "no_proxy" "Enter the no_proxy." "${no_proxy}" false # Query for enabling logging -read -p "Enable logging? (yes/no): " logging && logging=$(echo "$logging" | tr '[:upper:]' '[:lower:]') -if [[ "$logging" == "yes" || "$logging" == "y" ]]; then - export LOGFLAG=true -else - export LOGFLAG=false -fi - -# Query for enabling OpenTelemetry Tracing Endpoint -read -p "Enable OpenTelemetry Tracing Endpoint? 
(yes/no): " telemetry && telemetry=$(echo "$telemetry" | tr '[:upper:]' '[:lower:]') -if [[ "$telemetry" == "yes" || "$telemetry" == "y" ]]; then - export JAEGER_IP=$(ip route get 8.8.8.8 | grep -oP 'src \K[^ ]+') - export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=grpc://$JAEGER_IP:4317 - export TELEMETRY_ENDPOINT=http://$JAEGER_IP:4318/v1/traces - telemetry_flag=true - pushd "grafana/dashboards" > /dev/null - source download_opea_dashboard.sh - popd > /dev/null +if [[ "$NON_INTERACTIVE" == "true" ]]; then + # Query for enabling logging + prompt_for_env_var "LOGFLAG" "Enable logging? (yes/no): " "true" false + export JAEGER_IP=$(ip route get 8.8.8.8 | grep -oP 'src \K[^ ]+') + export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=grpc://$JAEGER_IP:4317 + export TELEMETRY_ENDPOINT=http://$JAEGER_IP:4318/v1/traces + telemetry_flag=true else - telemetry_flag=false + # Query for enabling logging + read -p "Enable logging? (yes/no): " logging && logging=$(echo "$logging" | tr '[:upper:]' '[:lower:]') + if [[ "$logging" == "yes" || "$logging" == "y" ]]; then + export LOGFLAG=true + else + export LOGFLAG=false + fi + # Query for enabling OpenTelemetry Tracing Endpoint + read -p "Enable OpenTelemetry Tracing Endpoint? 
(yes/no): " telemetry && telemetry=$(echo "$telemetry" | tr '[:upper:]' '[:lower:]') + if [[ "$telemetry" == "yes" || "$telemetry" == "y" ]]; then + export JAEGER_IP=$(ip route get 8.8.8.8 | grep -oP 'src \K[^ ]+') + export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=grpc://$JAEGER_IP:4317 + export TELEMETRY_ENDPOINT=http://$JAEGER_IP:4318/v1/traces + telemetry_flag=true + else + telemetry_flag=false + fi fi # Generate the .env file diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/set_env_faqgen.sh b/ChatQnA/docker_compose/intel/hpu/gaudi/set_env_faqgen.sh new file mode 100755 index 0000000000..fde0b35fd0 --- /dev/null +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/set_env_faqgen.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash + +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +pushd "../../../../../" > /dev/null +source .set_env.sh +popd > /dev/null + +export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} +export host_ip=${ip_address} +export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" +export RERANK_MODEL_ID="BAAI/bge-reranker-base" +export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" +export INDEX_NAME="rag-redis" +export NUM_CARDS=1 +export VLLM_SKIP_WARMUP=true +export LOGFLAG=True +export http_proxy=${http_proxy} +export https_proxy=${https_proxy} +export no_proxy="${ip_address},redis-vector-db,dataprep-redis-service,tei-embedding-service,retriever,tei-reranking-service,tgi-service,vllm-service,guardrails,llm-faqgen,chatqna-gaudi-backend-server,chatqna-gaudi-ui-server,chatqna-gaudi-nginx-server" + +export LLM_ENDPOINT_PORT=8010 +export LLM_SERVER_PORT=9001 +export CHATQNA_BACKEND_PORT=8888 +export CHATQNA_REDIS_VECTOR_PORT=6377 +export CHATQNA_REDIS_VECTOR_INSIGHT_PORT=8006 +export CHATQNA_FRONTEND_SERVICE_PORT=5175 +export NGINX_PORT=80 +export FAQGen_COMPONENT_NAME="OpeaFaqGenvLLM" +export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}" diff --git a/ChatQnA/tests/README.md 
b/ChatQnA/tests/README.md new file mode 100644 index 0000000000..c622008650 --- /dev/null +++ b/ChatQnA/tests/README.md @@ -0,0 +1,123 @@ +# ChatQnA E2E test scripts + +## Set the required environment variable + +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +## Run test + +On Intel Xeon with TGI: + +```bash +bash test_compose_tgi_on_xeon.sh +``` + +On Intel Xeon with vLLM: + +```bash +bash test_compose_on_xeon.sh +``` + +On Intel Xeon with MariaDB Vector: + +```bash +bash test_compose_mariadb_on_xeon.sh +``` + +On Intel Xeon with Pinecone: + +```bash +bash test_compose_pinecone_on_xeon.sh +``` + +On Intel Xeon with Milvus + +```bash +bash test_compose_milvus_on_xeon.sh +``` + +On Intel Xeon with Qdrant + +```bash +bash test_compose_qdrant_on_xeon.sh +``` + +On Intel Xeon without Rerank: + +```bash +bash test_compose_without_rerank_on_xeon.sh +``` + +On Intel Gaudi with TGI: + +```bash +bash test_compose_tgi_on_gaudi.sh +``` + +On Intel Gaudi with vLLM: + +```bash +bash test_compose_on_gaudi.sh +``` + +On Intel Gaudi with Guardrails: + +```bash +bash test_compose_guardrails_on_gaudi.sh +``` + +On Intel Gaudi without Rerank: + +```bash +bash test_compose_without_rerank_on_gaudi.sh +``` + +On AMD ROCm with TGI: + +```bash +bash test_compose_on_rocm.sh +``` + +On AMD ROCm with vLLM: + +```bash +bash test_compose_vllm_on_rocm.sh +``` + +Test FAQ Generation On Intel Xeon with TGI: + +```bash +bash test_compose_faqgen_tgi_on_xeon.sh +``` + +Test FAQ Generation On Intel Xeon with vLLM: + +```bash +bash test_compose_faqgen_on_xeon.sh +``` + +Test FAQ Generation On Intel Gaudi with TGI: + +```bash +bash test_compose_faqgen_tgi_on_gaudi.sh +``` + +Test FAQ Generation On Intel Gaudi with vLLM: + +```bash +bash test_compose_faqgen_on_gaudi.sh +``` + +Test FAQ Generation On AMD ROCm with TGI: + +```bash +bash test_compose_faqgen_on_rocm.sh +``` + +Test FAQ Generation On AMD ROCm with vLLM: + +```bash +bash test_compose_faqgen_vllm_on_rocm.sh +``` 
diff --git a/ChatQnA/tests/test_compose_faqgen_on_gaudi.sh b/ChatQnA/tests/test_compose_faqgen_on_gaudi.sh index 2a30dbb773..fc95182346 100644 --- a/ChatQnA/tests/test_compose_faqgen_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_faqgen_on_gaudi.sh @@ -36,27 +36,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" - export NUM_CARDS=1 - export INDEX_NAME="rag-redis" - export host_ip=${ip_address} - export LLM_ENDPOINT_PORT=8010 - export LLM_SERVER_PORT=9001 - export CHATQNA_BACKEND_PORT=8888 - export CHATQNA_REDIS_VECTOR_PORT=6377 - export CHATQNA_REDIS_VECTOR_INSIGHT_PORT=8006 - export CHATQNA_FRONTEND_SERVICE_PORT=5175 - export NGINX_PORT=80 - export FAQGen_COMPONENT_NAME="OpeaFaqGenvLLM" - export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}" - export HF_TOKEN=${HF_TOKEN} - export VLLM_SKIP_WARMUP=true - export LOGFLAG=True - export http_proxy=${http_proxy} - export https_proxy=${https_proxy} - export no_proxy="${ip_address},redis-vector-db,dataprep-redis-service,tei-embedding-service,retriever,tei-reranking-service,tgi-service,vllm-service,guardrails,llm-faqgen,chatqna-gaudi-backend-server,chatqna-gaudi-ui-server,chatqna-gaudi-nginx-server" + source set_env_faqgen.sh # Start Docker Containers docker compose -f compose_faqgen.yaml up -d > ${LOG_PATH}/start_services_with_compose.log diff --git a/ChatQnA/tests/test_compose_faqgen_on_rocm.sh b/ChatQnA/tests/test_compose_faqgen_on_rocm.sh index 7b05bb8c06..a874ca3e90 100644 --- a/ChatQnA/tests/test_compose_faqgen_on_rocm.sh +++ b/ChatQnA/tests/test_compose_faqgen_on_rocm.sh @@ -15,44 +15,7 @@ WORKPATH=$(dirname "$PWD") LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') -export HOST_IP=${ip_address} -export HOST_IP_EXTERNAL=${ip_address} - -export 
CHATQNA_EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" -export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export CHATQNA_LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" -export CHATQNA_RERANK_MODEL_ID="BAAI/bge-reranker-base" - -export CHATQNA_BACKEND_SERVICE_PORT=8888 -export CHATQNA_FRONTEND_SERVICE_PORT=5173 -export CHATQNA_LLM_FAQGEN_PORT=18011 -export CHATQNA_NGINX_PORT=80 -export CHATQNA_REDIS_DATAPREP_PORT=18103 -export CHATQNA_REDIS_RETRIEVER_PORT=7000 -export CHATQNA_REDIS_VECTOR_INSIGHT_PORT=8001 -export CHATQNA_REDIS_VECTOR_PORT=6379 -export CHATQNA_TEI_EMBEDDING_PORT=18090 -export CHATQNA_TEI_RERANKING_PORT=18808 -export CHATQNA_TGI_SERVICE_PORT=18008 - -export CHATQNA_BACKEND_SERVICE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_BACKEND_SERVICE_PORT}/v1/chatqna" -export CHATQNA_BACKEND_SERVICE_IP=${HOST_IP} -export CHATQNA_DATAPREP_DELETE_FILE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_REDIS_DATAPREP_PORT}/v1/dataprep/delete" -export CHATQNA_DATAPREP_GET_FILE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_REDIS_DATAPREP_PORT}/v1/dataprep/get" -export CHATQNA_DATAPREP_SERVICE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_REDIS_DATAPREP_PORT}/v1/dataprep/ingest" -export CHATQNA_EMBEDDING_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_FRONTEND_SERVICE_IP=${HOST_IP} -export CHATQNA_LLM_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_LLM_ENDPOINT="http://${HOST_IP}:${CHATQNA_TGI_SERVICE_PORT}" -export CHATQNA_MEGA_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_REDIS_URL="redis://${HOST_IP}:${CHATQNA_REDIS_VECTOR_PORT}" -export CHATQNA_RERANK_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_RETRIEVER_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_TEI_EMBEDDING_ENDPOINT="http://${HOST_IP}:${CHATQNA_TEI_EMBEDDING_PORT}" - -export CHATQNA_BACKEND_SERVICE_NAME=chatqna -export CHATQNA_INDEX_NAME="rag-redis" -export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi" +source $WORKPATH/docker_compose/amd/gpu/rocm/set_env_faqgen.sh export PATH="~/miniconda3/bin:$PATH" diff --git 
a/ChatQnA/tests/test_compose_faqgen_on_xeon.sh b/ChatQnA/tests/test_compose_faqgen_on_xeon.sh index dc42798732..e9a15842ab 100644 --- a/ChatQnA/tests/test_compose_faqgen_on_xeon.sh +++ b/ChatQnA/tests/test_compose_faqgen_on_xeon.sh @@ -37,26 +37,16 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" - export INDEX_NAME="rag-redis" - export host_ip=${ip_address} - export LLM_ENDPOINT_PORT=8010 export LLM_SERVER_PORT=9001 - export CHATQNA_BACKEND_PORT=8888 export CHATQNA_REDIS_VECTOR_PORT=6377 export CHATQNA_REDIS_VECTOR_INSIGHT_PORT=8006 export CHATQNA_FRONTEND_SERVICE_PORT=5175 - export NGINX_PORT=80 - export FAQGen_COMPONENT_NAME="OpeaFaqGenvLLM" - export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}" - export HF_TOKEN=${HF_TOKEN} export VLLM_SKIP_WARMUP=true export LOGFLAG=True export http_proxy=${http_proxy} export https_proxy=${https_proxy} export no_proxy="${ip_address},redis-vector-db,dataprep-redis-service,tei-embedding-service,retriever,tei-reranking-service,tgi-service,vllm-service,guardrails,llm-faqgen,chatqna-xeon-backend-server,chatqna-xeon-ui-server,chatqna-xeon-nginx-server" + source set_env.sh # Start Docker Containers docker compose -f compose_faqgen.yaml up -d > ${LOG_PATH}/start_services_with_compose.log diff --git a/ChatQnA/tests/test_compose_faqgen_tgi_on_gaudi.sh b/ChatQnA/tests/test_compose_faqgen_tgi_on_gaudi.sh index 8eef9c6040..563de7eb6a 100644 --- a/ChatQnA/tests/test_compose_faqgen_tgi_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_faqgen_tgi_on_gaudi.sh @@ -33,25 +33,8 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export 
LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" - export INDEX_NAME="rag-redis" - export host_ip=${ip_address} - export LLM_ENDPOINT_PORT=8010 - export LLM_SERVER_PORT=9001 - export CHATQNA_BACKEND_PORT=8888 - export CHATQNA_REDIS_VECTOR_PORT=6377 - export CHATQNA_REDIS_VECTOR_INSIGHT_PORT=8006 - export CHATQNA_FRONTEND_SERVICE_PORT=5175 - export NGINX_PORT=80 export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi" - export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}" - export HF_TOKEN=${HF_TOKEN} - export LOGFLAG=True - export http_proxy=${http_proxy} - export https_proxy=${https_proxy} - export no_proxy="${ip_address},redis-vector-db,dataprep-redis-service,tei-embedding-service,retriever,tei-reranking-service,tgi-service,vllm-service,guardrails,llm-faqgen,chatqna-gaudi-backend-server,chatqna-gaudi-ui-server,chatqna-gaudi-nginx-server" + source set_env_faqgen.sh # Start Docker Containers docker compose -f compose_faqgen_tgi.yaml up -d > ${LOG_PATH}/start_services_with_compose.log diff --git a/ChatQnA/tests/test_compose_faqgen_tgi_on_xeon.sh b/ChatQnA/tests/test_compose_faqgen_tgi_on_xeon.sh index d4140a3ab1..44cdc03ceb 100644 --- a/ChatQnA/tests/test_compose_faqgen_tgi_on_xeon.sh +++ b/ChatQnA/tests/test_compose_faqgen_tgi_on_xeon.sh @@ -37,25 +37,16 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" - export INDEX_NAME="rag-redis" - export host_ip=${ip_address} - export LLM_ENDPOINT_PORT=8010 export LLM_SERVER_PORT=9001 - export CHATQNA_BACKEND_PORT=8888 export CHATQNA_REDIS_VECTOR_PORT=6377 export CHATQNA_REDIS_VECTOR_INSIGHT_PORT=8006 export CHATQNA_FRONTEND_SERVICE_PORT=5175 - export NGINX_PORT=80 export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi" - export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}" - export HF_TOKEN=${HF_TOKEN} export 
LOGFLAG=True export http_proxy=${http_proxy} export https_proxy=${https_proxy} export no_proxy="${ip_address},redis-vector-db,dataprep-redis-service,tei-embedding-service,retriever,tei-reranking-service,tgi-service,vllm-service,guardrails,llm-faqgen,chatqna-xeon-backend-server,chatqna-xeon-ui-server,chatqna-xeon-nginx-server" + source set_env.sh # Start Docker Containers docker compose -f compose_faqgen_tgi.yaml up -d > ${LOG_PATH}/start_services_with_compose.log diff --git a/ChatQnA/tests/test_compose_faqgen_vllm_on_rocm.sh b/ChatQnA/tests/test_compose_faqgen_vllm_on_rocm.sh index 83e71da34e..774aca814d 100644 --- a/ChatQnA/tests/test_compose_faqgen_vllm_on_rocm.sh +++ b/ChatQnA/tests/test_compose_faqgen_vllm_on_rocm.sh @@ -14,41 +14,7 @@ WORKPATH=$(dirname "$PWD") LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') -export HOST_IP=${ip_address} -export HOST_IP_EXTERNAL=${ip_address} - -export CHATQNA_EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" -export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export CHATQNA_LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" -export CHATQNA_RERANK_MODEL_ID="BAAI/bge-reranker-base" - -export CHATQNA_BACKEND_SERVICE_PORT=8888 -export CHATQNA_FRONTEND_SERVICE_PORT=5173 -export CHATQNA_LLM_FAQGEN_PORT=18011 -export CHATQNA_NGINX_PORT=80 -export CHATQNA_REDIS_DATAPREP_PORT=18103 -export CHATQNA_REDIS_RETRIEVER_PORT=7000 -export CHATQNA_REDIS_VECTOR_INSIGHT_PORT=8001 -export CHATQNA_REDIS_VECTOR_PORT=6379 -export CHATQNA_TEI_EMBEDDING_PORT=18090 -export CHATQNA_TEI_RERANKING_PORT=18808 -export CHATQNA_VLLM_SERVICE_PORT=18008 - -export CHATQNA_BACKEND_SERVICE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_BACKEND_SERVICE_PORT}/v1/chatqna" -export CHATQNA_BACKEND_SERVICE_IP=${HOST_IP_EXTERNAL} -export CHATQNA_DATAPREP_DELETE_FILE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_REDIS_DATAPREP_PORT}/v1/dataprep/delete" -export 
CHATQNA_DATAPREP_GET_FILE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_REDIS_DATAPREP_PORT}/v1/dataprep/get" -export CHATQNA_DATAPREP_SERVICE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_REDIS_DATAPREP_PORT}/v1/dataprep/ingest" -export CHATQNA_FRONTEND_SERVICE_IP=${HOST_IP} -export CHATQNA_MEGA_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_REDIS_URL="redis://${HOST_IP}:${CHATQNA_REDIS_VECTOR_PORT}" -export CHATQNA_TEI_EMBEDDING_ENDPOINT="http://${HOST_IP}:${CHATQNA_TEI_EMBEDDING_PORT}" -export LLM_ENDPOINT="http://${HOST_IP}:${CHATQNA_VLLM_SERVICE_PORT}" - -export CHATQNA_BACKEND_SERVICE_NAME=chatqna -export CHATQNA_INDEX_NAME="rag-redis" -export CHATQNA_TYPE="CHATQNA_FAQGEN" -export FAQGen_COMPONENT_NAME="OpeaFaqGenvLLM" +source $WORKPATH/docker_compose/amd/gpu/rocm/set_env_faqgen_vllm.sh function build_docker_images() { opea_branch=${opea_branch:-"main"} diff --git a/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh b/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh index f9057f6ec0..c24a0c537f 100644 --- a/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -set -e +set -xe IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" @@ -36,14 +36,8 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" - export NUM_CARDS=1 - export INDEX_NAME="rag-redis" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export host_ip=${ip_address} export GURADRAILS_MODEL_ID="meta-llama/Meta-Llama-Guard-2-8B" + source set_env_faqgen.sh # Start Docker Containers docker compose -f compose_guardrails.yaml up -d > ${LOG_PATH}/start_services_with_compose.log diff 
--git a/ChatQnA/tests/test_compose_mariadb_on_xeon.sh b/ChatQnA/tests/test_compose_mariadb_on_xeon.sh index 412e32626a..61581bfd28 100644 --- a/ChatQnA/tests/test_compose_mariadb_on_xeon.sh +++ b/ChatQnA/tests/test_compose_mariadb_on_xeon.sh @@ -2,7 +2,7 @@ # Copyright (C) 2025 MariaDB Foundation # SPDX-License-Identifier: Apache-2.0 -set -e +set -xe IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" @@ -39,14 +39,8 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon - export MARIADB_DATABASE="vectordb" - export MARIADB_USER="chatqna" export MARIADB_PASSWORD="test" - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export host_ip=${ip_address} + source set_env_mariadb.sh # Start Docker Containers docker compose -f compose_mariadb.yaml up -d > ${LOG_PATH}/start_services_with_compose.log diff --git a/ChatQnA/tests/test_compose_milvus_on_xeon.sh b/ChatQnA/tests/test_compose_milvus_on_xeon.sh index 47a5b43ddc..5316fd08fc 100644 --- a/ChatQnA/tests/test_compose_milvus_on_xeon.sh +++ b/ChatQnA/tests/test_compose_milvus_on_xeon.sh @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -set -e +set -xe IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" @@ -39,11 +39,8 @@ function build_docker_images() { } function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon/ - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export LOGFLAG=true + source set_env.sh # Start Docker Containers docker compose -f compose_milvus.yaml up 
-d > ${LOG_PATH}/start_services_with_compose.log diff --git a/ChatQnA/tests/test_compose_on_gaudi.sh b/ChatQnA/tests/test_compose_on_gaudi.sh index 144f541907..857d25ce05 100644 --- a/ChatQnA/tests/test_compose_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_on_gaudi.sh @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -set -e +set -xe IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" @@ -36,16 +36,10 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" - export NUM_CARDS=1 - export INDEX_NAME="rag-redis" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} + export NON_INTERACTIVE=true export host_ip=${ip_address} - export JAEGER_IP=$(ip route get 8.8.8.8 | grep -oP 'src \K[^ ]+') - export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=grpc://$JAEGER_IP:4317 - export TELEMETRY_ENDPOINT=http://$JAEGER_IP:4318/v1/traces + export telemetry=yes + source set_env.sh # Start Docker Containers docker compose -f compose.yaml -f compose.telemetry.yaml up -d > ${LOG_PATH}/start_services_with_compose.log diff --git a/ChatQnA/tests/test_compose_on_rocm.sh b/ChatQnA/tests/test_compose_on_rocm.sh index 3ff91522c8..a36aecf68e 100644 --- a/ChatQnA/tests/test_compose_on_rocm.sh +++ b/ChatQnA/tests/test_compose_on_rocm.sh @@ -15,41 +15,7 @@ WORKPATH=$(dirname "$PWD") LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') -export HOST_IP=${ip_address} -export HOST_IP_EXTERNAL=${ip_address} - -export CHATQNA_EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" -export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export CHATQNA_LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" -export CHATQNA_RERANK_MODEL_ID="BAAI/bge-reranker-base" - -export 
CHATQNA_BACKEND_SERVICE_PORT=8888 -export CHATQNA_FRONTEND_SERVICE_PORT=5173 -export CHATQNA_NGINX_PORT=80 -export CHATQNA_REDIS_DATAPREP_PORT=18103 -export CHATQNA_REDIS_RETRIEVER_PORT=7000 -export CHATQNA_REDIS_VECTOR_INSIGHT_PORT=8001 -export CHATQNA_REDIS_VECTOR_PORT=6379 -export CHATQNA_TEI_EMBEDDING_PORT=18090 -export CHATQNA_TEI_RERANKING_PORT=18808 -export CHATQNA_TGI_SERVICE_PORT=18008 - -export CHATQNA_BACKEND_SERVICE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_BACKEND_SERVICE_PORT}/v1/chatqna" -export CHATQNA_BACKEND_SERVICE_IP=${HOST_IP} -export CHATQNA_DATAPREP_DELETE_FILE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_REDIS_DATAPREP_PORT}/v1/dataprep/delete" -export CHATQNA_DATAPREP_GET_FILE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_REDIS_DATAPREP_PORT}/v1/dataprep/get" -export CHATQNA_DATAPREP_SERVICE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_REDIS_DATAPREP_PORT}/v1/dataprep/ingest" -export CHATQNA_EMBEDDING_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_FRONTEND_SERVICE_IP=${HOST_IP} -export CHATQNA_LLM_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_MEGA_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_REDIS_URL="redis://${HOST_IP}:${CHATQNA_REDIS_VECTOR_PORT}" -export CHATQNA_RERANK_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_RETRIEVER_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_TEI_EMBEDDING_ENDPOINT="http://${HOST_IP}:${CHATQNA_TEI_EMBEDDING_PORT}" - -export CHATQNA_BACKEND_SERVICE_NAME=chatqna -export CHATQNA_INDEX_NAME="rag-redis" +source $WORKPATH/docker_compose/amd/gpu/rocm/set_env.sh export PATH="~/miniconda3/bin:$PATH" diff --git a/ChatQnA/tests/test_compose_on_xeon.sh b/ChatQnA/tests/test_compose_on_xeon.sh index 38226ec9be..89357285c8 100644 --- a/ChatQnA/tests/test_compose_on_xeon.sh +++ b/ChatQnA/tests/test_compose_on_xeon.sh @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -set -e +set -xe IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" 
@@ -40,15 +40,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" - export INDEX_NAME="rag-redis" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export host_ip=${ip_address} - export JAEGER_IP=$(ip route get 8.8.8.8 | grep -oP 'src \K[^ ]+') - export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=grpc://$JAEGER_IP:4317 - export TELEMETRY_ENDPOINT=http://$JAEGER_IP:4318/v1/traces + source set_env.sh # Start Docker Containers docker compose -f compose.yaml -f compose.telemetry.yaml up -d > ${LOG_PATH}/start_services_with_compose.log diff --git a/ChatQnA/tests/test_compose_pinecone_on_xeon.sh b/ChatQnA/tests/test_compose_pinecone_on_xeon.sh index 98bfd21368..e02b13637d 100755 --- a/ChatQnA/tests/test_compose_pinecone_on_xeon.sh +++ b/ChatQnA/tests/test_compose_pinecone_on_xeon.sh @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -set -e +set -xe IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" @@ -41,14 +41,11 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon/ export no_proxy=${no_proxy},${ip_address} - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" export PINECONE_API_KEY=${PINECONE_KEY_LANGCHAIN_TEST} export PINECONE_INDEX_NAME="langchain-test" export INDEX_NAME="langchain-test" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export LOGFLAG=true + source set_env.sh # Start Docker Containers docker compose -f compose_pinecone.yaml up -d > ${LOG_PATH}/start_services_with_compose.log diff --git a/ChatQnA/tests/test_compose_qdrant_on_xeon.sh 
b/ChatQnA/tests/test_compose_qdrant_on_xeon.sh index f2a30be1e7..594deec288 100644 --- a/ChatQnA/tests/test_compose_qdrant_on_xeon.sh +++ b/ChatQnA/tests/test_compose_qdrant_on_xeon.sh @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -set -e +set -xe IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" @@ -40,11 +40,8 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" export INDEX_NAME="rag-qdrant" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} + source set_env.sh sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/ChatQnA/tests/test_compose_tgi_on_gaudi.sh b/ChatQnA/tests/test_compose_tgi_on_gaudi.sh index b334fc35c8..7ab0565a3c 100644 --- a/ChatQnA/tests/test_compose_tgi_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_tgi_on_gaudi.sh @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -set -e +set -xe IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" @@ -32,15 +32,10 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" - export NUM_CARDS=1 - export INDEX_NAME="rag-redis" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export JAEGER_IP=$(ip route get 8.8.8.8 | grep -oP 'src \K[^ ]+') - export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=grpc://$JAEGER_IP:4317 - export TELEMETRY_ENDPOINT=http://$JAEGER_IP:4318/v1/traces + export NON_INTERACTIVE=true + export host_ip=${ip_address} + export telemetry=yes 
+ source set_env.sh # Start Docker Containers docker compose -f compose_tgi.yaml -f compose_tgi.telemetry.yaml up -d > ${LOG_PATH}/start_services_with_compose.log diff --git a/ChatQnA/tests/test_compose_tgi_on_xeon.sh b/ChatQnA/tests/test_compose_tgi_on_xeon.sh index 12c9552ca5..c00fa861aa 100644 --- a/ChatQnA/tests/test_compose_tgi_on_xeon.sh +++ b/ChatQnA/tests/test_compose_tgi_on_xeon.sh @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -set -e +set -xe IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" @@ -33,14 +33,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export RERANK_MODEL_ID="BAAI/bge-reranker-base" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" - export INDEX_NAME="rag-redis" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export JAEGER_IP=$(ip route get 8.8.8.8 | grep -oP 'src \K[^ ]+') - export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=grpc://$JAEGER_IP:4317 - export TELEMETRY_ENDPOINT=http://$JAEGER_IP:4318/v1/traces + source set_env.sh # Start Docker Containers docker compose -f compose_tgi.yaml -f compose_tgi.telemetry.yaml up -d > ${LOG_PATH}/start_services_with_compose.log diff --git a/ChatQnA/tests/test_compose_vllm_on_rocm.sh b/ChatQnA/tests/test_compose_vllm_on_rocm.sh index 622c90ece9..992c4f4aac 100644 --- a/ChatQnA/tests/test_compose_vllm_on_rocm.sh +++ b/ChatQnA/tests/test_compose_vllm_on_rocm.sh @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -set -e +set -xe IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" @@ -14,42 +14,7 @@ WORKPATH=$(dirname "$PWD") LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') -export HOST_IP=${ip_address} -export HOST_IP_EXTERNAL=${ip_address} - -export 
CHATQNA_EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" -export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export CHATQNA_LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" -export CHATQNA_RERANK_MODEL_ID="BAAI/bge-reranker-base" - -export CHATQNA_BACKEND_SERVICE_PORT=8888 -export CHATQNA_FRONTEND_SERVICE_PORT=5173 -export CHATQNA_NGINX_PORT=80 -export CHATQNA_REDIS_DATAPREP_PORT=18103 -export CHATQNA_REDIS_RETRIEVER_PORT=7000 -export CHATQNA_REDIS_VECTOR_INSIGHT_PORT=8001 -export CHATQNA_REDIS_VECTOR_PORT=6379 -export CHATQNA_TEI_EMBEDDING_PORT=18090 -export CHATQNA_TEI_RERANKING_PORT=18808 -export CHATQNA_VLLM_SERVICE_PORT=18008 - -export CHATQNA_BACKEND_SERVICE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_BACKEND_SERVICE_PORT}/v1/chatqna" -export CHATQNA_BACKEND_SERVICE_IP=${HOST_IP_EXTERNAL} -export CHATQNA_DATAPREP_DELETE_FILE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_REDIS_DATAPREP_PORT}/v1/dataprep/delete" -export CHATQNA_DATAPREP_GET_FILE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_REDIS_DATAPREP_PORT}/v1/dataprep/get" -export CHATQNA_DATAPREP_SERVICE_ENDPOINT="http://${HOST_IP_EXTERNAL}:${CHATQNA_REDIS_DATAPREP_PORT}/v1/dataprep/ingest" -export CHATQNA_EMBEDDING_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_FRONTEND_SERVICE_IP=${HOST_IP} -export CHATQNA_LLM_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_MEGA_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_REDIS_URL="redis://${HOST_IP}:${CHATQNA_REDIS_VECTOR_PORT}" -export CHATQNA_RERANK_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_RETRIEVER_SERVICE_HOST_IP=${HOST_IP} -export CHATQNA_TEI_EMBEDDING_ENDPOINT="http://${HOST_IP}:${CHATQNA_TEI_EMBEDDING_PORT}" - -export CHATQNA_BACKEND_SERVICE_NAME=chatqna -export CHATQNA_INDEX_NAME="rag-redis" - +source $WORKPATH/docker_compose/amd/gpu/rocm/set_env_vllm.sh function build_docker_images() { opea_branch=${opea_branch:-"main"} diff --git a/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh b/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh index 
7d6837402f..a1ee6922c0 100644 --- a/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -set -e +set -xe IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" @@ -36,11 +36,8 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" - export NUM_CARDS=1 - export INDEX_NAME="rag-redis" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} + export NON_INTERACTIVE=true + source set_env.sh # Start Docker Containers docker compose -f compose_without_rerank.yaml up -d > ${LOG_PATH}/start_services_with_compose.log diff --git a/ChatQnA/tests/test_compose_without_rerank_on_xeon.sh b/ChatQnA/tests/test_compose_without_rerank_on_xeon.sh index 2d79b0e7a2..256d9de230 100644 --- a/ChatQnA/tests/test_compose_without_rerank_on_xeon.sh +++ b/ChatQnA/tests/test_compose_without_rerank_on_xeon.sh @@ -2,7 +2,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -set -e +set -xe IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}" @@ -41,10 +41,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" - export INDEX_NAME="rag-redis" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} + source set_env.sh # Start Docker Containers docker compose -f compose_without_rerank.yaml up -d > ${LOG_PATH}/start_services_with_compose.log From 0890e94a212e2b895b908da4b056246a25c20601 Mon Sep 17 00:00:00 2001 From: Letong Han <106566639+letonghan@users.noreply.github.com> Date: 
Tue, 20 May 2025 13:43:24 +0800 Subject: [PATCH 063/217] Refine CodeTrans README (#1960) Signed-off-by: letonghan Co-authored-by: Ying Hu --- CodeTrans/README.md | 17 ++++----- CodeTrans/README_miscellaneous.md | 35 ++++++++++++++++++ .../assets/img/code_trans_architecture.png | Bin 123004 -> 92511 bytes CodeTrans/assets/img/example_dashboards.png | Bin 0 -> 102817 bytes CodeTrans/assets/img/tgi_dashboard.png | Bin 0 -> 423754 bytes 5 files changed, 43 insertions(+), 9 deletions(-) create mode 100644 CodeTrans/assets/img/example_dashboards.png create mode 100644 CodeTrans/assets/img/tgi_dashboard.png diff --git a/CodeTrans/README.md b/CodeTrans/README.md index 78527cb948..b5c05e9fcf 100644 --- a/CodeTrans/README.md +++ b/CodeTrans/README.md @@ -22,12 +22,11 @@ This Code Translation use case demonstrates Text Generation Inference across mul The table below lists currently available deployment options. They outline in detail the implementation of this example on selected hardware. -| Category | Deployment Option | Description | -| ---------------------- | -------------------- | ----------------------------------------------------------------- | -| On-premise Deployments | Docker compose | [CodeTrans deployment on Xeon](./docker_compose/intel/cpu/xeon) | -| | | [CodeTrans deployment on Gaudi](./docker_compose/intel/hpu/gaudi) | -| | | [CodeTrans deployment on AMD ROCm](./docker_compose/amd/gpu/rocm) | -| | Kubernetes | [Helm Charts](./kubernetes/helm) | -| | | [GMC](./kubernetes/gmc) | -| | Azure | Work-in-progress | -| | Intel Tiber AI Cloud | Work-in-progress | +| Category | Deployment Option | Description | +| ---------------------- | -------------------- | --------------------------------------------------------------------------- | +| On-premise Deployments | Docker compose | [CodeTrans deployment on Xeon](./docker_compose/intel/cpu/xeon/README.md) | +| | | [CodeTrans deployment on Gaudi](./docker_compose/intel/hpu/gaudi/README.md) | +| | | [CodeTrans deployment 
on AMD ROCm](./docker_compose/amd/gpu/rocm/README.md) | +| | Kubernetes | [Helm Charts](./kubernetes/helm/README.md) | +| | Azure | Work-in-progress | +| | Intel Tiber AI Cloud | Work-in-progress | diff --git a/CodeTrans/README_miscellaneous.md b/CodeTrans/README_miscellaneous.md index b0c5a11f43..482659d156 100644 --- a/CodeTrans/README_miscellaneous.md +++ b/CodeTrans/README_miscellaneous.md @@ -44,3 +44,38 @@ Some HuggingFace resources, such as some models, are only accessible if the deve 2. (Docker only) If all microservices work well, check the port ${host_ip}:7777, the port may be allocated by other users, you can modify the `compose.yaml`. 3. (Docker only) If you get errors like "The container name is in use", change container name in `compose.yaml`. + +## Monitoring OPEA Services with Prometheus and Grafana Dashboard + +OPEA microservice deployment can easily be monitored through Grafana dashboards using data collected via Prometheus. Follow the [README](https://github.com/opea-project/GenAIEval/blob/main/evals/benchmark/grafana/README.md) to setup Prometheus and Grafana servers and import dashboards to monitor the OPEA services. + +![example dashboards](./assets/img/example_dashboards.png) +![tgi dashboard](./assets/img/tgi_dashboard.png) + +## Tracing with OpenTelemetry and Jaeger + +> NOTE: This feature is disabled by default. Please use the compose.telemetry.yaml file to enable this feature. + +OPEA microservice and [TGI](https://huggingface.co/docs/text-generation-inference/en/index)/[TEI](https://huggingface.co/docs/text-embeddings-inference/en/index) serving can easily be traced through [Jaeger](https://www.jaegertracing.io/) dashboards in conjunction with [OpenTelemetry](https://opentelemetry.io/) Tracing feature. Follow the [README](https://github.com/opea-project/GenAIComps/tree/main/comps/cores/telemetry#tracing) to trace additional functions if needed. + +Tracing data is exported to http://{EXTERNAL_IP}:4318/v1/traces via Jaeger. 
+Users could also get the external IP via below command. + +```bash +ip route get 8.8.8.8 | grep -oP 'src \K[^ ]+' +``` + +Access the Jaeger dashboard UI at http://{EXTERNAL_IP}:16686 + +For TGI serving on Gaudi, users could see different services like opea, TEI and TGI. +![Screenshot from 2024-12-27 11-58-18](https://github.com/user-attachments/assets/6126fa70-e830-4780-bd3f-83cb6eff064e) + +Here is a screenshot for one tracing of TGI serving request. +![Screenshot from 2024-12-27 11-26-25](https://github.com/user-attachments/assets/3a7c51c6-f422-41eb-8e82-c3df52cd48b8) + +There are also OPEA related tracings. Users could understand the time breakdown of each service request by looking into each opea:schedule operation. +![image](https://github.com/user-attachments/assets/6137068b-b374-4ff8-b345-993343c0c25f) + +There could be asynchronous function such as `llm/MicroService_asyn_generate` and user needs to check the trace of the asynchronous function in another operation like +opea:llm_generate_stream. +![image](https://github.com/user-attachments/assets/a973d283-198f-4ce2-a7eb-58515b77503e) diff --git a/CodeTrans/assets/img/code_trans_architecture.png b/CodeTrans/assets/img/code_trans_architecture.png index 09a7ffccd0f8b4dfcd45adffa68f23d1d516709e..5327de86f2b62e24b14a570977f19b6f610bf8c4 100644 GIT binary patch literal 92511 zcmd?RbySs6`z?wf;id%yq(KBh8VM*Wsu)8ocG&KCD@^E!DG^`LbH1sSS zEbyJG&wc&izZ*{Ka#Coez0|AViya4tgrIs6@)QUw31`e7js?is=$@oxi*l|X#wrw!Zkk9$|%OP#4tc~fs$3dFlL zIF%bH-iQC!i*&bi2=QNE!%62T@QDBGC36c)`oDj!7SK%eUq4n$Fya31ACofwpY-S9 z569lG3jD9>s)bC{c~rbXA+;)NomUxaoR+n17-6*9(2A*btPJFans>gJQDWCA9ai+= zld!wGIQ1-$4!_N;zNNSxmS=GfweN;|(wy}>gp=}6YFPZ7XU-fA? 
z6%lA?ziac1MLDfMcF!0rw?>A<%l@ncSG_9yol9{r!3XQR2ZmI!t*>XxUQO6CGOFnE zKF4-Y#v4)i)1KRj4cXTqbZpES{Rycj=jbwdE(@V1P~7ENq*c$aRW5` z4Rlt08HSZZ^A&yKP6ebNL_Nx__2)#h{Qh3;?e|G2mc96Spw5_cV0L|yNwMtJ$G;j& zh%Il>a-@L0{;E&lPhpS@O@K95>~tX>$;|6>=8W3w6WOlI)!&+l9qJP0v<6Y`92v$s z&rJah1V*yhojk@7YJ&af(d*G+j=v^e+*fLE-V}oa!ferU0R{f~(dJO>!9$%=gEism zL(H!hby%ef{E@M-Dm8t@+9i4qx_LajXrhYvT{m>YBO{$W2$srzk5wy<-=7z5YnUIc zvvId$^q@C|=B?VFua=vz=B4>pw^771VEa{gczB^{4WsQU2kt- zsQcn^o6vSUG$KfJ_5}>e++&jV{ckE$l>~olCTjTdtu=P9$ z?F%@f){Q+gi_uO5#Z*(a*vlL^3HH+gm%K=vo8Qauj^}>2>AaIA-TH^m!3E_O-D}$R z)Z*T{2)N@?&nvIsPY#Y2;-)`F63p77)JV=u1v5|P%<&V&n~&x~={~;+q^xS!jd`^; zTyRFqk|zJhd`M{E#}VOOetEI!I*IB=fDOLs9d-a5ONPLa#5fd(fK#n)WiT(&HvDXB zt*HFwj8pX68#BRl9t*@H1UJ`_;9>Dg_N)~RwI2ejj;Fm;5Tbmck8CEt4{-c7t=$!; zG$puF>Y3U6HDxvJ8t5?nf}r;sVcULLEY`)}feI@XF*(xI|4b+xV9aXLd=TM00Vb|; zUC#{yW#k4PCGWjFDI6B7j{-LD(?#40B^`N8=~46Tv9+RfS~3}>e{e~i$zOoEodpXf znXQeMd~9gHAwiU)a*vqW^jt26x_~zJa-zYfw)^V)n+bJja@iU_4;-t}i_XVmvl#PPvjU?t%`bIA^eWGcKoyYmT&Bu0rNGU$^B{vG{!zM`C2FY^&1=Lz6u-}i_tGZtdu$%ys& zvG1UaffE|w(<_zmZ^ml!Em_~HbGkEMwa3t$J&(g>(ZkWi15q$i($up4?4y8^VYHR_ zuIKJga;>Qzq*A8$UU<9jn8ToDL!tf7Y%`KfuLl)8YmT~0eZ!Zu>576SU8vyT3zxGf z*xeI`rnj!#7~;^>9M{Ina&3is1?zbHj4RKTQ;cxziph=VY9BT5G1BjSDQDqptj&qu zzZrG& zAFtb@D3OaRMlE9|bt|&HZ~ct@lwJ95Ba^wY4I8HCeJ{^elRA<(^qspaCMvA>ZbH-O zg6CiGuu0Wn-os(Emzedk)nhZb@FzlN2q{y-@@A32B<2j**9<3t%@}6-p1nfd_Lz?r zbnZ^C*}KoN%(Ju`PV{*q_%YU-ibwQ(nTXlHH|+WZ|4IbFnCklAabee_NG1F5NCo1|nBCt*!yzdxboBqcW#7m0hw!1`g zj6&XiRqKNfNx}JPSJ-o&ORk|3_Ny9-Ijzfe-JC=&O6*?=yc&cfM2n2-k+wArV5mXY zLk+%{Mgd~w*VoS9I_^TPoup^I0xCk3{Ij3zFV7Bd;8R33YxAVmx-@l8IrWJxR~EM8 z6g7G0tSUh;uXyjlgQo~746KPqCVxv9(p`A}nmmC9JD08WQ2=br`yRbGvrSh#vU|-T zxMkeEzxpKpKHOjW=oO76i#Yxa{36?v|U5Lnf;*Smu zTbQh>ee04(rP?Hxb&%&@N-?c2B+z-3c7?2Lh*)-L|MRL(BgnB##K*cTJ&i* zSwLzt=cfwgrBadWbR{}_{(aYn{0^59DGqC3kCWWJr`Tn zrTJ3*mKFrxd&p#8X;r;>j#8kiBXr%4_29);gU3buXKR~ATt}_*Aq;6~r+Q@liH^(w z$JSoTYD=){_bLYNCO~l>~W&i{OszFvbgk7Of-tbWJ{u zdJ5nDwfIvpx2-f=538-OwW`@tNQ=J78Lw|YfBaZUa^LtI2>o^oT?^j+xFnRlkm+Jx 
zRY=Sz1SPD;%=ts`pxYQrrnfr6pf(;?6Kt>%HekLv`d55zG$~2TBr=TuGVB5tOZSra zNP8^ZSAFFrlHzD8!L=l-744Sa#%_meBQD*Y-h*y)Etn|B(D<1rgfRWj&nQuj&e`qu z(EGDGX5Vdp%Z;HW4HCXJE%_4*O`YBb_p@Fqg&D(Lqg3{9JM}Zaqp5q@?S13457)=W zIL6rS`TcBbTlE*?jW* zE6;GN{AwP*Wtn7$5Y=-Kb4_~H_SuSWhdwIG z4JCQ(m)iOn)F*36^`@;M01W{dF$a|1vzNB2Z_Mm^>G;8Ns353OEL3`x)_MB=p@r^s z+Q(a{kz>7D&X2p7lrub2m9M`f3rR5qJ-5t0K#h2-Q8pY?od-vLAJq4;pe2EA({S=u z;JOLeO&3pV?4M(~`!K;<_vioosWTc5aWOkD6VH+Vd0m2piSlMXoH8^-n0MYzj{mNZ z$W}nYk!Jrwie9ta{An`1)pVVQElVX92FAb{e<*?G!BU?aP(i*n>0;Rv))V#+o5)EKD!? zbZ+O7A+yhSS_u!+=6za)Lp&siaYhQY*cT0&*Xoa_ojhiCjb??Hj&H5E^49%5aZ43) z$ng|agCk`jj?Q?O|D`>?u?lxIC76xA-N~*U{r*xIp7jOxf#r zt3Jv5_R5}6?SoVJ#$*H;`$2_h>)DQOt&2&^>p7mQhUM|lh4@#=E-5S`fmrzEKE0MrQmID0H#36DAU)YT;L9>KN<}yO3_I{o z&TSkW-Mj19-{Vyd?wx*8h@lZ#(h2vSia0=BEz^mZKUsxh=l(e*9s5pP(Pz zv5W6p&iDDkD)gnY3?8$#j_G=@Jbi@i8EsBS zJm%^&r9v5+0XxbgpvQBMWGHyj#OxMgQH4B2s@G%R(}U)E_96;Ge7WX+2o5A zOse{|)>Y^-wRqZ0GU8aYeaxkWD8ol~PvA=AS553(rGI)IDra$)w#e(e@vwgQf!FBN zLdU3ZVLCzz*X8x2xk@8zksa^icDB#Y)SlPAteb2YeDruFuQaKbn)A^Mvrle&z7KV) zl(}52b&riwADO z#J)s+quR+w_pRE@d#hbvt*@@E7e5)tx*s0rdhX5eJei8}vr9j@{6RLu_2s2|hS>6{`7ZKKN=gNuZ&N*Vx6Df}z#xfZM^tU-;hQg)8<{gyomQ}bA@0#E_ylI0$h{L3f+Uj#+Hex)j>Ju|ET2WMrpi z+CdQ`+O`=^ro*ef9<oxXD>tD zkWaxpE$boBkdnY|n3C5A5Vtcb{({SmM%_<(7X!_gmH)~&r{#{H$P~X%7|RhHo{HAK za@svwU_0CN`;shXOh|SgTbHrxVVj#=&?rtw)a-8OrPu$>wMbZen70YGbV2d4toW6^3>==|DwJ(p?`x&-cNfcy+YJ2j!zlcq9qeVHxBIDk77>LeVeN*D zFhQZDIiZlPNMZs?`|JU}SP@@D_DU0!<_9s2*$8}{6w`=B9~9g^BUiq1zS| zU?!wDg;I^{ZNIFw;FulE+3oFj`uTY*vJk7C(Fe(C-Jh+)!l*Nw7HZhL2nl1n!O7o( zDJQfNXrHQ+#7sk0INv9ExUHCC$oz|b519Y_C461Sn zaD+&g`6*JwuF8-X#0gBWdlu|ON`gsw0Dc;>HL?6n#Oqciw2FfyJxgeGPb}8 z>o1`$^xAd1n2j4Mj1<0_1taOj3Fr@2e`Db0a)TPF`e!#TtfN^^DSSJ=Vw$#T-aP19 zqNklxL~A4FHI{7WBm+CuqOp3HW_ZS_i7fNNOBtGtBsee%*)-uA8Z8W0{ZHZMgk~i! zYhrE0cx5oL7uWTO{z+>r$$f)Ycy$3WV1es)3so2+T-#zlu2_=z_fIi-7HkQGhjj=t z!R5%#prw~$^v?XWa~IWk z9sFfsyteSL!BViWYv=dA!Nj~Fex5@r^t9zxJkL1WRe!RI_^Us$AY9>b*mJ4?PV>t! 
zc4^-FZHKd}&r!Eiz-c5*a=hNq?t^$FXxF6HzaiH1FkRT7@`3mDEHF`QvxFio^v++J ze9giE0%sEAP|np+kOjyQplQ9Ra*-cK9@J545IZUk1;EP>&7;D5_ ztcRi@EXtNF!y~7xArvenBc<~6Zs2NRAEGD{{$d_kw2c8uVP%@3yUscYq0u>O*AOW< z+&@qun@N8;eDFAmb=#%ySo8lM$wm zQ?14j;-s}6;Kne=*<_oRGhQ$lPtH&f zas6;LRlipU{P|89(>)M4EJCi7*+jh#)5_viAV1j8AwFrXR~6yL4%AaO%7rwn8!LUD z0t1qXDiU53RecUys?|6M)(`6CFK_q|605C9+u`Ki z_fEJCW7t>BqZ_4P?}w! zU5p3XlJze`mP{PYyqxv{Ni3&Tc$gFolIu_GJLtrc-@&I5QLJ=O<7E$(B5a42i1DPg zM-{9~*o8%uaFGA+=y)%{9vx>tYN#V+v0c*8q3uZ}M0r&C6qn*{&xB{IhP?o)6e)95 z(IW~N^j?kjjCPu^0^Dap@3@JB-&HL8sU}9R9627IrF%XA=_lY8i2b)XJ7Tvb#e)f; z!d4B9MFCghRI1XU5=$B)(tALgsR0PZOU;<`?yXara$js*1-Y;4vY>1*6L`d(d8x&u z^w2^x!C~nrxFTfydhRHtrR%uUrxLAV~5wQi9 zCTFQ00yUsiMNk5Q+j&mpUd#vuApsdY;q-dZSuaciw~YELTcFdNd>gDqUb;giOX;xP zfJZ(sm=el}H0NQ?1lG!MQd%O|uMgZ$DhEoa+o`&)0h@97aV9JR{J)`=Hx8-qf8?_tRW&ks|(9V!xy-ggXUq{+!b#yLyF|@IZ7PPf?xKB804|=?kNRki2z%D9jxiZ=8=AU?k8~o^2C>pbdcui*(VP^9e#_8ezFnHy3^3E)((>>y#+y-T-&rauvs^X(VTnxpO8d zhKs77X1;BZL`=HW>vJ5B-2B)dqN4-0RqZoxVF>pT0~p`Mc`Mo4oqEP^BEpP09U+k! 
zB8RlOY%&kb?4uYMb8wm(5a4{UYEsP8GT!@hOv0#un>Le$E38}lQZ$ws9FtyAqs874 z4UgY_Xwa&_G){6)+ZIj>e@Uc36JWDcqR6`ME5VTSy@jGsXePYKiwpWYH8gTE%2pO3zC$N28Hzg#@wD-IY(rIE}TVRF=C%pn8q)_PITo zKLxPfp*R~4qE4*OD1DC2-az}8qgsBpzsc*<>*(RT1FsJX>+o9@?NL6;4G*e z6sVP?J-%YPmos|}a3Oa%&!tdR9vci-WgtuByffE)#C7sxw>~(mgWQ=$^Q#M;YqAoY zG|%3Fm`&C}u^G~GSbJu>^rN_fp0FAvk3)zMd^fj{az=k#MU?o|?21<|n4t)yp`7I1 zTJrla+UtE^0{ne(mF2FY0m>`Sz{Bu{|Nz%z`5m%+Zp4#;~8j&f2+OnM?OJv;PntVP6BOqZTN_(&NUGov^I?SGpJ=4_y2z9oLS!4MT@H zFllSwGcm%LWFKq}rnZhGIzv&NO@M)uTsk1Bg+@K`SPsB-d8io{G@=};MhPss>#^M| zt&(94l8Cs@*sKP#wTjK&9>cf9q5Udv*==ru0DA|4W=Nc@l5uj$BLu;5aCo4n0Rxn- z;H0)u+guwg8DSd_Pu$ZNO8TW-J}SZCH$iUI5nCed$&};DesaCZ_qD&y6xenrau(Y{ zYuO@$e?+1JH==0A(#8d;jgq}L5TdPWDNRjY)sq5%k9yvM*2_H%gp*6H%km8uMYws0 zZH2#1FbQ#7Glf;Y(YmaGdE%Q4KR-!S?M z5)|1X053jKG!uC@`V}DanyxVb%ie|KiazHBD_^b7II#b^oOl1o)F-fdF7~GDv+0zk z@v7N_CzDc(CK0PyN|8&JIiT`P8br(a90~z~!x!r8(dO}gCOjxiR;Py$ABIug;@Vv9 z!V4sU!K&wNuFRnlB;83I@xZJtVd+v#(n z$a>5ueJ>0vZEKMo%(;zyL6M2J0DZMJRt+41Gx6ZD7J*KcdU2ieh z5_}<<)Pe0SyYsx+3M-RFL4r0gjMs=e)*|1R_OWYH)W>pFE`}mtM7!e!qa8EfzuyJ) z;7{(me|Uj??n8*F56pL%qSIp6&%zNjY5E|#i?jRR2NXd>P)D&b7?{BQe}Wo4hx^@t zk1cpdePEhAobf#`F_MB8Xo04JS0jaqng!}ITnv8D52?Y zE!;UqbQm=R6AYZWe(>k@o|WD^&Dfs36@HiNo&^RqpAy)OA&Z(#*zxygJ3F0YVws4> zt22F0<0~0XL*xhV^CF~}^N#*R#c~R=FBvi&{`~oKKz;gWn+FGDHTi*&b_6*m=X1Nh zBvFgRHwdfbVuXd(F>zc`Qh#JO`d{OBTWFaUz_r5h;hBxzS z2Ni5p>gM-ZdP(wv2K~vU0?+*vry!A0z}rb-t#T{3%Zg{*hzF#3t6{t-d9pRO;6yF4Zhx6Vb2CHtfmZSN1P-;B8$@bo^rTWk&I>;T z%h%V_JqzlePwVeohU-D0ZM%&q&KK=KS)dvieu=Hj- z*&MY*0KoG3>l{gB6~(35@IBXf#H7g`-L7soWQ@-I^;m=4G{u-<#_A-zP=~8kH7ti# z9HXqvV%Q2j6W?1_wAl(blP4zLtS{3{H8G`duVu*RlT54jZ4D&4Gy|xetO~pfC=MrD ztne})sh~SR;nE(=mEa0&i6qFP6^;Wg$?6@oljf1xCiZ4fIKoDxs=wtQ&xQkZfdR21 zhbXk@RpU&f=KZLZyyTnHplJz+&tjgvWNd*(9+kPy>(iQhi?z}KV)?uUb|{;zFf%Dd zPNL>2WVh;Q0=p7e*-;rQ3?+9FaUxC?UX(c|*Msq1+L@?`pEi5$U%Rb3N92d5+z3!> zJ*gG=C@$Tmi(QF@U6?}+KIg-{zuVf|zdVeMj!ukcQ7Jqx{?-7Dm;82$Siy|_y>6~A z#Y0r{HSS6nqpzn!3~C$;#>#9ZdNL(u7R#6w5>#VlekKTlmMle&7!Q#$am@zGW6H0a 
zuZ(-X+GrPPmn1BJ|Fdc9nrAq>_vi(s1j`3^5v-A@5@waE4c6d&@=?VwR;F3L(7tJmk&W6eg}$<4 zAcSg-Roi?V8Bt`_C7`yydh2+&rbQN^S*Y1*@rF_Ju3@#kWS=m-vm|}Mj#*d21KSb# z6uL*&33}w{jCN7WP;*uZIRhRvK_HqJj5RyHVVW(&v7fAby}YOEoF3Yq^zixMYwpB^ zgirc0KJ8}2>ZU%xj60)Uer;wPH71~6VHs`f5qhXP$!ur}Y2*{#Nxyax&DCLV(XSIv zm~`fp2m5!Nt`Xvu+ieYj%_Ig zZPPNGM#{BpUJc|_WiaJ@*F!qzK73xM)>pGOh4>0VX`c+8U3*q2ojaVi^tlR{t>wA* zzTT;Kx0PqFmUXNJg+7#N3q^!A3F3-Ca|qN?q)9awj@ZE;d5mL;X8H^CP-sn=7$x6e zAe71kEhq&Z&P>T-UapTUiRl-3TBw5VLv0_!#j8fbmcGJfZ}GT$8u}g4L^XBcL~l&X1$r2R#y0cw2GvQE z-#7-hu8zWtithpx7p&I3%iV*HNdS==DT8lVW`=d{Wwtd^i5LI_nwqRSH5T7}=`;Dh z^ZjqWG9X-}dON8Hn!*W_W~sYAZ7Yu>2C@8Ogi}^0D~oJBLi#Zz=xYrQVK<2S*c;IcEjb7zSwe*gWB4d+qQNPP=+!eM2%Sl_tA?-4s;Ch;LNA~ zHqHwH8>Jf1cfakjTw${;>hi}IxKCL8 zB~)QqgXJp^%mpx?4Qsi?Ex6J6@q(rLoA;qOxgN|qPXECmBiv4%9365sZ zJU;LSl@J?=T*gL--_P6S_DErp{^*#_-4Xz+c22Lp+oJRk7r57z?>x{!o+^>*;)QrO zCn~NeQfM4)FxoYiY|}HsR3L%`dSssV35!i0+yLE@Cg08h=7#l|^Y1$2dG0hkM#{jm zd$uY*d@y^ipyOCfv3E;Lh=QeE6ws12FPcQ|{Nh`~x`MLfH>ahj`GPn@K7G1}QV=t4 zR{BQ9rYVnryKanU>q9IT#=RwQANCMSx*-Moyv|W(;kU3D3A6osQ-Zdf@7+{6J;Q?8 z(a+pVlCK%>(@enI){`_gCy(U|)C+3Q-iN7UXYzQD7H^2h$Mx9y75&gmY&peU7Y0)5 zN%0@P*JL*BXKh%%w6U@=H7dKblc2IvthzcZjI*K5&H3wnx} zCXUdN`otBkTL4PK{^{~7I zn^QKs2nZ5Jm;_XtNEra#V3-ugRbpK9K$W>Od7D5?ZeePqsIef=_0W2 zuDFx-04d&cty&iCtsa_`2zj6gob>#uc)hq?RxJA_>I3VA&*6v;l5Dy+)f0;+_9IlY zkQQ9)J;b$iZ`ib4kMLC*%Jf18i(W7v_ZjeGOXo!<4}dYVz&ru8yL0?a5%pyf3 zi}>_mLkNEQvA&01-xez7a8|L(!T4EJ$CRDO7Q)Q;;iPUT&HDT|{I@oON&1!6i*GB1W%G$l$iF5N(102b_|b}R+}cRdy7o&+cZT$FwN3L-KfX6u^09*{c6KUn-KR(=(s}hy^-Pzo z7>?iCutRY>j?L_>WE{B>QNeQ!u5a8DcQ!S?97F{lI*eoOxKT6gAKg~#5DA)(z?5QG zCme>88f2h9ukCwzuWU*JU>RLfIIE~T%$4a<_f97u%b)cbHGoao94nYB7dy%k2Qrib z)Uj&-hWp68gGg-I7Iao|-XL#tEpOdT2nf{*0^3O$+pL;(c|OB9^ktor(EjgxX3sW^ zAbA|Wx?nE-s`1cgtBZEbs#R~0lJL1%(EW{E)GYNT21hx{{CJ|`JWBRJNb&76|E^EVO_@SykL%DK<$!s5!jXXgs7)d~Bb+r(pi($hU(4E6H zCelLC4U^$yQ5yC^#0@MY{kMpBVPSQrzJJvpL3jt_XXxKG2BNThwfAYct?*(`supM&XPWRi z+7Qo;`(gXHe7ZOCSy)Rd%1UT3)*{Xpv>880;Vpkv@J+4$7b${v 
z8SUK6q!j1zDJl6?H0LH8xJP|Be7n`1@hnAE>DS#fOfPvy1D|k28VYVe$AclNS(RIK z=WT~S1_iu{XQn*QERDXRMBk+~G5}LXyTnnH&R*nAO0LaP5x|U=$w`^{kpv@Got>o) zhmW?zYO>|$8!~5FN3Q3|hpr(*@VTBXKI+x>a{17=R-oaIGf^5x6AKUMOg9;q=q-Lm zkg-wRvdBMZc75ShwmFV%W{g)&c)nR8QU|(_YNA@ri`xx^AqT{td@8H<1M4?H>s#wQ zkf!R54Yn_@G*5fdR&hdmhzOo1142UuCk=nmMXIIYS@weUAWhUm8LLrL@@|_;;(B?4 zN~%IqSbT<Xs+Me@T zu>~sj5bAQ&meny3INxuGw_Z_q3-#Ao1;fY&IpPYniq^Cze}FzQ?X}Th`<0X3`NG>c zX4sTuf^P+o`X3*=6jF=0K7&f9?J}#I58MICJk^{cnRn^QPDn3)Uh!E}0JPq&jgr4v z2D$ql)W&i--T+!K@_lx-4iiM&Y@sRD580HPaopM- zk~9A;Bo>c1^0-8AmGajX$Kj!n-xvly)#EmL-YltC`<7zyJ;+}xSHJQf^X@M@T$M(K z1$>6oP}kL-xix{3$IdJCkdBezdHBi6f<(~$%GWOaPK9>cSym#F(ojsAfQYx=;U(M^Os%&TX@&f)>X<5mi zF2%wu{Vop?qKBWH`fZz@Y&k9U3^>KLZ&4iR&8S&70tlGn6Epu!FSbmy+Rh6jT=|1P zBKnpd*PkG`w2n+KUq6OU5)OAgabpzHA>5og+}9H} z#}EES(}s+)(fekS=Sx$q6V{QX`0g-7 zU77JTJ~SVU;e4e`YmFx)m2aaxPntQwrS{(_r23s#2gxp3fjT){G=_}*5Eh0e#`Fo{ z1~v66&|pc=tf%9kS?!tmkqMJ1%vpof&^Sg#APWoD=f+Ce~=-w zylj3rc`7dyeckvQ^6Z0<4(NXSIblOy^$K-1!Koz=opkuB!_fsHM9y-eT1{9JN(_sr z?u?qSau{}d=m`Ql;a688)#8uvio$67JVo+tvnqx(9-VXVqX})4LOMF;9%w9#p*eTy zzJ(gjjuevaGum|NHvAcpXED4&en1U!3Glmrw7KYj{+77(xl54(sOD2;^}6zitB9c^ z(f~)dVPo3MMVfu(f>`ZzPZ8A*dR30Sjhyl*7we@ssZ*w&tT)F88## z$U_ll_RS5XzlitACtG3nLUG;%E@LSKI2vJ6D$k;v>OMe+@(xt0BS6VNxyQH)5#97eJRTw+^Ne#jHxx% zzE&9Cl37EA=+f$1OL7a2To~er>rVEG-3%-X_^hPQYS!K>pStwRFnhbwZR__jl6f%8 zpD*m1$H+zBef0kISZTqlg^svu4LCS=)hVgydIj>O-wqWf$|pUHnPN|(!!-8M5`3nI z7{$l$c;C=EgF1Mt+)=675U4#+q>G67833T26>%NUPO@%%YO#2uHC{@nv1 zz9@`?39GHCx2HcNWkLu>aBZG_yw7VE(t+`r(|_l8mCZnMTt~G->3P8UH!kh*UacbP zzKX3cUw6X8&~r4ldBAX-XPbCHZ>&Q0(IzCRSdVmd1MunCB?;f_s|$Ts2?rn8}z6WnfR>#c`W(o?9L60nPcmqkY+%_Eap&CP>8ITL$)|xRw8^{HCq0dG+ z?|!`=ln_LB!+1Ny$Q0L;IzBuCef%QMWLEiFwS~6AAbn{g;2CS<&k*Wd#^TFbQ^}l$ zYKijG)S~VN&{Ngz)_XC)Q!VjA)1a&eb?I4L>x*#8g*Z9um86bv5-W=j1%KKJw&YYX zgqfPo^RETaI`SQ2>&Rp z0Af{b!0BH8KfQ)n4NDR&{vFZRWjN-;r3iYyAgLuZOK{$uyE}hZ{*F4n)BQ6t`7jM* z3A<6W`78Z$C2=3<$xV2$Z-c;{GN&B^!v+KA0_R1&vM~2HwHbkom(X`>lq-a+6BDUI 
zTK|*Vx}GT}g?+cqv*N!RB2p`da>3Mf|J$pP1|Kh!sU4G@WfSTWwTS*%FUE}1!zBf)2D2qQk(hQ%o?N7OZTwGY!y_3H!0H3o2%%|M6 z6$@;CoYz(jNcqBy(4{6M7bmCkR_(UO7a|C^bxS>$kA^$l7W-NO@X5X0$Wdss(&6Rds}Y83$go-lTBDX^wXh zduf(hMxe<_*)7)Wx;b8>`uUlUj1~`Nl)}Bbm@=`pvz}O>BMEz6gte>U*w3_;P?-;U zkt{afe`GPa@!*UWR`q%Fqx^tE@F(?6nB#-h_eKhF!2HZ((8i-AI;gukdO5;r@a^08*Q;knG(z4_1bk`+m_uv*Iqi${{eAWa=eQII= z>;~V$oYzxkdFw?i-xnJi*zzVGbf zc-#6nCMsMaS>1s_aPR5Ur&VOqSTVu?s1_o~q??-|8}kQHT_T?0g;3y8Uf_qAZ2gH{ zSS02G5USzTw{iy)BamT+k(t61RLbBvCv}+8L^gB%qv0fAxpWX&xi({ z+kzU{MbRHxG^tS_c6#bcXehvM1u z(i!kBB&!KMl?2bTQeJ^YP(2F_iOixT=cBIOND*Pb0PY3ayCt-kdmB6vEHUx4snG z5G1G$nB8Q>QBt)DYvq=4NweB2>s0H~i?PC4 zlE-dsW&`-t$SgkENRQUliZ4D5oLP#(H=njOfM3ZQI$NYNL@zfrGaBFsef((s^F}Qt zvYs!=OiBkh*v)PBARXnP4urc$fGr5^yAikopcyKSD{k}!3^&5osyE$X^?iw<;*6mY zgQ%xp7j^P(P`>~=^ZMhNkK98xJ#-jScQvOe+EhHH5u(!lfkZf_k(9_lK5Qiu(a}k* zw8JtHSCKP{W6Zf&mzf*q;8fw^d@RA&gmuijzy^Zh4p0*u((nHLVQViASrGTMI1moV>J-y;DozaDKR#y8LRL-Cb$W#6l zA}v?dtM>mg71F(}>UjcSl}A|ffg$C{hpf%l>Jc)sAO`6rG?DXsALp`62#VR{-G<0XLJ91 z=FS;dd8B#gpA_rSVK*W*es+z|c&(rXB|>3k56^6)CrwDdLARmvwS|)Gx0nf%(jF~& zS#lrH+}Px8OL}Wg6%r%?rkb+=feOev!A9IcbY5RmN=1K{%Lsvw#A6Z!#-R{fnjHWqwa^=>}L`r@7zx;M!jy&DLDMznyXckPSO}Apr%%Ug<&@ zYcYIs#No&I0769KUq#-9~IYzl< zT7%V;`2cn5LA4GDB9VbR1Ph zVt^t_x<&+pRMNsKMgI z24X$^qMSawH;^mWjZNoGpXqayN-bpnYWEdXJTFUVcTM^OKYf(pzs?TcOc0QQE`2gkwgD+(!@ml)b6&wUuGco zVT<{=uQ%})YU=z*E*snwUe`VWGkT9?13rc9R!#e9YvP39jCcG+qukzKx=zN!ANjzz zSre$&lD@ty_$79UV3dQg>t!rL<;vHT$8F01)g|<`;j9qJq4z!i@ox8f=Ex6({IzEN z`8Uj**^x>g#bk{9yuMqTMP(nee7SVboOIzZz6=)?{Faoo(9S- zqIlvpr;1@p4Eq`j>foo%qi8 znM7-ycc0jw7H+;a&Kb$#!Eo+=z!zh&YC(;=8KVVmi&4WKz3hLo)%e$Uguo5hud0Zd zZR0}Qa8@%gH5Gw3P_J^{s0l)A(E^Tp{t z^!X8=z-?o7+D2TBkUxO>g%w9I1knY*SK%a1DmL1@YmyahQO$L~?GZH(MK%$=_mg-2 z?aG;3XYRj`yVzsSH-Md-x>40i;#3*9#Ud2^MFD4h9$b{hC%G_*v3WHD^Z8l5EqF9B zo?Q9*P%8dl3)iGxw-NRDo9`R)DlTy&uJ28a(&>+5{IH!^Z;t!k49D5)z*O*T^^sy- zP~_JnUQ)hBg<#Z5D8=QlOn2JOR+^! 
zGss$}jG6fms*+WBRAlh%9ts6X;b>k=5I*_&i?TcM!zzmWrUXv#1qv%yz5A_CzMfW} z``oHn#EUkqSa7LU12M;`k|Bz;;Cjy9zLCT6a>n{UH9PSxy2Ylb)!sP?<3LURHv+N2Yon z(A4qvTs*Z_3Pxsc8#?j(CtYVZlhL444BNfp<)%3dwtM?kzXljV;gIb2by{AuZ zC17j1=bK@5U+H?~!DE-M8af=GY0vI5{ERA027n_^3 zIxNcd``23}2Behk5F$siP$`iX1O`O9dnf_v7^EBNX6TOd%>JHV zT<7@5{%3o6!8`A~@3YprKliG^-sb;%avXE@S?DoZW0*G`=*RKd>fHQOA(iP6`SI-?iE<9HP!ygaj{+=$73^Z4W;j(t^lvp88cR5Hnq)= z-REeGJ0ayHPCoqb~EKZH0Ofb0V|SKaMV#$Dcl>wfAh8!6a~W5XL4 z%ND~uIA1t%-4*^*ofoC?oax4gs{;vJ4tau(^UwS*kJU*8-$Usk3Br7q{R*0_@$B+C zJ+AT|^-Q_~V}r3b0zHW*I@KBZ2jIojO=j!|>wM<}bO*sY4`qp;#S-Bru&?8CstFBG zO^Js7?OIwP$fI(Fyu;dV|M0$ZQcw>O!S!-FtkTLb1UHn&5~3Qp&Xm?lE&<`iZQZHJ zu~zRA11^;;6FX)zYFaacBFTv)<1M)g@Wq`6RH@Z3*R=s@>dMglLf+oq8NEGQ#ZJA;0`OZMBOoW_ZK)3>@5*N5m+waAYFqz_xad(JyW5f0o;iDrj5`u;vNC*!y#$A`BHe`ox-sa=Q%qPlNgMm{*VZ-F4;Gx7~2MfG>Bq zXDtQ1;+1JHoTZ>-5!|PB1QW)A-{aZRRVhW^zE1bORQI+r*G&;CNmBM{YG~xYeorA> zl^scb9@;ou`#sV(r_%4mrYwcwU7k;lItEaA!8xxGqf}8hGeN6C>gv}o1~0bH z76Ow{nh5O++NxV4>-(mv^%@vCx6%nYt_?oCRjQ5CNuRPCdS*&RvrqAMdrDclN6SS= z{rTp7z%Xb?=Vet16F-uJMF6gwj?RNd;B8tvP|yovezk%Wc`uNl0vD z=KZ3x?5%EUZi>cYEK{P+KAy}hj72B~C|>A(6XRKgor9m{UN2*L0 zcwPRi>eRzL+XedWP%R2aqxjt?E!}%>ZcZi^Ds~H*4p(&-y3=o1PBai;copyGFt6iK z*Xo)XT7oR2yN#6fcT5=8V5*7yt;3d|^aFtmyngCSkoGld2LH*z<$MN!%=(sN#5rX@ z69e#4N^=DR-1wjA@5$?x2ezD*+xuZik6+=<(}vS!511`w7GMP7?8&?TDSA_4iXDuv z#hililDtj8|1ux+M5B8d?7~^rOjw()-r>1lTg(&KTATl}t$xomoF7*i{PC@fulQyx zF+Rl{Tb2FV4UWBW76&<-QDdgaNOiRhl}ezmBpB2@P2v=~6L@aD|Dj0#dvEzDtF_vg z#bn+4=+PpIdNQ<78*X%}QBnY@0T)=Drz z=a0Zo!Vm@=cOau7atpEymAt!JxH}fz40jfLNNAddn4Hvms+ReonZ;kN{&-LrnYSP( zQh)K2Y86JwqmjENs=>WLe%Nx{=y+6c57=6iA&Clo2y7)J7ss&?!`yPs1TtL7HE#1b z@XcAf!LQJg!yytw?oMkQ*FUtll0UTZb9AdPq|?zdBFMPCl@tb(24(>ca zW)Tcg$#Lu1-Y}}Dq&ZP*>hTl51eZpI8yyRu`b%lct$sD|OR(f933rvg;=q(9w!;nM zmDdku0D-fTFW?0FmxugwryArugbw=XI;_ljS&jq(zKX^^@7KJ}7sIv5a-k2d=zL7< zm_nuU>?%U&R}V-D6!Vt`c{uroV~N%+TDwwLQkU0{AL(8kttZ{yTosT z`!n)`J&$K;SQ+MAIt5^^q;NxDdBD`@o8D7c-Jc0^j_XHa*C65~kPoC-j%tcI&A9#nyZmB`$ zl2-cz8diE^wO!Wx**w<m1`cCr)T_o}3g5z@fGqDNH`q&K 
z5OmJ*ND+im<=K@N$|!O~X0Jq@1T?Zh^sW=;{mXSM&!>$7|5opRo5Xijz%UNTr;;gg zg8J5lOA~wX+h~mgkDOgrn6n|XXUUbe_Jf~W<@P_UVkTBrUR@P_2X%xOK3ih{c8b6N z#Vssq)r+IS9XaFqc(s0a>1rz{ouYL0y}wSXAW~Wqqn!ka>C5aYy74O($N`#;`-Bp7$}*q?=71e~$Z2gN|TuKcz}0 zd(CJC{`1sE!>dH<)nDuU1L95Zie%DD1ym=1~B7|aOe z!=%x%2xuobkc4gYL)Hr=83oVaZaoyq$Z2=AC~Yk;OB##%&IL)@*Awa5=)^Be58Z3m z`WP(1{IrLMmBnbg4N9jqv$LhWPM?&-bww*Z=*6MB&1Z7W9BK?P2f+PmMU=G}#h&z+ zkS?EhJL`GsBV)}JW%xV7<3U~I<3$Ih@A|Iil?N-Aiyp1N$U0Y zZ|`5A9lFhSD z?I#6UD=4W!7FN&>k*44G|}_M$eM!qJ!dWhJ_t?J8^SAv19L?Zw|Dnd%sNqT5COK0n8dvGu~(Vi@dM>XBj%+Pcdy!F_Y92$sq?VC&&Q3mNN6V)rF zNbLCiV%!lw!6_k>pS%*&M&`)|%lWL3e2z?AG`~#H$w@O497@X;7sD?=h%;6^!oy78 zlfst@D3QKxGoyIP5V#^>NN~s^?afOr%cTf|oqE(7y(Fvgi~79$Ls`qm^!w81!pHpN zc%>LzYkej;?dYBWSV=-+v@rMT{Kbc}{>BWim`L~Jh4nM5z#;=iS260iLvp_5CV`(% zHDAxAe*FrL8y{XbkLIaMgVR!O$-H!}i*D&lG?eU0kt$3IKgZThjYwVY0*nV&1_T%4 z^EGpQcIO|MqQlv#ZVuxL3c1}(!)#y@Ql73++uCuuS2ansVd5My9dm_jej=?k*|zn7 z^6CR8^z6i9kl%>Hvw^BX?vZwq;AqR@ti|3#)WoMcx_Gc#(cs_hHMr!T(ajA(L)b_@ z_x-XmnDR&npC_|i5`3eW5ax~q7=?}(y7U@%+e-%eU)m+(62e&>28U77QCO-WeZ% z7=OA?eeFv=6SNsCPu;h}mbsrzR714gwH&!#WwQ#ufn@TsU1yhts9B#m$WLhFLLr31 znJyhei#kjr2w^=P0h; zM9uEe*vqmK@1w4s^d=>3z0e8i{jYz*@Vy0d;8>=E03onxDDzL&DNg645*$!pBsZZN z&=wEd{ON>iia*a}Mx(b7BW|xAq*wlK@BtPjf5G=d@=A|gs@!K?qs0bP{sb0@ASF-; z2qwC~c<2MNUhwwG|2I)SYQnk_?{=cg56+Mv;#5$Oii(0T@9jO+mGNPin_^j61ojkR0c{f__0`{WZzacJQ{U0BUu zcwhQn+{D~%3h8!4tRy65IC5a@4HhSbhT~W~DE!sOxiV937XHk*`7I(={NH3JPSMbV zFh3}bj-00zgx*iI*4|(a!WjB;T%wZ-of-0cP`|)_-n;AS*2Is7%Rhpx$+sb<-E4JN z6#I8`rj2zc?%kbXaZKM$+mO5-T(4K^fw;TJ8+Z&RwC+dK?1kTfJ`hMPE zUUcyzvc{*tFR5!fUi3z`i6Bg{3!EHt<5<22-iNz66YP{PKz{dc!}gs)YvdJ>o_@U( z?@75{zb(Q+`3YFvhnU#`jUO4_OX)a9Sv&6%CvxNEuf3}dN)@AT2PF-h5u<^56_goy z-b{!ttVn|+aBKt@5FvV#2f}r==?@GS(r+YhZx$&<*KrI7V0}#zy6-LGd6{wY?vi%k z38{dnq<-n9fQaS7Y8J|Q^6w@6_WYIM3v<69sWHOdCs)VLc+!NV^w)ZgdMlf`p!4d5 zXG`X`#$&s)Czw3p=udt7i=0IC!-O3651z%og)-Ai6}+I&{40{IAvdi@13_3uR9ODA z;r;>(RyVCwXN%5WjrC~QGm-ushZjeN3y!Xyc0to*q0*E1BBV2>0iwUeOnsfiX{`kO 
z;WtM^oxHQ*pPA{*nAF@p$1B3BBRUH|XjaN;-#})$biVaj^#!E{&WpJ>RbnQq$Yvcnr#0+Q7ZzRg+l~-Gx2A5kU z5@rr=g`J>QRn!7PO`?dBQTo5y%KcG^VN0s*&!kWVpW39?ZxVhKqGPblnb$)1{#ro zBg^&WvVlomGtsrPnZ`YEvZamK%uFaBFLf9Y8sY z?JVwnFdOal4kDU#@-KdQ9)9>wD?rMv)&=z7E8)ZTQxz5{4a&lk#&Mc>4>8?^aMDUn~T^)oO2QX|#f))|a|3}cGl*T{md3G+ z^Mnu4801O1Czy8RQrVp$L}6iLClX#w|t;^{a^=U&v$s0G2AvTHMHXvSxyGVNVqh7t0Pq%PeYf38c-zz2-`eCg_AI7SOy&GH`_=pbYhk%4uOL*xcx7w(*P z#r9y~v|j-ey(LsZl(*2)O~M|AmhaAS;Bk32akp zPPr4-1o#P8u8=KBAlH)tBQ%LjqYF5f{Ei3&&_l($4x9_{QJ`#;!$7v~4J!K8 z5moU75VzJ5Zx{E9&INU>4-`;QzNZ;(pd7U@LmFm=f3-KR&mhRkdZHoC(4ESXTjgLw z5zRz#E8b!CnKc_E3LZzghbX3wy5)q?cZ^I*q&)q4%G|s7K959uJ%ihD+@ux>kFl)Iz=A>em0z+%Wd6ER(j@xGw*gz^oKl$@6ogk zhuQm@bxx*F!A#Gl^i8E)v(yPUfD`KT-cDWV#r@fe>Ny&df$f5PicQ%yD8eBjFVIoV_Xwflw#jW7ib%sAjQf{i zGI?K0 zf{g2?OgX%>LXxQjYISo(C0&;t{Lqj6Udj+d-BP31VrP>@Pa?626+xNJ7$b7{@taz! zg8&G(2syWjig;&@Ne^!<+W2t#iQX(VfIMVnmx0*hsBP$aGYoBOtF-0S?p4})aO}QM`OE5*&}ZR=}ShTb4027R*&doK+>UA0*Mi?b`Wh&P4)Z-W+&!TI$Yj-@gq; zQyC6ZN4XKzMQrWC>wGw1*8(fvj+q`r9Hl;M`Bxvhz zYEnm0Rylb)~G$e`@!?Oh}^{@;8|y{WUaMjh;e#mCq0_+_F)d_y_okB z%b$_Yv-uT~@c+%X>3sWUIlmVxK^1_eKeg@$&0Y4BXnApgY9yfq=c!{q@(NW+f}$j^ zh52_9fa`9-{q8Zet*d$&B|GThvl;}_)pgH5)Xsh+d3zPi&>MLB7<5L zt^?JYqFTjPVExKh3lffVM8*rdTE(S0z$G2v2bedkzU1ud>q@$-8pEYM>*l&E>wPWV zfq{+Bz`>M5oEecT2a|%X;Bwm3uo{ZAphhg|4;<+@Z>4ksnw6@-^IZN-gGaGVdv#yy z?x6QkO_K)up!Y55&P0TH-^9)r@4KnIb!uR*B~fp>L)Spb9R3x5aA2SWAAsu6%ppXB z9fwJ+lxP=X0s#8R5`f}9gD}^mM7GC4zkhQKb2gWP2DD1L2+PP%4y{@p|4WZy4tXd+ zFeAGgffXLK@jZx&ID9$|aEPC&G4;<;6mY|aib&>Z6@O#0Zrf&3KGb<1I?;9oo#1YF zCEWSzU*=o7KY#6e$tW7`z^Js9sClwAxRvAWs=Gl7Wqb?!=pajoW7?1I2qSzL`$jn> z3lKnAL;K+9(CdqAV)4H^JrO2m18pJ7LaX1uSj@L3X%7#N>{ogO+(C1mWVDV465$7x z`p`A&ugh88tbdlEn;>c^{QQfjoJYb0mN+ww{)9RS^WEjd$t6Vzp(TYj2**aM+ea#y zpK8s71MKN2>G4mkP7^6Am;$SWrtpt-(g4#V{}Hb0e)Q|3Qkr*vC1~H4Cpd>K0HtQlb$_fVg4-m z1@x0vDiJkXRxeBqLonUPT(b(o_o>ZjAn&*>38g5QG7oaBxU5bv+Ma|;pU9J)`+hUxL75d0_Udd+bg6-$y*`so${_@QJL6X zjbY}LhI6JA0pHI>3u*Ae{GRu_SqG?(qQBM?X+*(o^1(QW5G1w*{%a%$eX&8wsfV-L 
zqKP-YP%8hx`|kVtYRqQF^MPLnA%r--~KSK~H>Gl~Jre=84^fx|~m+qqwSy_amU30{@N_kZ5Nku?YZp zs4l)G4)6@(Vl7q+pN&8IhdUqAx8wsw!ny|w5IVg8HTXKQK+Eyn*FC;no?z3Lm7n`V z888GqR7WXxWGD!<1%RYK0X3Ng(+{oN=k`vekrcz~P{}+9e07cLq=|w>De;*G%+B=v zL}QIIMO-GE)!|xCKGMDJKyKXwc8R9rSED30^Ea>>5h+BM8!9E_`T}wP6WVUNi>zDU zdmw9Z*_q@4%PHQX?qxd}Ggg^=&Ag#GI5WWekz0>k$!wg?%dY5iAmM~1sH7B*em}#* zzZVcUr#sMfVKX^ECs80L=zg0YdSXhuyxE^B&Z%v7WiqobBC^nEm@ID{x{tBVH~&E7 zNk&qfgPm}vc$;fEzHQFKwF6ocyG&8vB*j{XlAwQ#Y-dg{o+1Pt4!nQhZm+;7lw1c!$R-*V>GOuxPDAr1uf=&A(?14Q)>*mxL{aQQhWX?gzT+k=3`@P&8@%a9V5woRi>f zKZQHP2x@*>xmKDmv0(U2{5t*z^2F}aZe33o+fW~9L;h;BZsUd#`k}GSp;qBg08Rcg zgBs`v*2~B;teV(W{{=7q+QslaUcD_11EW#x23Q# z@){~`35ZUWbZQp!TtvXO$K9`W{d+#uE>#rIEk8I2UA#<0G4Q-f@GS@+(VAY*X*e<9O67cE$-|U_ zsxAABi9~M`DR!!O8DlmcM%(BX>&pgQAFz<%NP=ZFKkTL|3h<^X=h(1S`74S?KP1@J zC@c#g@&%+|jL^fJBH#Y9^&f%ATrFWOli6&2XT#&P?&?0{O&urm<89fHF5gHpWAOo9 zP_&?kFz*dm`0>|`c4Txb$^wq#4fY%nu>+}>-CvvviPz02{tGGve$1rv^K&hzR0_7X z__*2z;Hk~qb5mC%R#R61qCXlckQhZRq7S0-9tGOb2~AXIh5u~E)n~~0qr_W~e_Od2 zWbg>-TIYEw7^=m(9g?A8h6R;oH6H^9;`bws&FMr!0oQd7LXQnE#I=R0JhE@RB$3Ll z&4RlI!oJ@0IEgckO>b&NP*j?7Mzj$>W>2#}jBi|9ZfklG#iRmCJ?jhLK^O83P~NPP z@e)I-vYu41$DtQw_SsRL1#k!g!Og*9B(}e_Je~(Obl%%Pt5LxE2SD@1PDvB19Z%}6 z*V|RKs@3uzr8k5DQyL=<8Q7_mfSSC`H>Gz?wJ{Ns_s^+l-H))KDMWh&ZdL#RD2CnTA!s)Wc#$iDooeG?ua1 z;{IIQ?Myfg`}p2hps^fwWn7-Wk*JlqJob^Qy$&z?X`Wy?d;?WyGl9EJ32`z^KS(Tz z^)STjX(AARdWBCtcIFk9i8)uZ=)LA{N;@~lKo586xGTp$_{Pj8{v^yosTc%}q6BBFs%m7$N^>|=sd zP%BSZ8&((jXZ%eHGuAJSQMw={ckcoW<+K|>qgey)HqCC$3m8l*tYc;Dupv=ucm!Ak zq+YvKneesF!Ts>lsS*Dgt~m$+*~}RmY)!nU=SP;VRu`7@tq&@UoJNhg{(;gIB$!-m zshSU5{*Jd79G}WoQhjPrkfy(ehgNP;D3uJ0hdh1Vom>6c|A?J2flg*Tu1Xy4&tn^1wM>yllD zB#n{h0ZYW&ThZY91wBD{+|d)lyNsfRMwORmTY1y$WF}we_!a{be(Uw=2XuLg?p9y zd9OP{Vm}751c#t1m0@z6Pbe+>H&!2FE+e%$0QbBGBctqT8H(NRSf-~iH9#y!4v$6%O^3k;#t#}%VL2>WXSMVI4xh}sdUV^1~TFi=wZ%({ zbGyPa0#Fm^y5P|^8zVl=rYy^OuSi-=IDz?m+#{S;tL@sH&?gXOLY#{S4Dmw5iW|d< z7)o(nEkA-ED;Fjy$2X;}=SyJj>vISnYudAiask+pVPNpYRbw1_+DMT-E5+cruit4_ 
zJR|6$pjxfc7=2G$bw`KOYz2|3go18nefnOzj6@0Ml{ekqUeS411-Mgy@f%o%v8++< zr+#DYNiJ^-I+=$ZWF?D_V#R@YL2>iQ$k7Sx${5V?!W4j%GK#7Ttojxw; zc_9CkM8KrzK2d?eKQmEZ(A2zxZRmh~Aab2Tv+Fed>bUu`PrEv!(|0Lv*twaYHS`Rv z)IQW?JdnVwaE`rAh zg9SQ9OQDLPJR!opqN{$MPpJoA)_C69gV z-udK)OlflHuMSQ;`vg zw{!M>s%CJq>l6*S%G{cA6GO(ut8B+Ns-X!1Gv z0XkR(G!8{1Pj1~>*B-FxMLONv&%OqMqSb0irm#|YJ4tF6cX1OU zu~W;^dVTjBa=VYTHe$IgE*HDs_QZ{!tiT8Ol0kAQ87y=7#>$N<(@1bkGkpqYg(?*@ zLf3j0FkCj7V;?MjdYDE}4p>%l`TR4!!Cw$Hm>D_?#6!d#X^CYxlv3!6dFHEMyJI@H zw!gYeQld?(N7qTnd%QKRQ#)&Fm~HGFu&{4_{hPqO?`-nb8X?YHpZbPG!c1y=gF-u8 z`{a=)o&Ry{GyN(B#KVhD6IQK(w3KVzXGV{obXK$FS`Mr*xgWk--kZA&0=XKQpr8^t z0ehFoc1>W5TPdYkoN4N=e=zt!4^6x25J`}x>;3Ngx%#}rGqO;%qj^`O-A{Cz{_gj% zAOg2Xi!2Tdq(<4Qs#cy%YYuDukp&b*x%#VmV*>5u!jQ+bQLd-;bPAJJN89fj29__; zDdAdt1sNtwlRY)yaz1G+&3n`fX}!Hemn6ESDc=5l>6CGAnG#xn@{@o12>JI^5g&kR zfa5?_(q6!lURWSJ3UQ#o94^iE$#0EtE#v~|kp~T{6l>oWMqZcuPu!bxiJZ|q60`Dj zPaiD-2EuX5W@@pmX=r2VNdlL_Td@2uhUQp-1}R#REX{Lt#QHq5upYm7ySvbT=2g8$ z^xk?gYKGPgDEbrXSWnzADX?d;$7;8SWeB7^GkBOYSZPJ_kV7?glKzR+UP84|}*?!yf7n<4MM=uK# znm;9&`(xoI_-W)ZDtZ8!ONa#a7+Sc)#GxiQ3 zB7ay2xo-ttMK);)g(y-RI5t69o^5+QEHFU#heJK##kjL8x1-iI{iI}qx()RUzqhqt zTf}uw9QW_gE|9|w|Z|Ofjak3AJJ6b0ZM+XuC3E!ST=eyq?s`Ri&*h;{k?$ zOhhb)$%@UO@y$$LnXAoyyqHCH&K=OmqyUbmjEcDfse*mRfj2pwL~V6aE4?C?58-ze zh27-^MjhTKM?bkxTeg~mNnAax#jiK67k`=1s;=jU;@>0a0y-y4ih=9&ZwuS~#5;GY zIAqV3ctk=)JYNRd@v?P;G$0N5EzNePtDllxB+t1VMX?dH89gpFdL*iLFT~j$HzKKc zj05Yau^!Hve7M@ASv&h(=&Z>Bqw9n2{DF}A`L8$zE&f&s!m@?ZCO2N)7OvFMHecZD9axg~97-n&SogVf)> zhP`o)E^^*YsM@bo*bRs^%8w0&yte86qRv&wIl&k!bk5x}k@B?iP8{2E0z}w|;b4-( zRB1Ud?5ZL->(sO*R-$#_=kw~AC#I0kzh;}Due4=#$E8pSYr+{TgxR-d#+|(?^U59ikmy&=ar>(WAMZyBQg+~Yi{l;SIENX9Fr7u zc3K)0dQ)#1VS_OKnmn991-EC{dE8=ymFi9ni*(yP)D2J-o)B*6%o!TbI=C4wFZLJw z`$7OX6*Lgg(n(BExd)LrH?a2Roe5B?ZG0&j=;^^9Oo{8_L_U*w%CFBR4|V0lji%RqZj zagj95rfHI#asS&Wux{05y_fTVV;iLE2-X4%9IOzAB@T=A4!dsb?jWh+f$+Tk5!e!! 
z()Ezds0x(iqjj}eKvr?y%e&@98PBnLDcshPF=E`R0?P+neVq8w>DU(E(3MuQu0uarWF zw{Xy`qg!0~8Tt0~QXMZ7#&}R*NP+flyI3P_&B)#z#%#F7Bw3=I2D&Y9^n2QtcB>V5J)Luzyw35>xN7xS`r#_E6dB56&Ao;|kruk_{&tAZjeQ$Ht zkKe9z7oEK%Z1bKOu^A7&hiX?I*ZZ6lJd=yzC51j%iB&l;;uMJHoqJh5$`Om9G?7;T z`;BPPI)YeHj@2SR_a#;2d~bbF5Q`c`AiPfxK69bGHFH;PeXSz5gas+5G)@H9{rI$E zFoRRV&nfM>yAAjoq_SCqyIZf`8funmPJ_Z$Yn3oo8xRiXbR^=(zBteN3E7UfbFCUB z+@oqw80@?T+p#N3|3N=vNK;@(*BC#U$Xnca!pSjR?TRjbE1?i_Q%RI}BiB8V6JeXV zjNL}J9fWh&JkH&t@~G4&v%4A?u2J8}`%*BwI#%YkOap$`B%28FH$M#C$8nWb`o@k} zwTy8fQSo*-^dKm0Oqc?jLek%`e*J^8{(=HrjY&Z6b-WJ*%B}K4yGfsfso;@F6I*Az3${Y~#!Ik;7w%yPGsw3x zL}o4yA`LtUkLiEutqSp{QBMxb;qbg+2lntaqAGgzWkG&J|U z)THA&y=3OR-oc{8>F(}vHfjeKxRho8CQ`Og5;3uMBqQmD)ZXICr?)4*d#`|$bD3OU zG_kq2;9hUpo4J_%-4ajve8%Hz^Z>7M<@%{&~Z|Z8Krii+iM z+~9KXc|~DwtM3=myW3g^YlvZ4l-$X~d2zeIw`pec`}4kV7=}&D0rFkRXxp|*>e?j! zFOG5O3Dv&W@`7s@epfL}3Pb)R>E1HCsec~np#Uu^H!hnaFtBVG{E-avXkg zTqmBEJ$n(s85Q9_9o0Jt6`VygdBkno0$%3g5Hfs)r3ZK>P00uA0yr0wTX-y zHfm^A-y+6S!!|R^G+6Jxvr$)Eg3pBc&t)o?Y&E#;3DFnB16=|ljR86&8FM>cf$$yxE(vIT zsuCMTmYApDW`)gNFmUiD>pbTP{MRf|Xz1&q*pT~Od-^q{NC8@iiPRV9?x_|D)Lv>V z9#)rMfl)MLMV5sw6#^eb#>CC1Zjg>HF60372dV`GmWP7&Q^KR`#f2K(JKC*iy{v=)PA3$n3IW&#_} zxy7YB>fyOJ_QyW8EO7FPG$PkGf2bd)HS`{WPSDw|SIm?qXlOrHj#K+BWGm9X!yfef zEP$D1%zv!}xnn{%PsuNd_~c{sUEX%))|Y&{ifH5iehjR?(N_Tk#7L9*mV{%nM_Y#w0pN+XG z+g{@|o)o;Qw*H=~!b*Lqci2Wy~+#T zl*9ciCIv|nyeGD`7U!mS9qIljX+&&a2#d(~MXvW_%CO^vK1lrCzl|5FVNlN0F4p5< zb}Dy>tJi>Bjn(P~imn&PC#7?eyvW-q-_!J2Y)U?xM@w?*p46FLaz8*;xh&eef7?E84bGC!qFSKK(Gf;3v!cev*Gf z-Ln{byyzma5nXVnmfXFQG7)|xokdB*UO$X2yr`|*#u{vtw8{E~(_{jHfhoVW>4be+ zwNit%8GawP$57CHMtjltGsYJ(7{u)ipG}D5jy1rOIfn)<6u4S67|_}t+VbPHL9Qjl zUG}Y~_BSn0+M9FfwjLJ!D0$r=A8())|sK61_xJpB}bs8yg6Zmfa3bZCdlyp72yztM{Vb3v#f00tiU| zUD=dEK=K6owqZwYdGdI383MuG24yBtS&bA@ zCx3(5xnYme5~b?=kBFhTpC2%n6fge123anzgJ6v;8HV@cjYfwI1uckUS|8d0!Y%b^S zw~29J_)|d+o@+JFuKM(|0x~R;dh;Tm66WK_{!`#9Y)X8@+d)ysh(5Gn;#{91M3hN zT~JI9aLOS;2-)cY^e7FU0k=}8 z2h4M+p0tNY6d4s;4R37by0-&oNF$PcFXEFkrb2?9Vwk$2Sr;Yln~;2oUBO_{%Cq?+ 
zYMq?%Bg8f)@U39t(SDieYTV#8O<5eyVMnoL{TVG9TrjocG2Ow4do>PofV-5DVdQv8 zfHLzH89q09y)uKpjWO2qnG~*s!W|HRtYb4|Y|$!FQ$`3)LHmgfrP#?UqH9Cg2+;{m zwEq0aWv>{u8Icowj?S>QE2(P(aTQu(%9)Rk}wVlU0Rl<_5Q6MWELr} zmGk-PWtsh?5mRKV52b)xX-PSzX3(@_B$-9n`qD{pD>1yZ{qmt3%vb#WDaoCxN#Fss znRuij z!T9m1rWW{x2EPlt_dNwnH`oVXpFKUx+d< zSn$F)MICbeliM+dr#bqB=lww^)gFn!q;8|!SV8o}?@$TrRTF&J_ZZ`&Y%Ah4OuGufHlsAX882D;`jr}a@01L|bK2gBhuKiT z&iI)XWD62O*!Wwzs}}x1S~=LUfcm5U;&WI@<^4r4L5v4%#HLX0R)W$d>BHD2US{PM zI@?8JT$27IdEl;rO(e_~mR{mh7bWnt2D1@sReH_Ph6;Hp`z%&-dqndq9DoOrXMPwx z=FF1gNq?Cs<8tRFMV!RmS=IaK{T^db#mn*9Vx+Km(Q$+AVzMHk(ttr5buF1o_|H#r zj{0(c>lSWM+b9usRcLOk2h!Borbn{K#8gK(SXJ?unYrgpw&zd6S`ecJPaY{Qau6{< zdlZVjhC>-j0tqGM0!X!_mnk0M*2Dzfz*}uI%i^JO-?Va3FMHR&!RyT@iOXUlosA_| zdm~ZLCB7Vv=|&G>j%ltrVcI`y_xHl3wT0RXxptP@lM8oKK3cr+ay;;JG*zTOqgo^~ zBDfb_&4^ouTN-jwqMt+dr2X;$8Ff{3r;{vr8(8K`_mTCTcF7X89zV~YVSdXB5E3pt z2p<}I3qCTm|MP@}68?fbGNL=mqSWmsLVpaQ#Jzj#s)M)K zb&Icxt2Wnv@%0NK7lT*#$%F%eZ`%8H7L}T&z?xE#gqu^t)=2&0{ zH$nigdvw?AwXe@COJqYRPcoRp$3FV#?Ne0iFF;@ReypN4?Zx6vFGB<1b>BT8O-2jg zDs$ww)d3IUO>cW?@VoH#{eP}WlUknt<(f=HEq%Vaxy{*11Pi!gux}}ZJ&Jbb8gvmt zU;x7sBck^TdNkHm?ht?eV2Z|{ z<#c)(7w+XPaelzjgB{9B!2r8aE=Z_-+_OkmX)&n9r%mC$82}Wb8P{C1`^tgIqx8bpBZrP*1jH_y#<+M| z2N=z-%F-80MYj*XeUNwpyW$pQak-Xx*XwJN_$^QqGHRQ zd^B5ocY_Q(9Mx0>PbdPGli2N&KClYrqsZ0nnRrg$1LTHKI3Ph^f#iSAx)j-+3Od%m zdr4;h)rwIYc7PqI^r1Y`^uZ2eAk56|GI|7G&8s<>fS*(vqS1wtu9@63ePFb}NsK0P zi*mFI;Wn);*fKpv*;5S^g8%;R1^??vjLEOOP)CY6_F+Fgce_tfDfS$^TD{aPS;*{@ zLylpY@ytXE4X~Cz!+)+SV`)!USoq{{CpRNtm?Kx%#R9-!G64;MGh6Kh^wT=^?fEhR zD$H?AElY)3Swc)2c@ToT@u*)DvK+-vC_!e~oVf%xvkd!i?&1)4gY|*J9y=2p2(>vg zvVZQi;knjY=do2hfc|r3w>P~DQnYS;n*#@x?n)~iVl?i3MiUP9ppGA)l-E}CK|iX# zf8zLmsQT_eD*Hcdk%meVNoLbQ6hbyhWN+C@<}u>fJF9Hj`xx0FWUnZD&x7OGd++0z z@AvjR&+mQT)1Ntq%~i8z`#TMgr1Z4x4wi&nvK-9R=y> zgU}w`7O5d|WOt(F`9!+jlRgK?>O)mpwsx-9BOAK;?MkJL4phi>^Lg|A7lZ*AxO%39 z-v9g;st#9FfFr}Fk66m!U42Sm4|K3nP-oNbH**abBb}JvzV7o!OyI`Kc<#AF3ShtF 
z%e|uFOjir0UVjffK~k`3Pn*1P3!@^@SKs%59Z=U`axc9M#Sg+rIGW>wJH4%sX5q0C*)?JeX~B5sGAk`RglC;C!V?y-8x+t=B9 z*oi51wih<5PygXqJs`3=ar&FfShLFP>k?9Mn^m51Aje=MLKp@FTiC&3Ku%lBy%&V{ z@EMh#P?EgsNW}V53w>MMt^|vvy&+ei3&05}-U=~4U)A~3Pqv8alCVS8Wc|2%=mQ>p z$S$D+5`_I-yjIC^-L2zsYL9hm#)3cE@}@M!Ui}MTmI*${R>2jZIjgz3#S5-Nx$^b& zK_jF&ut4j|0X(?0X4SW47UnLhl!OhtCS4x*nDP5RzlK;o0&WXEQlgrGjB3+<;lAdZ z)(%t`AfH>8A4)Z;?%z_-1Lxy}C(0EV5kk>bXkWi_i$UPQ_5_d`nd?5Qu#<8GAX>Ix z>g_Xsz_}`i6oJ5n$e)Bv=`cj@MqMtMnyIfSEoVpwM_a>#g^AA9s~awl1BudW%RJxK z&~-z3lkL!-1>f2D$6a2lLvLUj@v`XXdNCBRH&@-tYpnOqY(NEH7QjWpFc16=K{SVa zgZxQ>Us*p(rhs#BtFC*6uIrt3x6_fG^2dw&EHPnWV^YO8ptbzGp8isEQ4FagHu0t{ z^BUMP%h0SPJS-8V5kwrUs*kw7dFY2Zbwy17TqFPan(T0Mq|s=&4XLkg=CuxB|9^xH z_}h~Ns=%?w`P7FFwsC5H7f1FgrfvgQMDf80#(aZkj<}MyMUH5l6N%JC#Qbqc<(y43 zjXePLA3~8$Ar-h@S*UqUUS7sjVce66#eIk{d72O4VRBbi@smyZ??_^JY4)@x$(ZMV zIK*y^e<)k;c`kG?cQp*iaO;pd@!FeYS5A8U$D+5u02Y1bxZK!iwpKcDw}d$Gm?DZr z6tS({=cj`42Z3+ctMp0S-U+%KheBR3A1GE2ou8hMYk+aV{ts}|_^W_LB`DRGfZyDo zMr641L4SECf*pXvJXURc{P(wZo*4`r#40l92M_^G)ZW5yAnKhh@$EsSSTCeMt3U=E z3V|cZ8X63UpJsnvp9y4b1){?b0GB)ziUNiDMp^P1AN^jEcTCm_uk@FNgP9UY_@H^q z(z#s_Wd8dYNEz%se094?H137tS0Dojj2Ffnn+ce{*Sp=lJ-?+B1sfNxLEc5T%6eVm zw3QkdK3}e=rUQN)&w!f65cbk(i6~}VU>(U*)ou8w2;H5=;HB9p{<_`@_ybnXk+C$h zKOp*XPP9pY#-4!iA#qB~uUcuh#suaX`L)fAOKk;}M?IYQ~RmF+}EU2L3t!sGlp46&}^k2Rz$v^SLIpRIyp=(CQu0 zb~}dMp!PcTW(JV~i7Z|v3)PFPT+_=$o=i&0Z3(^C2IPJ@(mdM_c3wB^t(hH$$?lu{6W(4j7dc>W8R zA8yZxsS=a;+;%bC8YzK{7EP>xA+D*v9nejQcHEh7E$$X%2F92Xz{FExe;vbIcl=>O zWv^m`wGHf^?`rMFu`TG2lr$@RP6AxFkt)X|fWc`TV%;QV0AuP=Gxlk?OsXv-+O|MSi7@E3}5ak|L5lJzhKm6fC z-)ZB;_kVl!bCoxh@-!T4$OzsWDShq%)$hmgV=F~@d%iDo>gMBU7u@#ui)8bA4qkft z-Q(#@*V&k&@yws{xjj%g#BPMK?+4@94ChY@GzO30fPkt}rd8EftnU`Gwm;%=^4s|( z2((Op*)MZ?UK9>FLu=eX1ZSbfH7)6BxpD19L z#AS8_tRi64!>)#DOCS%DN$9%a(gT5O9+2Z>0J_T20TdYb>Jl&cmV9`YF^B8EB8D`U z-z`}=uyCXA*Su@w$GtE=CfEaiE_AK+M*0Oz3&v?B&x!R{qQIFK1P(nqyFWEY2qM5N zAYw!TDS&03Of{V~Sh^&S)T$rsXE3b*!ylc*yoCfn^MBYG$Jvkg*4@vRg}yED{nKfK 
zQ}EYUgG)P^2BTlX!ARFM*x+BmO8e6o@z=e~8X;GYup7~kYH5n737zGZpX$xGXqKrRmi>ZzJArJd9h8v_Z)gvjFe1 zdOa-VAER5yR^#cVyT9I}oj@9vH^i#TtQDGp9Z?Vu{oerBfv~c1C#g3fv}^V-8^DE= z$fqt_;mOozsOx7T`W`-!6b*>FQf7(7Ob!hZxe_#-QN72H{SFn9YK}+0M=ab`%^#UA zsy*lmZcBH)9Gvm=&&laszNwRl1&9y)%+{wcdh?}O@zy)(m}cv-jT@yX2jbFOp90pn zqV%1&bQFyF+?M&uy7_{+s@Vcj|8@~z%hTKUI^_dC`^u1h zLtD9u2Yt1TUhJkl;q35NmX5W#&UoGVv(z(0luu-Jf$-iJ`AmoO3=lY{BMJj1`{tO# z4sg+Z`R(-NU+=#6ODx#uJyo6N@d(YfieJzgD$`rKXB)p~XT&{GK7n?+F~7b;G~b=8 zDSM?rQq|-MfsUiNc`@aDAvN~l@z(6`shP^h)_c)sN-IB10;skRK?XFZfXhkwcd)X<7z*Iw%){^!pe0xZ2n15VW08K61BeHLd+q(7 z^`G%h7hsM8tF)cKud7qV6$U6nfrW@D{gHG1>(>_{t8T_VAgds$a`ohRr`yS|>J@H7 z8S_Vm4kdL-5G@(UPs$9mLlm%)KrG>SMHiJ@ILt z_WHH6nUfDP7sVkKp!tsi1z`Jv_Q1P56?sc?s^rq%(vSW7DN?x+sHT%ExyM&iOD!fc zhfID8#+1drLN(`>wt_thxo#ee%T6aqT7gi~iU5wVkrowB?)#?dz?Kr^#@*jWSCNc- zNrGOz4gIIj&dg$6Y8~^yf5>}4#&vdm)=#M*r1$IBWZe)4PO&2FxU(D}I;Aa)MLvy^ zJKQ&BEx73>8ANGwWWFyn8YStLpz=%Y#-8i`+vpsy^#NGj^J>wSh7Q#{zv~^u_)y*7 zpm|3Bacr6aiURc053U&b2HMaex`_sPJmlwx*Fy|IP_cIDW``oO2C=J?K#FFU27G$CISJ>-G;Bxq#hPM4HjO30>wXDpy#j1_ka5Z37Gn7|6Q3n^0WD0X=?KO|{z))Pio%aLhB6^z_J!=mY& zx-dWmaSq5uq7Y<3=bnFsz`BidL?n`+N$h^=z#j+gBqpC~*Y(t(GA#b!)w|u^U)gUq zoop*J{sm14q2$K>+k<2#Kq8@_={B#^z^V9Y8@YcgoPaiZ6dQ?>a)FO8x^b@el&kUP z7Jkv-%1+tvv$&!z-|5BL)e-1%WoGH0@b*RTVhG_y`l0oa4iaDbMK-gExteAC&x7Bs z2=)>LyqV{1okmp=+}1;DuZ0bi`9-3MJAp4j-9iF|vH3`?S{Z-5fWckS<(T@@kUNRV@g)s9s2wbZ*S8p>fa%`6gvu~sg)M-Mj6owR3c zN9v6_l_I5oZQEUMaCk!ZG?pc^8~HycU0PrT`UiaPZmGErhlNs zSi9yYetu@N%Pem)bZ{T@s?2Vm!Z@z#Td~E$R9dD@d+%I)MNzXXuBeNsq+p*GxIH-t ztubZ6Y+Qe~)G>A3pyFx)dTYB{i#fNb(tz5^f9A9FX_`Kzoy$3By#oL!e4PI4S7g*p zTk}^N7-F`h$-pIQUF;Vhzy4VMX^KJpv{iMad5=Sr0{)N+u#rf^@f-Y#rLMu!3RnXzKRQ4JCZeTTO;6T7| zes2V0dHFLrQ#{FsdVimdauwDTIDguQEBa)yRcTaEW-RoOml-HYn?cOAmKm{rujU>$ zs%AKUqg)ESzDTCXtmMeYA42rF%=ut-XInxwe^aDm+1YItc=u@2ER1CCCR;E* zhfcnNKctwdT2Q)i+P2vv8u?vuj#RP2`YjGsmyeS2OYH{yNtP;mx^P$L5TLNB;I1i2@$8|2e{|$Z!2;V?SPdY;Tk?QTgId7xwVi8?(z6nZtn>b9+0JNM z*;eXxFH4_i3OTcCi#VO@_k{##)=_C^Oy5Yf0L+;mCLfh#B3)(d)~r;;-u=0U2o2Zx 
zOyrvkS}ex*GOwalO`p;e?58Pq+T{dIE*#LJ@^v~zWKDrYBDJO+b=2Ep)kPkLKsn#S zjvDYY==+3HLlHI?;-cCia$mK$XhO|o3Zsq%?D3APYy(p zKq@iB;EFeJ=v8?13V?1f?`zAXyj&k)5m)6^yyOYq`@cV2{e4nStyJo5k*<3l;%fMK zKpdFg`i!4qnqE{XHnlqWjiWS$gnwkTmq4awH1fN$71rVsfL%IvvS7d_2KdHAQ>wjL z1upikYzO;j%RSNul6heNNdnNQzk}s%r>nStX8P2gdMKc2F5ROUSOYHh1Loz2Q(x$m z?WNW7UsmFCtc+wA^2;X*)f#2sq)5}XqeXyq+^B(3S++^u=GG&Dm3Ecy!RGeX^A@wR zt0#ZWucrvB9sK+Nt4=suPp#e`!UtO+u3E*VA%kJf)9ZtX2S+!NQN0^a0pk=X7e^fK zxrP)*QUd!vYOv`JtdtM7jDYT0Uj@RbVum=F@pZ(#gO15!uWk}|2^lI{FW}(yJjCE- z1-I;`-&c|=KF+Xk3f!gIBwHN^u(PveF<@k5?Vnaw` zaS2s`Jp2X&6vsoI)J5NJe8JlXW|JJA-9&;MWH0$I-6Et5F#u|rJ&_5$iCX27($WJg znJ$iqwASaE@7}zs-Ik|SEOzIqZ7}!8aa9iGTPblZ&-J^SE5*KDPw9D9cbdPD07((?qvsUB z7+Rv9UvZBXyHzN7c8tIfvHXkHLAz>X)2~XN4FXfwHb^@Y_%m7Kwhf{+i8Um?&4Nv} zXrK%s2^_JsEZQTP8NiNbUr1_TlRk;UvUeZkOnv*CdO6)~z`VaN>U+HxAxo_WJLu5I zU0?186`k??Q0Ki@NfbKw3h?RvWDIhA2Tj^;!k_j6nJ|SJqa3>~>XF$$OAi&Bu!UZ~ z=i3H#AJwKiA4HF{1$4|D(XXHZzybsWviM$1*U91pUxx9 zF>QIO7W|{HzzD&pMIWN`!E)nsIJ;fgk(E7Ac2P|Bc$o0Lr3pfD!MFh!j;UJOLy z_5rp@uRq>dZ}xs0{-iaEMRPYn6v#QW#N4OZqb3aCsRjxi8LtaHb6k&SM21tA<|wuTS67E}hozkBG&|q4B&`F3 zeDu3_wCR+{MNjXt&x`VGtzn(IE@%^_Iq^q8%Vw29V7!Y_)O!v+9^jRF{SWwt9Syjc zPy4It&=P+V!s&49P_b-Yz$w_1@O@W&xS(|Q?$)IXt+QJMHJ6y*P0VyxCy*{5OlNyC z)YOxJ@?erw(UB-+H6|D7vh_;`I9ks9=FT)2@D@|7bZ`R;Y+$XTwz(`4xe<1g!aZey z8Z1IRdH5!<2a)o5g2u~B|9|Zc~;q*7Q>L_?-O5gDYyPR=-MUU)|hm?>?T08Uk!qR|Jj_G#&PsV?YXaSRKkUeQZSG z$!36Qw%Tg`8C_Ym+Wwp3xIma={I(fGDT=85r+b!g!_kN4YRvpeO3!Ve93>6fA|1C{ znVeVme^MDO0$&j8(`u|$6^tFsbHH)fYP)iN@%)viTxvN$YKAs^fy-)lP@9<%oOJen`!$Iu_1KMm958pNqmVE(mUDI zr?{R7^vTQqwQD?gUxNK*N#xN2%q!>>frBVHIH0Zs`~)jMRC3tc35dTY6Wm_t1j=z> zQ9Inip!QDUm;b=Iuenf-2%S|45=fE|oQy$8hf!0Odwh}i&E;DP<>e3UxiHq14qK_< zt$BPm|8+G~__`wCkP6=L_3MNBb3Yy+n6>JeLUjsZ4Gl``3G4Me(9SU+A^+2J|Fate z9l`_%_yN75UM;^V5hUl5DiOwKq>8bA_1xQngVFlGOO|^5Hspx|D80}yzo8X~G6-R; z8^Fry>#0Q$1;f!6HR|0wCqPaEMA&n~uYc~1f&Y1fpD?Tf9e)mQQ6N#{4+NzDx!>R; zC`CX}$kZb_T5SGCm#e)3(4@)$v%-dUTa-aBOC~rpnt-8=A`bcYO8tA!zafG2*PV); zE{$|+YLF%6A8)%?&PA|ja$(xE5}iVTlpo|(luBX`A(+sw 
z_rEA*L<~%#6kZlz^Q|2&f{Zg8nhVabRmXeLa9fhfj%vUb0h#dulo{{m>#PX=`##PO z`oNrttPAKmWPyXBdn(4taFPREfj^PCbxTo7S<2TGHvexV>v!IK4L+twxm}|6k9TaX zK*S8-N7XZ-z&F+BOTopxb}`V(;)YSOI_JB6Kw}KJES{xCvYzqiDX^ z2DD5~cfb92HlX4MACY?Yn8+M*^I-8;{(tY1H-h{h#Xd;Ly|Pic!kT?NDZ2QsFQ_Ab}l6a?1l zPi~m5-0?~6=2}-NgT#Kuy)xVVOR5H3z8~$cx`S96;h6$?ZT@qU{<-;p?fx_O-stAs z1>Ew=j1%i&@~e3Oe&q;!_;l0u8YnA}ieeE-*`SJd3fd=;?$ix~Z6)f44fE1yB?Yiz zdJ?Ia;*PUFHoz&nwZYzw&a%+zPQE~}RVGOoXU@j>iJsGY-5_%pFjsL}IxDuP`=m%_ zLf0I?k@)%PC6dRK7YNA+KH$B5qx?ElNv>Y>Iz#5&u1{lrw<45WWU{i1gyrRV$lrQi zP`G%3keoo|*7dI!i<1tw3$n+vm)2qvG2q|k(VGU>8?bUh*^Rej2v^5uVpQ_l1{?LP#|u@MKp;gRHlk}X4{#54o&&kTW^(E7N4%G!$U zt`pcTwZ*2&nqp7+gc^9GOy0eBA`&_-bCHcjzKqR}`Be>>lTuQ1Axyglevh#(jTsOD|>;^V0 zSS$t?bmRHx5{%!B9qCd%ZR57ADeeFQ-B7-Ug%A0$5h9U1;G1f80g%i;=oSQh@DvP{vI#8y~Xejkx_ZFSg*>&pL{&GPT~)@$C@9aG5Y9 z6H;J<05NytX1vda>*{*zO`_{QYb7Y1{74$rABTOcEl=@9@j~ZhSNVon!9BxGe!dkw zLKvI=G>RVn3{_LM%CY9xU2pz>U$sa$q&2Sqh!cJHj_rg>UjgzkXLw}Hgm)JHIIkd=- zWY41CwPYU#SE;t3LC4%eoi^I={)(*1EFXjq#Zy^0H%Q04Kx4v>#+~dd;Eg4wk;ixG z-AT?5CcNuG75I*`USa|N_d7nfTYbl2IshwEb&A%uQ!g|Z+#68JC;a%Ps+46lY(P--WMd|*|l6SUCg6Auds8Dw4Bq6=_7(PISXdt z!76bzDwyJ?gvcWU4ud$lM^rYa+4+%YtQC_sDE9gK-tKZ$r`@CLf}%vvX+|ohg+yz4 zOLF3CGi}&Wx>l7Vb$b@;x_grkdjZ_jVyhI?%9 zC0bB@8VqmD4jcmqXl*F8`^t4FG~k7b?@ze9_y00cLJw)Z1cD}oYkhYGoPL2%-^H^~ zekatCS=G%X+T|0_(~85`F((=P+%D!cJkXYly)ogkvEC|I(4{k}(PYE_D-W(hO-Ow_ z+1``;?D^3ziKID&*o-a*Ovwj>wT91`yg|G%rX~9&WlI-jUu*=^7Npxxv}0;U-fj#& zyRdNCS<<@OORiGYuA_CJbTEIuvR#hDd1J5z6K>o)9n$%euPGkscG`#u*VJ*!bFhsA zt#SFCZJN*bp>Kh=-SIeDYg(g;|3Gi)2_CBYwLQi*+MM~f4KSV_<&wWOYDc9P?UEwA zY=<^q7@CeMIOw}xcVtpLzv5sgRU6ALKzrT9Ubrzjb(?^g>3;N$&)y!ouhw?Qo6A+T z5=S{#^NJFW=@r>etNH4w1xGrNx@|Yd(^GL@J$Yiu-nH4>QByb9%=E<_>B_M;(@`ew z?R6a?W8&8x*cUuSL2nw2n@7`)X#~!b4kVb27`CJvjS|L+>0N<7x(7d^@9kD@i3+I& z2+bCGA5W`a1M4z-7QCflpk~~eEaLS*>bYCFO(YK084_Dz={mdK`V6s9x1!NBFp-i3 zS4k+8leNlQC3*xSka#@s`a*72pIzGnqA{#|D8oB73noXxRcfqmXH=*@)gy20wonviQQ_g~#E^BQ zebEJ*a%&1qxDl=~f+VTiaqHCgou;J)o0GidNNodm*DQ=A~8Y 
zh%HLH)8hhxSw`6WOQI|R5W@IG&MsW0)*E+PVZnya{t{v`$~?KbF9nDra9z>*1e?;^ zJ^JK&eS+RtIrPBF;vV?c^Xr6d8K5VhPYqyY2OUgB6+`b%|HKP5t+P|Flw`+? zHoNm+(R9gGk+9CeXLl>>@K~AhbJr;!?9#hl{`&jJD|YTa-YZp)}l!2E|@Cl zdI-YkuamS8!AjV0s-gxF>kS&-0ahUSQ-TffONRDm$lli_Kr(v`HpKNk< zAMVs*3pLvX3p(7IU4E@{l-*g-mG*291`ln~SS-PpQ9|$>(7iDGnW!z#bIjv;oGeEc zDX(-V2zI3(7{a4vza*_(wj%q4+x&C06F8@tu-5DO$FzC7#W`;G86K7zI}xtvmE6QofCd6m?vy z!H&;2QCa>{w|*R3JSmD;W6|%NC^!ibsQA&R3`43Qfw=W7S<6);`p7{jRp*6q=?!$hM8ew4;6}+~a972~uU49`rYyRG*E9kCSg}-jlhzr#7{-YZi~Oz<&@}H^oVrj` zvC+#oA8+aY2|hAc7e4VRuN*i4#DnF(iM$kfkjsOD)H0(aE;K#s0Y*zfzT zZG)u){hVjC@UGff;`q0_!%ENMmpCR$DaG0aaScDT&+>3i7;dpLA6E5P_D&U!-Q{Z! zqWve0um_)!FrsZx>(Q4PcFf4h*yg>g3rs-1k6;d7w*J|PPfF2~on;+V$S%*NvyE(km_;Fa9h5m(&8 znnfMA@Ca7*q!pj0t>-HKi?+or0?Eg{h!dW`{K{n}lz)atFrACz;nMV+53tq{MEM&C zJdR(Hz#fIj7#?goOup|)7Re`d+O z*#@iJ_Wjc7=w63TaL2WzqFXuUD~!l|m(gZ(8f@9@UaT|+3oVcP$#$=>>nguuNnB0h zIQ%Qu65?8PfR!=9ml;`3aNDY5xEbu$RPy1t&$;cZq>9rs&Anx}uOpd`TN}?$ss_VY z#@*~I646i1CS0*4u|Y8Yf9swW{Y)h7Go#tdpIm^(GD};}5~j+&Pgt7FVC^EAvSj8( z43N)W*66LL=%gVtM&&2M#gvk>AD!;=+6O0H7o4>0cBf@}lM>Md^kJeqIl3Pukwxc~ zjy0_Q)c#TZ!O`XDBqU$uZo>udh0NMz4V8tZ1sxj}{a~TPPbf(!>emHoResJeo9zyG zf%8F;7FAi)8U27$ax5MzX)|LJ_A*&B41@marSqR0ujH%j(Na>Jxf~t`h!?5+IYfgh z`+up}Za_i&amUJSmCDZU?JW+&zCvRKeuAeM9wNI@69$z3TKF@q_52qI8nIX&L1Gp9KIYWyMjr}HIIJ1vvjcW=NZF7^fxK)o`|4^$2)HXsM|?z&=qTSehsTE$&Kf5 zqL~_+R^GQ-mbx~kF<7xM)jHqbU26@u%1m-HZJ3xb<22gF*ohc|#t%At^xe30G_A{~ z2+Ch!@-NbzA@O>`c8+dcEu6f}bdU*^a~BG-e%C_Z0%}$k4m{4Flr<6Hi>bkyenem1 zsLRAHG-k$B`6j_H9pfYrleJaxLKcOwC+fVlC;6rvz8b3|9VLM{<0T?g&O5iCuSd8Z z(%sz{VwxDB-0BD&7nuH>WRoy{bQdLg{nMN%Wrc?>`$oRP%<5YC)&AAItg!^oj}@yP zTvg&x5lTZnP+6^+>vH=^C)cwsjm4p9AjDwuj9O&sysuk7QNM`>(H0Mu>vnQj5yckH zASc_>RsphwcJ3_Bo;}^LbL{1XEMZD{TFaWfDD`TGLd}4cVK+;}$7WZIMnancjguZC z{;kbf9ziE(bfRgopA&U|*QJm+uF-l!2IG-Z*`QQ(UfF1ad5Y;kR$&Re(ep*5h&qo9 zRdX>7Yu(|#x1w3(QU;pVnnnL`m%p#151}0GW!@Bcac=gQ-5N*oqIZMIA;d{HE@Y`m zbDDzjU~b-e$K4 z6(0^k-FnOA^NK4=OD66Gn|4)`6jgRY91Lu7o0$b)j*MqEIZXr#W;Ix?+-*k0LyQ5S 
z6|Sobk3fl~WZDj{G12uljF`}qjAYr$hL4I~F?sPUUkJ@wrj4pw-0v7q9OKA&b3~w7 zp(<@aXiQtZ0uTd2YF=tYX)MlZE^58c`G)6vB4wWtG2Y{+4kN<|q7_q3o<}`rQIpo4goc_*;HQzK{$r=!09r3*Fa1!&GFzd7%D=iGd>Gn^IH|yZ?4Q z)D-6pZ!k_q$F^T)pe9e`d|_1o#UgK-5fIF1 z&(Q{=_o+Cu!76*pc=AVZR6eM<1KGI89n!QZ>c+H84n9+Ks?0`bOU*B3>W7Dbz2HU; zhGV_nm4fDl^{qp+776S++D5r~n^0p-zuyGW+a^NL7C z6r?Gr$R~K6%8p>-R=abwS}gZ!a?YPQ|5KhZ(d@;!mD$zeZh*ZAy|d|;;jU1%8dd#5 zOf#loed_lUIKg(7gx+|gdDqE;HDWX6^lb$h%9@gQE)rp^5^5H@ixY~$5^V81H{p5Kd0b2 zSX%avbo|SS?m6(r;}gI+feV_{xqrZuHtcqu_%X3rqnUg6+8q{-KkE*C$Jg7Br@0}p~icw<*G&9$Te`CcGK3anL zlQ)O#qM4=?C8Ga*pH-2Xkw$S1NUd z)aaMeaJLnsOm_$bSoWVLE$#Kh7-{Do19^r-JC}x2Q=z>iNzxMC?1NbCR5M0{_3u(4 zd)W3f?MSk_{YH-*LPllZQmUqp*l~eGuOH7tw!fTUug$!Zggnilv}<oY^_3!mBgI|iat+qnT= z)1hVE`Z&XB&tZ}ib@B1nkW~kd8ZpCAwto)F1Qn0bw+g#L|3p06Z&Eb695 zK1)TpC-nwg1>jp{D-!zP6LAS(#}HQ$7yX88-G3(1 zKmU$Zs&jKi2d7Tg)VotMoqrIISi^eMxOKMG(9DP8c$cyje#Gmc!97xBSNq= zR2allV^qERPLPUgTId|IM&mSO*8#E+qq?S_&d6yLJkNyR0I=r3Sb+^!a9$2uX=KWX zw@5S6_r||4cj(L2mS)*IJj)T>Sf(5% zXfdL5+58M|cKg^ouf+bHS7IYfw3ClRKuWv%icN2X%>T~kRec!!r@rqB1oqG5-^Fn<(cV4$u*bWDwtU>ZTIA9gZ8$3$-jVE%vCOD` zIf0?dbH=V4f_gbaEa1*MZSu$eP*cOkoM!;JSAlLl`b%&JX?Uw}OVYlVK< zwK7}oXA`cv$Ih>6k4+Ohi4-f!ne*LSs&_{pV8e}_R|~&X94_a|6zJ592?!tNoYa1U z6W|q*gsS2GfyPkkewp zPsG0ox>q5N*SmnyC?iRSb1==m|eF5vYow zW{?ZL3T=YK0w*`HfAU!!0OeH_ck2+V$~HN_Vn!T;aBydV+1&~@2c(?p7?Hb7Rd>gl`V||YN1S-1x6Z`ue~Ow zA?zqGNsL#dRU4@)Jf|qU=u2hs9Hrx&7yHucfyjM4a@h%v!`fbKaIO2Uz(qXal;IBn zELzO#U+72|sPW}rrk{Y1?(vV*g+R?F(EU0Z zgVip>2JfqeKreBf7B7nzXciWSPXlY=9*ozkH{fScKDPn(gBNX9;J#k{6)nd~o^&sdY^_#RP3y1_IYTZVR(wPhVmh)8dV5 z0x)lXQSebdR$$PHhL#h^y`MFNijZ+c2S+#m%upD-P)@49$d#W zC~g3DDYW*+3U|!j%22E=92Hwot7U$tOIPnsO-awmiwY3FhoTxly|BF|xd}4fV1nD> zQlsp}vxz;Uc+kyNapL(*Z=H3$kP|wq)MU2U>Ez}HI=e*(n{}X!ZkU;9XDuY+5t!ot z&k1qI`xXps-_8f~tv?6W)pl4iRJpoRe<7Tq*nY*eBTCbJZk^Dh12=WF(TU!fVDD#; z<7gMum|kaD_aGpCzL032Gy;k(i~#HrmY=%HBjwDbc~le6XDx%Tc^EC#wPtR)Qx7~H z%^?bJ*_|DfTXT%pKKc(h-DEd|UF)SCt!gy`MiAWsouG?1E%ve-cQmqp7B*G=t`RDY 
z)PsdQB$~h8dKh-eXW6_ik>~J0U#990u@x3ixG{dOk9oc#6Umla_caNoTA%Wn;BqA= z8R;Px-nO>#9rvjNtzaxQa9eQ#XxtQI`gwp zQF71wMBca+3(7b0SA^61|2e5-ViTH&r}%herG;{GWm19=D67r9?)5kq-1++>o1NHv zatclgO8F6}S`Gd4;+u03kB`&tlELW!)~4(do(fPXITZ9y-6&Y<-&y%ckC$w7>2$`f zYBn5Whwdzyj&eJ$z-?tBG50)PZ7t<+=92^OBmvYb-lS^xum9l1cr6NGx9d5mWdH(O zN$VW3U)s(q}*aJLSK>f?7EUw!7eR9+o)aQJMk$TC^XDF0`l~y7|W;Bqodw zbwuAAzjNoS0+8^^SVb|sG4I$cD3)_Upv+e*Q@CT8A+x+BsBVY&a;54VPsU2hzG{88 z!c8G_bPMf<`>(!|>6SZ8HRq`EzX)^uI<-ET(lGhqXix;aQ(L zVqMC#@#vA9rUNajfIVWt68t0On9a6n1td|P6IpiKl>r-g*Q%p}bXS(M>Z)63x z0(M(AEXk%2{6gQ{5zpOSGJTI5ZaPY1_f?a?V>M%g+11iV%(d?R*=o=B>nc2tYN3_) zRkLX|`=jl@j1^oOcEy{zp9`Jt7Hf*KYE-(U$MOl;i3G6QMN}xM9Fy9){UlOZS+kF> zQXK;`U%h)Tj3Ez((2ZdGEg*2MdH1(Qft~!= z@fzoNHvOja&e`Zfiva-UZS%^zYu~}&1SKsGg$5ksT$~c$@pJrO=QJ7ky^3;rhF1M4 zDZ;E=qh?q)qhY1+`9pFJ1ozTI7OrDIG)mg?8rLvRbSQj}F_OKS^bKy;?h826^|6Y4 za&zE@WeB*4P|7PrkVq%I>^-^IqFYtQ*U5@A1<+y8Q<}YHs*TvU94!%@D*DI-FuB|1 zTXR6=@Ct;4=zij`Xn28orGf*s1jokOV?YI{JK0CDL^OS)x%ux<0*~k8>G5>}6y&%Uk(YjIe$YX;` zf*9b;TP043XreSKzvH1*0z%Blq76B+zsQ>V&jcMh2t{MR7<|!$p`b9XP#r5p44pGDfL&clCl3{jjJ%qwQWXUq zLchaP|HC9vH%hBcoSlgEIo%=#kWebXH{Q<_I%!=6%m)jW^&>hrcJ8r#Z^x5PEJmP3 zRmrH@VkjT2hpv%b{jfZgZGs;8{mi6nu_i?8TI5pAO|=-x0#+Ni6%oFDCIO8{^@;UGV~+1V!Mrbt-CKB zo4#;xL}n(nVGmY3*1xqb;Q*}QUT;^Ds24w~wuazsbNb=ZhTlJP;wrC+{dPZULRnEZ zeJ*v(Zh~XpM{V31-ty)gVGrb~!LkAB$GtVJiITwB(`{;Sf$yD}ZC&Fr9r|GcCPPz0 z9;*s#*dQ$`yigxIW5?8*lk+CqM`q`iH-_U-1bXuN>*Tg@O3)^xMvJ)D7XdxXFIr9+ zH7t*KadbUeub8N!1MzQ&B-;QZAUonz7||z4*TI5i6wH;4plJhG(A{cnb%jvVrJfYg zw%@Bf3kl)!)2zvpEr31>X4({FKX?ESBiuqtO}aaV?cq}GMY8V|t3_c9ajJN0z2bD7 z5ORloM6`j#=Owsw{pX5;&5tKCaS=Aob3Cf4$zerrJY6L-K*EIT7l}$b+{e}a{x$Ra zgn^vM@w~Ee(&?E_q95Le@crR|1uWD zrODO)E>%&r=sQ50Q$^}GbHUf*3qqimKV1Rh^T+*)X4x-s$~7-n?{p#Her?V_c3p(zA0G-V;WUmXc7_Y}!`H=Y8UH?Q0FA~-vIVfu9TV`I zf2L)6>bBL zFm?}N-anvP`y)?&phrmHeLS3RU`m5hi)*|MyRYZuXMD2M#7HK!caw!}=I6Z9v z20rFpf=6a$cPQMniq^!|ke)m!ZTm#cfU8ENEMSuPPF2Y?+zF0C2Z|y_v~mvtZ#u4B zmqEP}tJqtV_hy#=LgYNo?uAJ27XD=`P+Xqw4$j)oe>I@*Jk(L`q 
z)Uc=SP3*P&>gMr~*>R0s5y00nnMo{Sg+(w6p6*Hk^w8N5qQ&^D=*I3*)!GZSYKH_m zv$4Z$fP)k%Sz9Pi_mIU%7 z0611RN;hu0A9%3{_;(l&>+x057lS@PILdkZ^gc9j5q>Y95m+Rf>iMe3xc32=hVV)D zWynyWBwg|k)HvX^49ossAd>f#E!3q$v#~Bf910ka1EXC?H3Y z#l=469K^B%Yac}3T_FcWMG}UdEQN>Q)?+HC8K|Z7p|&;87{q)xlS(6`E7vx5_8DZk z;rxQA^D0Avo<%Lr5GbK&+RL@NozA%%E;S?LDU4Wtja&;i>fq?tachUZu-ZaI!i`@C zX>y$m%xYDf2Tg%Uv;P$Y=QKoEVpekKeCgP9q#FbT=@JZ5Lb|)98^j=n?igUC zhVB@evo_EBe&;*?&fh1PJ!{rp>%OnLi>G!D8@hTvdt9!K-lGDL7P{kcxU;pm%VVg= zwp;(W;(`@HamO0jt5#b5VzTS}@z(o39N%9*j22^EBp$`ctNOln!j~qbL{#Q|`Ef9b zP;y_4(fkS|iG)&nM_CBN0vXUjITRz5C)#D=P2l$yDuNDbD)7ImzMLdCPkOuAgwc1J zaa)2Qu!huV#hz82cu4Eed!kC_bb8x7(MR9&t6+(Bqvyjp=zeY!C1-xD*`N_x#P%3e zXdiOkPCw(-WA^aeQQrZ1lcljbe(C19y`20PD1p6s#lKxBDwpxME+yi$npe7!jx;x? zc~ghMvbSDEEVx|OyMLr#wASc+`W6o}iK?EyK8`y883KSH;#$+esE!?Lg3JJ9N3djP z`t)7y^l-ygNos;=vczCUfxf}4H)&-QVFw*UF>f?VihemgRsX(A$&4G5;mxny1u*2~ zZ}ZVTXt`!2+iUskd`OtWZS>~`bbOuKYx&K9kO3bfuXBD>Vx~|PFBNjjR@UudT|T!y zIB48~DkcUi_UEpm24J752;^Ic-Gs2hCc%yaStfj`fMw z`ZbS_{5XILfU>X1Ak~xWxLk!+l5SINrWX~9v}gBmn-_FK*J>DP3GQ5@^@cIV$l)c( zFi#sVUfTS=&IaEpzC?rLO(dcJ0*H?Ifi$3?Ztg3yO-?##m7>_W^WKu!IyuZyTOCUziMw^>XmqB&wBE+JDCRmI*)B*l%ho1HG1dZmrC426+?Awd@=1fN zY43;k>BsH;uezaQ&f{KwVE4c>?U=&53+IXZodr|;~fFNx=-7e5_P^RtM4r)20aguwHoi>AWQj= z)nJ~VPS+jw7X8NcrIZFf=i>DW!vU@ILB4I$q-oFfO)1BMDKS3>3+BJ#kpqu0OfW15m3}JiFTT59B#= z%zb4lf~9FmN&Z@h_1C;U(bc&a{cjRt#NV%!2KPib%mA+VPpam6YU*Dn6^uej47DlQ zd!YP6mgp4*2{yS33s4GVDhgklOY)|#(@4@+m7pH5UAlue=@P9~F^{4L?dY8LgmIUc z-KO?CpevhdJZbZ=V-;#V=;d-c|4iQv$&n61PHJDWm!B#onJ7G~Z?Nst=4h{+GPkeTF0Sd>rU;7PF;4w~ zv1ZG<4DlM~u^JpNc_p&62Krj5Ollqs$|+T=`iYCAT;J)QmU+Uw;JoicAvOyQ&#=9%{Yv z@j@9(t5v`Djx1=-Ht@)WFPcHA(zMK(0sA607Qw#iwXy>cyE5dUhl-iX4}%QDNc5X8 zb6Y6T(QbH(cQV{?>Uj8RygKY}mUNVjkkX8CHp&uk3SYy%EjJy2f~@#&b+$24qYu?@ zHLW+tps^!jO2qyXF1-6$Cm0E~Zuh@n_e8T$S7(g46E>CCE^c*`!CF&y5;+0VrP>NP zTbWVrif<-rc4%3aXlovfkB2gEenZ7vmajT+fw7FzbKz)rwzIx7QsJ+Z?Y|Wwf>P6PT`ln9-IHOzxa1h zqK__V%L8F@T$z_5C#gmfk-jZd8+`~x%>-^mM*VgaYHZtSVPW3=366y)oKS>!g}i}a 
zzD?MUO&H^!6)v)tRMErgI)fhlDwK}oyeaX+#A>9(_GxdNQGkZu>8ly*>vsfD#n~=B z{cAlaw$Q#=zXB=4yzTA;B4+xb5?hxZA0=`bqWkxHWTTkG(`XT?7yG#%(`1>HkxFKf z*sS>s{QzVP`6%d%J^M5GC|=?%{3FA^jG|laREHhL&@~*nLJ0l#e_+!H+-LNb4lE1f@PP3v-0}aGwiRU_7W`~o-b6wqDXDhKT zsLIB@3;NBO1s+#T;G3U*z(nLj&vZR|VRz!&E31XOI?jK`D~F?o7jHKyHlhACtqH&c ziy)W_OG$wE%}#(Mg0TMRYnKg&R1N$O4)JrrbYvM{X6_R(H8_P3zzUrxjp+7Q;kY`cs%Gkb<>=t z+8Xc0aAvGdurjIg_M4^}UwyG!u{UAc-gnX*8EMr%gU%C(i$5r}cy|)n8F(Fxl#nFK zGRU=(;Ib2&sEo=f6|kg@Df>#9E=9j1j&aL4-RBk&8*lMVDg5~daPo0H{daSI1|PnX zU}*9dN4f)=%Z3k9A$&w>4tuYNFxEyg^*{EO=9lkHl{r}55fuM5AsYF`I6DuIuwp|MX#ET-ma|M%dNWf_4?Mu-PviDhCb3c68yPH1^%n>61mC z-QRxKa$$>d1(h4PaR(9e>;&+4%A^F+#Ig|4Gwr>`B5QF@l4%*q)5>I6V*nvmh_lnM zyEo&d;f9R;M5I;h=KG>mdCtk$y!Jk7p1yhK0wyg1e4;Ha$4)KW!H0chY-IeKbvM5) zuAE`gXa`iIQmSlCXt;vleLoA0_I`xa@-^KOGpQLgS%4|~yFHH6tlB*T^rl%y#KUj# z*1QGwrfkm|8YYS@uemCEtETjt2*|kz)3-L`atabEckdO*Pxp~?7~KN%=gH*DFJY(__Df zxUXF-D}!~I6p z5p-|Ll}SpX%8V8i80!w!^fE*EY&g)N{8Ml)^jxqpR-R$qfd=LpsBZoR`V)12-}V%U zqE}#o!}D#BE9YNRIWUZQ3$d1oqE!p!WshkZ*-e!g`bG@ic4Uek%_wMFjoXADpJ6uc*9tnAXgbE)i+6O!b@L_)Q>nV{$I>;(W1kw%^q`=1sR3dbw=g1N?Mo5uMWnGxg ztd%wd?XBdN0GLE3<6rpJfu=d=fIvACtpQv5%(nBhyNml?X&b&OVQyviMgpI8f}@!B z2C}V+uNqnDQfx7)kyC6?(^~~l7PbdMZ|n7xj-4Or(l1f)d}gC??fV_o<3rttLiHRT zSjGQFRsm!`%6y?W@D-C&F6}6ABbun+6jWAXu<}j=={WToHZxTKKk5s4z>6@u`=ZB3 zUHM}Bli2teFy9aL0AZR5kRv>!sFXZZ&LNzTjE@!aOIJuXTKxGd7?Byc(!4z%{Z8$* z=>m9~m?26j)Y*mVpy}VS)P`4WEFYXwtYaumfB}zN{qB=|t(jm~6{Y<8F|%f?u3yJp zvP0%TF9LpY;nlHH`v58Vgg@L3UJ!CIcIHt8=|478&IXH(&mQIegcy~$T;q5FhD5;r z_XzXbh5@?fZ2NbQg3!jo=p1-2xgm=Pw?#iKKprr0EoY1D=+BSCezbPUO}_m^CAZX@ z-1C>rzy`Vy6U(w9^E&-A#YMZ4fwO=(&rUCu zEuSbnB61I=G;&thpi^lvs||CMp00my>tF#%XE9H%oKFQ5G@L``W@e&sHswwM?&yE# z!7W>t3f{k2&INWJv1Qp$Rg_Mad%k@|^*-8~t_EnV;_gsAivx<336_*k)b1T1YVVTE z2aE*WI0p{ms?-r+VoCsXHAzgm>h=EbI(C*@K+CzYZcP07+3`ZuRrAMfGtRNM<#UNl zazn>MsusTcfm8j}YcPP-z?nICygfbHDD|eF*{5(aYuq1daJA9H)YxxPuMh0)$0fUl ziHUt!k9b&S3zVE9b(h}8VZvNH=93qd_t3cgXCm(UKT5SL73&pC)iox4nh2Uw4I5gk 
zs|=KpcMfMqG^eS0w`ac~Kf5b->MI6(uILM+$yM_=zAMZx5VRsp*1^eoXkkMJ`>dk3 z=(yJrxTJ3g)!6a;72g2TUinLU+D*chFQG{uj3?w1P53Y<44N6hZ& zeV7eAtat{pIGlh^WAf(!SXG)m0Q`(cI@fOjt7cXB3aX_=5D3|Nfb3Q+XY8fRo3d5e z{}ye3(z>Kho0GG%*>{8CLS*;~tQ;I#jc!ZXg1d*}K{#}lKslTiK| zb%i^^G8$_v=23mZ?=Cw$dc0giovqvUmq&&|Jna0Bj|$aq(jAqMyEl2@`0DOMd@44q zL)1L^{dd9n5bi1McR-4JRRNG+>U!FB>y8<=*Q0@5v7xFq{ChDMdrOL#*9L;?q;uI# zk$HCxtK7$Tka=Cj5zfYN|89=&XMagJ*T(=dW#Z^%n;ZJBF77Nw{4FAro+ zKR_6bCcOYK+~otes%bI(i!$qMv{>X9C(yPC$QrD4%- z>}Cu68l2<%d}Ffu2Y&f0pk%VC{tL*kZVVeoB|@2Sp(8`rPuvzto)p^8-hZ7g#iSdCE1sUC6U*9HH2MzC*9=N# z8QzfHIkBJ#uw8bIHeYmh)FUJYIiJ}Lc(4eWE-&r+@0{#%()QHJ?~JKki$i_B6R>(` z8?$Luo8$v|=b1EpJ*ltf5M&ADDx`=O7T_oWB0~ zXY2U&NED#Lm#lGs! zIunB00JV&ao}h*ko-ggZ@m>C6uFsq5b$2b*tRkDn$1Xs7N#7-w9J5{sxp+Afm|4G~ zHB|bqb$T^zQgH7+v`66eG3ElT1+4t0-$T=Q2HJG-(C@U%8HNG=tnjHPs)gUJv!o^N zH)+KAKWSEMik8LA^Mvjk8JZ1bd@!Udsw97%FPF0ZK%pIiF3O;Os}DDUktUT6dt-zT(R$F71=t4ni?gisb?20s0>nS@%j4X}BS7g*5;J zL2#)#F74O)7N7qtEMNe#C#5(v$)t7ZhZahP;45#6iN;&Jpy=cbmMAr~W;wsO!;R1h zH*@0x^L8Uh|8H-s=$M|VaoO#eR=gfY?HXpauzS>@EWF))MZF2(KBechx)NZ! ziVARYWm-XnB!nyLTtk#VGD1l3sl)_Wd?3;+6A$qh_hzg_WYxL+S{&dZ`-Y}g*?x(Y zY&kSVAS1E$s!u)QC6Y-ozc$ijZai@5BitN69T6BH2}+8*)0>D)O$?BoDcxTkF1Aid z*biA5uBvEgk8^WE@$egn4WG@IGp`qmk@fLPmGak2xh3j!*)jz#naizst%_LREdNNn zPL7$vL9%`E(e<|t45xvqJrQG2bDhTs!4~ST>fBx-_6^v#a%VKlQUBECI1l%Bfk)5c zn1~5A<*C2hyHQD<-VHz%IEAx_HI2UN60LB;Ug5odzUJ-J8G+X=1&BFtC&{i=f%tYS zP~Y$k6{s`2DkdFjwm$rGMMHK^Y}HrQK6}49&h=6J>D1mf_m!qj=A{-7nf@E$`^qG0 z)beyF_X9`Qj|^#<_daCbzC-0^^7ncGm!I=c(p?hblX;rUW$&-M_up+CLwt|FqXpb> z{Y^`=ZL9{1zP6Dbh$Q)RRSHDVaS= zAGXwF7X`0@mzhs@FlF)wt0gh7TPbn9-S=1HBX%-bm~dNXPPVCr0RH|$Y_EnqM_lzs zv2RN50fY@sUxSf6rX+s0{^R$QB8(PujFsWCFOOki0peTTjt^XI+P-0PLgrhxE=^C%)WTCp;-K$t`|WsZfxr9lynbb$I_&j? 
zG|;P9E#VAWLyGiGbycKIMUPyrLUK0!mlA(^i~7(PAkHJ+ihLE?B}$A{4xJV6=I5MnkJ*|9%GBaZz#%bI{ISyF z+nWoy#ZI>RjyQn&P*E@<+X#v*>?nNhq&pjw@|c{x z8<0Y+hQY>bUxdoBZy68h3mp|NMa@BsM8A)?H~eY5J)odH62k%Ayh<~~fBh>m-E`e3 zw2w^G(*9c_q)!J_mSwD>=<%QFL{KN%@ zgvh0Tz0!z+R}|93lLr1fL}1SYutJ1G@`B<|C>N;YWRj?a0G7j-welh zkh=W#AhRk16T=h4*Jr>q0&&wkdLn0>i9KqsM~lwe%;0qR@Z0w}eA5Uc9^bf?%`BW6 z%H>*c0{`4{-owj9TaYCC4D0*$9=IrZC8OUhg52l9KB3AXg{wkjCJ3=&slcEH2bT_T zh!#UmbXn0y3^>sW8oi!Ejes}^}L(+_5vZcROiQ1 ztL14S>W@m~d_8@tlRsZ-G4v}}n5`ci)VEr%YS1lv&i+$pn#H(oHI?17;j-5U@Bq%6 zUs$P}#zuW)w(R4mig_YwoqB_IrLE4~rCa3pm=|-o-Z!W09AmNA&PU45mA+dm&SqEq z#Pyf~7>j(%<9N-#>FwT@c$j8$#o>1-g2iYMT*->+hVs@L<$0}>)>A< zHgFA9qr5#Rx>p7=KY10OZh|ot9PhVgL^ceS#v#$i#cnLPV4g!D(be5x86-sZJvXPh z@@FqHh@@Y-XxiW&wM;D7qzbg$T; zWu{24VRXi-`4KB*7V!${Wjq6EvoAJ)j&G9dJ(EGMPVrcwX5TtcVwn{dSC@wu-mq^u zufA<(nVLxnyud?ywTC%&8=nh=1y|=4TX@1n4$4`R$zHUd0*J9(OH2HPCGx#*Q!@9l zokuX3H5JXmL2Oc)#gsc*7|UtgULO<6^oS!XB;DD{$to-jWHEh>V&Zs$_))hlS#wXK zHO6HFYP6=yl!x12Ovqil+YU6~KGRoS8D-UA>(w0GdU0A*7aRR$0WZSw%s55jV^l~#JtUT>Hn)@mf<@4d;u|%qZ zytp_ShyupXz+faC_1uptMpX7iD-kZgKdlrY%gQ7#Uwb&B5f`Z{{wozjKRu>Om}wwZ zQ?7jUZnBq1@qUGcvXS_6)2Zl#%zM_#ejK<;{%K){I>?n_#%k*R9SXkgI4z2neS?DCeV^I${q{oDMxjYuAru0;siyo0)nK2AE1Paq6fAzI-+5Hd`voQ zCb^t55+xu#z+heXg%i5_9fr7OC=8j9(oSVFSzS?PjT;LG1$olE-oNP zXuTfj0`J4SLuRhuVkT#U~D0 zXkx@ca886!`7Cyg^WFQBCGJ2<)(s*ir?q)-gA|?}8V>!qsNPIjuQJ=Y2oqo^-UdS22>TniNK&BSl^Q8Fk)auMMmbKP zFCtoY{=UM|!`PlU&fg&v{Jp*5(TG-{gScbY5NkP-QPx23!E(BQ4^g`4zgqayA_FcH zreqX^lO9`5K<>Qw51jo+0S&mmuLCKG5v0lPcgnBAT$IE=rxAh-6AS869lN@}lmO@7 zOtC0!-bsO6pI5aJQD38#hSx>asr1MZ>e8FH0^MkC*SyCF<(S3ZCtd}oO5JP_1!!~J zK7u`)bpvkt=}M6dkW3n3F|TSp$GZ{_Y4@&zgJ?Bvn~%|GMW2(X+ey&zv^ir@I99~3 zDDT;$>v?dFKDnzH7i-|tezw2IzL`rR+nNT#j!ZT`{1Y77%0PBQ8ZR`f?wt37VlZ$7 z4MHkwzzQA*syM zTQ|Y;rq_TU5=xj(pN!HK+(pbNj%0}?-Gs;!Z~X0VUv@^e$T zhH@48tw)RAVX{g!dWw5okfU74u|m`8LY*NSotmHt2-QX>+Sc!E^`Q?B$57tfVrSbG z$3P9p$0nAz)qFVsiX-j4<5-H=5$m4%15{mcb}7(f<$#op9H0xx0amq5{WPn0`b9=S zR1@Kwrt}C~s)G+a=K=+`R3QD>2_H^iM6e?x$Da>)$D2L;C?0O}UY-hbb9-_KwIjv- 
z=xak1O)FYHk99~I-%2S3`wBV;aFx!19~Fj>-Me#}QlNhdSZdJV%Hi|Sc)8e#c&Y)MczfO8AhMF1Zu(MwEIA|-e3?Yt7$Ej|R-SI3hWOQGG4 z$35UrnU^%5SPA*`firu{TU!t)=gs>tx}H0K&7kxsdsK7I3w(sYqlVl66?o99L-N0M zeB@#h3}z`M{aiwgmE>tf7v0h!6iftTdgU+>?Jrqx@fF(9Q7+T(H6ek09JH^`P?zVa zf|qA|wrLh1^@4MxK;0>vDOK!jmLLbrk2|y>=!hv-$#{NUT8C8#h>ZDyO5_Ph@AXH}p7JOx8}}Uhefx8}B_|fs>*Qtl#c! zr&|vxjwklPv=c6nH~xg8KmrS59LR&6INKjmbW!$* zlIPY77K8i^Mr`z;sQRlJZqBVV&o94CY_EIq`^vt@S6k#0bpH>5sK1b8kG4~@|Pq`?Wzm>ciqVyj;OqlD2`IXEn5$LePo+%oC-FV{gl z`U+>~DxBP+sN;h@KLLBS=YKXqg@57)eu}5I(@oQshu`awgvrtH2J`)Zl9;Aw@=V&# zy4?Hjdo{8{4NN>Ed(rX>cwwHhqS`6|x}U22VmVoqZ~KYYYVgmLrRZDNF3pQ znu)QY=+J;J>ylI)4Tsm~bHLMjmr`;RN_OcWJ4?>HA05ez13d1--t)I>u(L{wOc!T> zJt0XSmiA}_fT$2f%jZi}dyy&yk}Zf?cDci%SF#N60+ApD*~IE*mpiVK6&6i_+K+)5 z4{YHizz@51Cz^Lx^gZA<{mO|?_Mqae)VjCnJdeE{aKg5oZP=Hc7N}HNmCS}vn9BhU z8JB^|^v=pEx8B2e1+-GL?v5;@pb0)#&@Qtvg98L!|HN8q;5BYOJ_i{Cb|HP^r zw`lS#Om02YjL9ydB%@9*XVI?gn27`t03Gk4?*%+El3M&@`qtebAWuj|$#;2FCv;`F zb6Oeq#;u3HWmGlT1j;h~97_tScTf>C#_F>Kw4OLYyWAXEMJa5boPk2hFLe+nNXBVO zt#>nBnIIuHr;ous%p#RwZM-#UX6shsgjyuPSo!-z&ru)UqAEj;6_@S8#8&=v*d`5} zs_DEe)!3Lc0|XcH)c<^ENlxXZdbqwwj{*CGxY6d0-R!PzpO+$rq>X~?vtE`u|0^;w z=@dncXx`ximK`TAH{HwLE*jOj~VBKhvCbEos>>z z@6cH@^vF|{0nw1!V^VJW-l+`ZEZg;(OK-HIc~Z(KAAlUwe_gpTkQcdB*7iCx<(9>{)i}TT`gbcH1-DaJRK2bj&qu=BO?~!`-*)BBeH8aNc$C=3Y%5`}ix?rB~g?llbG=;eDi%AE_ z&z|F(y9CmCr1}NgH`#`*_R}ryhyooA5dtd+x3xg6lBNdb>iVUfQHSt8lko9q4WqmR z0}We(~uTC9NdXG2GGg6zl zFuX%~(#Ecek8tbH+E2G+PkFo$l^%W8KqxatQ~5R;H;uIv9>&A?@fv4c@3PFpnmmSM`lq_X4u~%=099n zIjkPr*=Yuza4bkzAXclBW!w=lCy{P9t=7m-O&rN*8$vET9;azAEikxYFL$uIi6`zo zVU}pS{-;4`vo`Y>lzBbD?9Cm28emG-RZ~*ewabW3Ohw{L|NTz&IPWa}i;brYaINsO zwEoWx_Wbrf`zgmu@1<=1$&xd}nHcWhJ5mkTFCod@IFd-O5%p{r)kOAJSIXml+)r^S zGC+y4#T)wUCtSEV7p6s5v%@{}LbQo-HP3!ZA9<}DW(!u>5Hh8u^+F1sVmdM#%p5bD z?08Of?%2)c4-DK4HYdYmUjfrZ928z7Gh!)plKf%^l^7NnR;aJjHs*A zn;YiNzciosIf4{V+N_^`27~gg*J6l*=kPR$mzO;b4ZsQSuL^&|koOqQlQ*2WRiR#} zD~pwU@ePaB^d{qtNS5F2WYNm__ODu5r)iXYpKl4;bf{lC7r$}Q96r5g`uMn&(pItM zQzS!LmZ3tNKD~g#u74NTuGfilBMr|#|7Az#$uNHroUJ2 
z7ZAlQ_76#N4l@>r)6VUddQl9S<)Gksw4-}T+Qt?6GgwUNWf>yF6C@bBY-pUz&8D{p95$#-nKowdw& ziO`2|nMS|AM?xHlLFs_E4yGa@nqqU80YdKDt#)R~lW?|9=9S{lWg9Y-|ptzdP zxMzbUcJ0-fVb@)~XW8#5!uCRS z+q2`(S#D0oZK2#~49d7GPq_?WXn8QD55H7I=*PqCs-~UuOp?ppu@U}*qDSB$lx=+| z7A(EE>)GT^4qRVlORI<9bwUJ>4AI(5Fy+>V(J1IX!jE7luI*nqSz+;}rHy4o&eO!| zefz=im?@2g(Ddke3|??Xr1i5f?O_ciMvk+#L>sL-6SF?|>8cQ|1{d2hU-b`ep8Ibw z%X7q@1EM(5Un=`bdV0XUs!+(IW65iEuWwrmBK`gzNfbu<3;5|h*Z?NzB1kNQ$Z1u$ zt;?KH?s?X?2MUkGro;`)YtQL$hx8Lfe^(i8ajybz&BeT*4G$Fh*J;iHbF0McpcsmHXCm-RcMj~z0vYfRvWeNGf!KXt_yyvn%J)#nZe^V5Fu{-Od-gBV zq9GqmAL-yLjr@Z62)9S}aX>MS4m|vkh^}-)MuA~q2okzc0>u9fWTqsyBClW{+z5|o z#(TX%Ds}w0?^SxpqDr2wuiwB{DC$b$bdOP>(xG$CXhIgQWFh(lSEO9| zz#+&5cW7&+l`}k!u~d#-+xxn2ny&!)&Vz?^F+j)D!AohYNxz%<-6ZLwddy`-x?1WZXF^Qmd9ZK(mWb-z088@}M{NWPvxw-b zYm`mr^M7|cvi?cNN+RxnkJIG=S&Y6m;)V=*vynElFh$c-G{u<1FD;>!uL(l=mC^E9 zZh^muX;ls*u%D9|tZzPlcoTu(wTN(^!OhaBfULQLnZ8mHyiYLUw_<{)JNjsk-FwDL zVgwOi1@keT)x!EpGYUKyWw)M0?y;(SU5GC<47=A}=bJq12UTGXBT}Z7=MuxWd)T1M z+}1a&-QwM3jaA23+fP@!ovp+m@aKi9cqJ6+0@gNz5^^FyZm?|T0%zTav9kDh2$0Wb zHk`Ly{(2z5!y{8RK03 zjs#3=?;J-Z)P$G^DW-DuRQXP+MRP8&c``l$OZj{VgH6_o1&eeu{A2nbS*TfCl-PJy zZf)Y{a9vr;QvdNt6PxAD*tDo-uh8QKB5=|Ln-T`D9fng`cOUK8;RZejXO`aHG({q* z2M=2|?}CLyOlT1_5cL{p#(&>RfKlMJAebvBx_=Q(wYWAe zs4wa|5t)s?_=c&kvN&-gEE+tFUQ~+#Ud~PJzbmc}lpI*p^95lOp+Lnl7pF=Bi$Fk4 zGBkD}y2S480Vp!<#~1<~a(cil^1E*bpNu)2{ZkWAiYcm`uDn;mB(h99=0M8k9O|fK zDQHT__<`8FrL+nC05-@wH@VjX@ihMkVbM<*7D;Ola^}vV#|5XY^4ytNLLtK{Cy?ci zB=v8^WjT=__w58IcUpKS7nX)>>4;-J-Y_@pN3p*v7Qk&h8+?BaFDaF$KMQzpscTPy ztaS_Lc#0|lyzCL4aMjW82BN*}RP-mw`kpV&;B`^qYj2+_-Z~(X+TT<2<4Y^RA@2|L zY{e&&2NYLU>Ho$X$Y|y&l-WE$eI=obeKj=oOlWIegKyANsCg+h+?HN0o;M-0eF-o3 zmlJ%YY?d~nx{xJB?GT0HrHt{Si1ajCQBl^a%kzEUt1`bT2+JP8tXJqVe-|OT^T=7G zKu41Bg#;Nc8<6K?Q*_@X#r1ObY0$9UF#d%_8zT%M80W`tZ9xXYrxd_woVHSm-6x^z zaI6Qgh8ZNbwUs?h^HmnddfL-P^dfuh3>KY2Hp6aIYsghI=C_x;%_pn3O37}sxZ6*TO22Yd6i{eA9jY$WME<(aMP4tL-viK# z3N2ZxYL%8dto#)r!yPp3H5e^qH$1FR&NZ=E{nZCy-YyH@zDQ7s@Kg3HE6KFx`<6TYI^C^l`MV^dVknIag+DtcZd9J& 
zhsrj5i(@B-{9;U=_?w=DFLbl}&)^>z(-v9?M4l!c5Z|%DViGHP*)pGu79o>E?K5HW~mn)l4e5<_GBL9V4yD+m_!>$YSvf@oo-Sf+d#@Tci1niEDqelU!dG$9{)~SbwEvWV-F+G z)ACJU7K`rE>*$nr(j2!E7LAZN-7vLBd0iTN;4fsnlurO2caQ2DG34^CJ~IkJ9zKe) zN||P7TL~z(;Bvbz6w1N!N4domj|&U2@OT-B^i%lT*S{0M)9j!x$uJ;P`(ohUwle$#Bqi70Y&2sCpw?E;jZ zdx0pOqqwy=+%3h@>@l439uWdDV|JHx1!3QxX-*1!k4pqmJT^Z^{=s~ox4yX6`U0mT z$*RSEVe)lsxl#0hstR8i!~G073NhrXUQv(vyk+W|NPqOQ+j9CFamKG)ABc;j%_4+m z)5R4Hr)uZ09yHOLQXcyyspTkyw$Pj23giy>6-k8$qe}qXt6?Vz;OeauNp{qp-|KWDCKBzAzBbhhI~SlM5J1M9ECBr zI3aptaw{g#pCZ-Ms2i$TZQwIUl_jSKP9khf_qiOiIPIIsyQtxPdReqz(ddCaGFBPT zE(7W~e6J>bou;X|+Gnq)ZJnfx@5g?{cB5p5*Rb`pqwIQB6@h5+8ZWWMpvyTX3l__j zzwacr_u3Y81dN3M9#?u=@RwaePaWLhx+Es>s_~`|ozG!`(yd0OM3K@rVB(7pLEM(3 zFZ#a9$@*ypb#>LTa&5M6b(lEL0o6^oDK)5C zIy$CFQA#7Z$xQVQ#rELyI^5vz&71x6)BAX^pSivf7orZLELZ<)SU<@< z+lD(1*06}v3%6;)y`C+xiEIeHY=Z5M0Oe;&z!0vmLD+Z&(`>|o-hJOp* zFdFF@e!LnqBq;g*_yCVHuP^+UTIS2!!1emWJK+CHz1dW~d=b^MUu)B6KpETQZuLB2Lw?O}2mF zb)!n^r@@zbhj==iakC9%mu1*I^p_;@2Q$A7@cC272>ljy0 z0fOM2z+)WNG;NL1TW5uRWUPqj1kkVFKl@)2o1XWtBDcUCzE_ln_VLkGKL8=pRW;Xq z;#l>6e+m5ShfNKxl{K(`s_~t`jvYtsh1cK#g9UE>t7h#6Z(emxhcKWWA|9(LRzcT6 z+84JVxF(qEpv!DGdxraer6ASGOFWFhF7qNsJjeQ+wuQ$U@*dzp^7#&(bxpxJ8sb+W zLX?g>g7k?{<83vZ)@+py$^5MUvbtNRL&0{_|DSlLI@UOvvauZ#e5}_cI_+-)anNeG zuw{EcbZ*uki@y&6v@H0XUafN3>KQS_ki`Qu45)@{mRc6UB2#2E*X*C?YzZ#V9O%`* zw^$p6r3pZP0OzTEs_~}V5jh)ao;dU~wTA#|XJyza0<17Jx83@$e5!Cy6^xeuxtBc@ z=7q_}=>V@t&aE51(NJAPc;kYUqebbzCwTe3CPqfcvW?P}01h}MTd*t>uVlN6V)fsF z{WrbumAzH`BX5g;flsgFm*+#+;M=8#g)PbXWjo&oC16Um{>O|5{C@bsOn0DHgj2%H zTTdjN_Ab0n-($lR@<)%(rFfi>gBYN7(gX0vc)toM}hih1tftdHF62*VpEVu_Ao)&+U}uEn(@2A-DGO_vWrDBgth zRRCKs$a=HGXEPdzN$C#1K}*%@io3V~e7*TwM@JvYY!ikX){o;+Q&M(Nalp#0#oprEwgHZM@+Iq(+%7d^I zJ_C$aY-P@z>m_sF)62<@DlC{_*4u!MZb0i+`v_?IRP_!QwMxfI4%faj-UFtZL@?$0 z=QI&!=9g=C&;dpFOfd$^L|hz7lI&CbFj`Fa>~P$;=UqG1(rAR@sQuAsk$%O@EeheE zXYYc0znO0w~fl~9pzI|{Nwj)$uWC()wxL?uu6j{2CKZG(lKUCc-02qPczB_8Gp8~*I*0mdi#w&V_1iWou{9`2B%8{0lN!7 z41Ft&Mq;~p)&BJgZ3%ls@_to7hzdAH*S+25H9IUUji+h8oH@sVamk|C!DWdFZHB^w 
z_nQLGGTj2zus`l9!VpsPl=8wep5bkWPaeW(Tc&|z^6*rDh>noBfD+2p4I85AgA@@_ zLQ%XQp)?`ky>4zh);iMDN*-1)M-pkL**9FE*EL+E-Zh-3_4}L{CX=HQvo==Dy*66H zZQxZm$&%FalLee3rI(UDFb4IZH4_q8y=SHBG%Xn2!$ zXV(gf(jj{3B-N#;fh=$p1bqA#?g)zz>3z1wGE`>#swT*D-k^xfcFI8q8xFn+{Y?^^ znIE%4Lu(%{yg@Re3NEF5RGM!Dwp1P^bes_2nCj%UhW-R&WV=7!B%LAGI&(@QNbTl^SAzS|1*YVBWe?X47yppPCS8&bhsEx(iSXSr3@^T{_Z zG`YCIlrsj{|G&7}N2!dmd9P5dWrnh&fuECBa zz}*mG-Mr&*4hG&S2h~PGiE_dj5<}Y!fZ1x{Ktdb2f$Vjq=dmNV&xL1&`+-%3-So3W ziDJY*2+L zQD^3MO^2RBqu}i;@u2o@gS4l&j}$5RlWfwNprJs!A&A;a@X}P1+&Ah6#U9hI@!Z@* zzsuTW=~*l_xSqPc@jT{s_5zHyW~DKP%}Qegy6pOJU#c$SIOw|m`v%CIA?vLCtye3bhoRsO^c>-qk6P@gCLMoW z2cH*Gva6=|!RptwJb1z9w0qAbS8QM%R7^JuE*{6DC;ULbScfqvjn5HBle(qMqtm641ReL)0=Ym$AO|~)Rh?AQh5Y|F29`pCAB_- z5{vjo5ASfH$19|U!{>w&pnZ6<%FdwrnqGG#_68ddmak2k<8y7EKBe5K>k~u~;EShS zWU|R=aJ<$^F-bBf14oIHIFEFtKlpu$A|aJZ=aIz20~+v~mY?2)nZ%M5Hdl6mZsvDm z0W1P{*hfv=;_O_?M(K5oO@tnF`}rw5yrv`Ld28)(0M<@C?&QWQ^LS)sY_jwU0ZdC) zGu=Amae}5gD4WGc`dxpN3%vjK?Iav0uzcu$t|wf{Mbd?G>rKvpPt*2RYAD2dL*M41 zD1aQtK_r@igjWFCgNMsUaN|Ds9m`XiYj{fHcUG|*&YE{x0vL60U@pAW1CB4Uo}qMR;hP{SmLvgRJ}R+mo=Cq^ zD;$VQ(l0Ve52*g1_TDS1sWtu<4IrQ(paLr00#XH~N)2p9q)0E)QF@nNlTbti6p)Sx zL3;1K1#CzSy+i0d^qP?5uDJLAj{9)$%Y8a$jPng2WRNV@*XOUzsbu2O*0?+~x5NGC z`G2<+`3tcIJr{)(U1s1ZIq~^FcKurlF5)m`o40_HQSDXBPn*U>w^m*5M^9={!R<{Y zT6Rnj6HrP@LsiI8xIrB|3yc(KyIt9UGHTOfi%+_YmI8n$>p2?$hgg6Xn=i#7=-?NT zo7zpikJ&hl(xtVeAV(Z@r~1eg+yA?5xpX`O`Sl-3BBpMqQl}zP;$xLe3<9sQ0%WHI z^xTdp*2mmn1m)j%ZemDx$JOqm2SAl;Lu4$Ucu$`Mij{2o^9u53o;5CH|LDF4C4e>L zA7etxNOS>dq15Xvlp4UwL#(^Du0Pv29JZZqeB${%A?hC>KMR7wp^K)`KIxirETC>B zR571IN#T`|3_xQ|SlHv*-v>2S5qUQ2N{eMf1OV$O7VGJi|A1obf`w?P0CgniFFgu2 znl)0CXh7Y+>FTyZ?xhQ9UwI=a*ewfOIiSxM=M*_WS;gw&C)i#i)g=%>k7+Vn7@wUL z1t#ig)sbXgvvbOx*L;*;G{zfn3ACa0KDO=YX#R)~&x)i63-KLP@QGeP>Dqkv8-QH+ zBgJmKSxWk&*tJb{5Ri}m4X_HPsZlBBG$0l4;dIMw#phBIqHmT?AKao{69=|H(2Whw zq4{rgDQ2^D^=u$hF752!Vf$5-OJGQl<|Cy$hnGp@yN=Qe=VL?G6)ZlShl3ct+_~e4x{sW!i*?RG5MQHPjqQDa zg!kPpdWA2A)h!<#(9^E&GiE9hKB zAu|)Pp3KqaYiX`s|3`y3M=N=t!F)Kxy8ND2_&>R;O+~3Ek{F4Ai38#Kyy@$d 
z*RGKUT^&3vQU8J=xtA`Z7yANi8Ax_#oM%tO-V&^hdrlty4ANPxE;~iEl$)c!6HobA zH=<6ch7q~->N|P^5GWxXU*>?1%GCWTS^`jl18Dl{B1z_r0+kT;JUKtYwdOK+RhG8% zeti|YdX(8mSCADKIgH_|LuG)a@eQ3@fLIl8_cbZIrBj1lz#K?63?hy@pYB_+$bylt z>UKn4{xZk;oM7FC5Go1QqQQ}2969QvZC zP{|`#3Z#1-!tEnvuQ{mrmIPuO16*O!WrLQN8AuA8q}H-_AIwrPt!5oQ{vra{th4#O zN|?AXq98)1b|G07e?}<3ZlK45S?6+XCL*&OBp)+96|M3p(_v@oY8437_?(0Thr40= zx~y(zxa%pdv>8AhC%cv@>cag1igDg@Ex@ZENL&L|g_53OT*kuQI;cL%yHeIYe%jEs zb~**QRekuouI=>L1IIQ2AW0oyuLPEpy-*#1F_6QK*bX9^GyfoMjzY&U(80z8vre{t zR{Iyk;n%bd2_BK&g*j=&ZA98#@v$N$`o+Ah;?iY?gB+*me>mGJ8|W!%*(!%VCWl`b zEs&UsW#a~*E%m7d}!7rS<}W_(ask|AXW{7Qdw3G+ zgWm)T!!uYCR)+&<)WtDR%-mSZpZ)#!B-$WLjlJ9oHhobnL8x;x_o~w5zV1b;g?(`c z@Fs<~^&HPW?81AmOPix`dN?0#7+Gz37hZi67?GsBL*mcJb3Z~5@lv!>>{{-*<{kTy zOTJu1AD43Bxzny}GTS69GFd_qaTgu4q#(_W5vGy$7wM8-?xarzrH8%CMwE4wp2rMl z6>q##rvcp=x65P~z@+6NTrn&K9Zhz)PhCFK3a$7Do19_nJt!Ff+F%6f)^kqRsNpqC z&oE}=6VJJ;#Ngq;VMtAKmO-f*cjrzn)N%kma{B2eq<$UyU|olAPtE+sMgdX1;J+sS znkYu_U(1vSQX~K82cB!ht^Hr4x&)k~|Fs}eBDDFh{hzP=pAq|SDg6IhF#mxl`Dmi# zT`CyXW(bsF1L+5)hV?G#t>i(0T0km-PO<*O0AoO%2T)RDsRm5w1~_0ukU;bSfYrTL zd6~F2fqQ)fAZ4owb)+dG$sDkIJJ#go=mBUUrc%%_Hvs1G;tpl+7qOlhfb3#* z4G!}LK2J)Nxg|0i5E=Xc+eDLby_yt6^|CBlk$BL@u6X`hS&9Mgi~*?mYv4iGNu(^` zILSs*fq1MM#cvWXaQU0VjLAZ6XIaMlJ3iHg=jSn>whs_59a2%^lT(5$grA-4z>A`N z4#Uk6)jfHLGLbYwgVWN%{%_X?LL>Z^50J$@&3^)rVPXXiif}6Ru1V1X`=UqrZ@=1VjPUyqV>nm92sV_9OQjgmy zH202V3wEDEgq{zc7PM)QfEcrL87dYyPS(=HIDPe4zey9Vk7jG4KE?2;zoMbJ~P{ zWVIQPfL7v5kVSw^4R|6k*ks55hV1kC;Tj-oKsUXu1n~y(1h;V?Lzr$Jw*phm0Sqqa z=gQ)L6Z2+&@fSch>jg;d5GNd(HU@MeHnL6VxOHRrw5ZLT1e6N&qTm%bYgM-2$@|dW zkp;@MyAXi*uKt^bg6&>FNB6^GqwxrTXDAX=O@s^h3 zq?*_3id(}RnTBidV`X6`H^pxEtSXz6+!-IReRx&s+l=;Y%$ zZph-VnXMl+PTD_gr0gYIM(cLwst&PR(CK`nlbHex`oyaf0~Z&}Gzylq}U z0O`}jhsnS1{m-$%*aXnp_f-KpfkwSkP28yj=eAbkr^1^TTiqqUkL!hLHeC!o!)5`Q z8k@GMz4-IxD30DEzv8|q>b9!Wa9*99wbwL}8MkgaDH&|b%2J4H178IF_whY6rZt5Vg`xG7Tch7*B68b* zV!gwe9=iJ)e>R`Kf=VH%VSeIr0#z`wVKL|ny763yiA%GEYxSDos57=H93`}0ZLE*C z&97Pd1|;*DosZJ3eq@QXeMqq+#EV9JQq$JlMy=J>po>_M=ZGz7xY?H@kLcc>+{zb6 
zlXQTIzVW?wYNh_=9}Auc3aV^DXUph``V6+9OJt;4#@slu*nvAn3Lvks9|^pRU?G7C z^(|A!$>=sSkwgpQ!aAm0@8iD64Eh8P`Hi#O1)g-xl&m(1Y~mKNjz>cD$=`*v<*N4= zyE4(z7R21#G0*Fcobba=kn?n1n$|JbFHb9~DaAwvKNROT1rH-@cnaZ#c=xw&%()j; z>4B_W7Hn_Zp1<;Uh)3-xosc6+9bo|G;p`BPS7v0mAq&3E@pY>4Te{A3h4cf5@v1UM zMccG@U=4p<-0ue@gwq{Zl5iiv3DenfkL$uk_1yJ-y!zJA0FXLYwP`$F6sXb0irn5F zXv%Q>CcHfKDrXa)3wY%&0pgl~NHb7*Yaf&!+(5B{mzOulwFebjA7kLNt&f>oXf{WC zOBII`O`D1a%{@m`>sSUHH~9ptI0!jPMICYn1|z37Bd7eJOs9idWYK(o`mH*?vHFBe z*Fr^zA{>)UbxqMUkRfj^9GluClsT>!dzYT%{lT7w3)0l2iT(?fQ&+A_j`$HJ08*M$?L&=5ioq#P7vT(&$7t+&CQ&j5GMCjL0)BAU4mLUxW1~*Mp5&` zSiZAN_-ut%6`R_ZlXqP2_Ry05r7oj&C^=lin$~)F?LKN{=)c46lX74@(~zr%^ffv= z%~DanL_SSFl82D@y^r(Fq{jhybHneM(u|1*nuXLbE#465k5#U#PewWfw?Mb#6&C~O zL44*euv3baK-@>FW9&%%w#@oyR$K1P#P^Y#=HiZ;?r+sr*1qG^oOP^QFQ~sH%*Lr_ z7G|As*Q-i*zHGi3Vck!P-O;b3_P)T^SEY2jn|zUo1S@fEr~st)AtO(f)@%$A#^R|JE9Du+PuLJWld*cNZ8V$ zxV`zs9R+Lc#=80CI#Tw+RT-+`%Bz@lsmD9EY6)VFuV?EKoR`8?0z#(2hUJfc#905i zt)d9O4xzp1y={%lF`nx)m~x^gh$ZrxCw4sJqyXDKG+8pzQ%ZFX#+_uwkwy}YY>;~- zMFP5YYMQwsFM#U{#a`3*@)FTU8P!iRD9p6cKnxpXC6O3R9rBO+;RM~V-~t!bRh|#J zUqC7hGT+#X(Gt)!xQD^cZ39b5-U;O~tnpCLd(9|&wReBj$9Fe3$)AD63 zCK+993K5x}93QQPN`kF5a&?TsDt@QcZn(26ALHx@I#*5<6>{f`UJhc1AX83S!}IK`hYu~VoFf;6yxb&J3SIfy{Ex8JyfK z84|W_#HBOP3qCKGvoSm)DfNslC%vn{;q`-Vzl3YQleSP8b<{S>F(yYzA~Wbc8b0QG zf0nxXVtn5ml`sFku22r*+k3Z3W9j-Qa_!^6>@x-AK|*4$D0r*fFWdK8ONqO6V8jt8 zp6K-c^JbxAvq}%utUub77md(JcO<_a(%(c8Q1rBn|8?yd*$Z-cU1M-MN$h4amQ3Bb zQ|P+3QR)P;GY?{P_?nzZtF}eaXx~X&}0F?(d2Z0J2qc*WeAX(X5K1@#^DxX<{ z`}mfrQNH2p%pm2?yix8FvN1cplZ!m%!XgTzo~Spq)kn8zsK42qM}PS+RKU7Rd{Cx{ zB)!Cd&{T7Q8*}3?giTf1UqBKc{NT+*qYlPf>3@6s?`{ee)3FEQ8GhO?9T)egr0PQV zl;K^O>hrh|~0}j%bRPO;Tl?9d2~dpQEErwEphi40W}w-M0YxNko-j z9aiKEE+5l_a(JA+-~UL8&`6|(?ZC%TFSqG2!J%~lBi^M$T0vdleUqU;#TA;mgi#rx z_In#vG6@pX&r(e{?^8(Cl&u+U9QGV57cFg3&i+JFsgC;r$U*@ny)IG=FTqz~<~P?U za<7J>Ed2s&guUI8>I&VWFl!3t;Pq@Q&H*C9BmIC4U#T4=nRZ67xoCf7ztVknBbO-9 z4O;&A*0wHd>s@%^)0T0;eT7}=p6cq4>E_~3LrJ_QvMGL071)KW1X0Lun7l4fSPCPY 
zdjG=&u2q$uFI&e6P@B3HD!iHalns7yTfoPOUUvS3a#5qDM)*La$kOY_{r%v4Qe`gF zL|&=F>vGTaBOOE3S?27s$n* z9Y zQm~l+TnJBSjyGe%8X8fx%w#P3Y*LasI_m_M-Af!)NgM6ggj#Mud=>~B^zq`bD!z>u0Q)t@MIZ%c_M@)VY>w)^PzHQ`bgxxx=al~ zc1CfFRH_bYUyw~JFwJ?knNL1>dDDZdXI*o$B2y#Jl8IJVFE8%55H$AJE=y8UbY-_6 zIfE}R+IEP}A%|~#?es^FuunYH6`#U4`c(y!p1jtOkVI%iwW)V?A_*mmn_R}Rg~bNFwpK^mevZh$~~cha>jHgDyKyhe}; z0UH6h?Ys{lusPavlQw~S0g`Fw*1zJc)y`IyFRoq;9jJP7Nv}fdV&T@hjBSK{gYn2! z+q49UYUNv{Sne<0szU+ zYZ?iLY1O`^jw{LkkM4<97^GXm_EqKWDsqm_oX{=rDUCKXZDp=~O(?Kytb)rCjd3}kYc|Yto^qK zM`u=(a$LN;#fn?_z?kkQg(`)s&mIYg>N$6kiu8F#bjsE*6-P|Mo$3{aNnI8_I<9S1XIw*7hGpBfh60Aa=C1(Z zPlmPuCGIewqUR$3xXD9B??UZ`WTpbEUz!wezUzs^xgEZYw6-|wK_Y(k6!8}4m*A{m zW-JJXG&`5{_8&U>4f-qggHPOG+Ima{1y1pV{zycgS+?k5_d7CQsH7y}G@WMhRj$`G zdBAklCuowHpVhq=N#zHAGf5i%-74&ll8;q>bSJ2{p7SU`_PRC4ASF(~lDOfu$UUl^ zlzAU;%5cE6u|%V!bVf5Hej1F6ONH7zF;?oxcuVmf@vi#ucceygl~G7rRh33al_!)U zT=1kQ@+vL()jZ{^?|j13&u}V;1cTF)4a)aF;dI?X+_wbpC#(KN^*2JZa0X%@bjs7y z2DmlqpXCS_x=X&$A9S~$(f{-BD=yOgGs1-Y{gD+B_+y7=i7YnGuATKoJ=7xw=B0o9 zaOt>NXPgK-&U~)~btY(RD#`&|l*IhC}zIn4jmuaTdX6BuN#K{o<*QL^8y3|*h ztkY%N+b`kuQ?)m6>((X7qI4fIE_iV9?H)%)dXVpGGMs^fG<9)X)%j`o?%amTQsul zPT<~wIPny*3beS+#o98IX^I8JZ4*xn!H14!$mu6MLP*dco!SQO0kzsjPPjVslIWwC zKAP|=jFoVxeZrpAPsW!B5yCqaL^dku!cm%6%a^R11FByPEyw`MixMotJ-pR zIK(p>jf+iuWNK?(fqh?~WTO&4-Z&O-FWiMS!DjSdR4$|3jqGG zF647%iDtP-LemsVUeRj)+2Fnwc&DG59(&L|(&%k;N&SgaQifg@31=~i$KB_Dt zmvWFX=HhP`xAv)Mt-s+8s#BZL2uLN&QK6y_T(+`6DV`;a7F8eieVksos72|rYm`g( z&n9XU@WDg<(D_jOHVtLsld*~ZfHv4w6@G)?$vo+Q6Ze6$ zp6YmGxU+Uqf7ds|@vd6BjhEC9GFBRkkaItAlvDh>;EIB$GXR#YznQJ>_XrU~DrGzK z2wB(jH2rK(-sA{_y4Z_yE`DBZJ|-{vqG`Uae%5F*j8fa*w6)#~_1nzYYZY*fFE`x6 zqozNT6h=Og3s&37U-$U7T^v-Gd&9LTzFeWVAfku9Y;(AsuOUvIY^~!IVLPoQIxrY< zPuZI_3%cvC(NrbOgUmV)7kxo(W&oeG6omEFL2@T0IEzjv*~%Ct{EzI$NkIupZ6}6a zAz;JChqzffo~?VEm#~UH0yW$%Klm1R{i>i97-cZwpx9XnogeZK*UPx__#p*?Zv3HA z;#KO?`nc9pD^FqUNIwlA+;q_Z%7Cjpo_$ zQf^LL3v4^9G86Up?-P}0W|7rB1lp&JwS zS2NoV%$i4dFsX9&9f&@wY5onA;TfuaPU+f+R+6T!JkO*f?;ZMWV|y?bzow#VHn_$i 
zeT9&5vHyW{x-$r7GIV|lC!)-1g|d%|HxP_~+2{U_B1R$xH{i_)*~}?EEaqZO7_=np zc5%6sz9}RtiLQVt=GcLlw&CCu-M8|gDQcI(o=xvp8yp#J{wXFa`vn%C>eTH>icHwR zjjVk~kZN@rP7jLzAX|*Qb}@%T=FY{duv6?RJEILOR=!jv0tw}y)NJ}ZD$RG;q#H-G zHSe(WQj$d=6?=8sWP<^^u?Pb==A{;vY1>kYI}A|ftATo+yo0-a>r6gr<)2u#l|5&= z4WNc()Gj?tZb)rE#$wKSFOZQ?m;y@XG8K7wa%9Mt4n%2H;0XjDAXFUJZMRj;yr+7W zs0a+!XSu&_1zUT~lHw>&u0*NAh2R#c=_U=5TQo<*$gQ9srNR-v@;97BZ~Mz%U41wp$UMM`ZqU9_6r|u{c$q?M!6FZ}}++0xDvvNdrtB zZNDe2Uf1f0>0`dXj*7yYUYe%Il^yR&`nJy7;^oMvYCqT>_L>4b&-yo7)!PXY*HYw! z_77zowJ!FvkUo3TgG=ARr@YNPnaL=lR_Uo2}A58{-5>+-rvVUwPm(B!ctNGSO|w7%u0CKU}-9@Xmep zV@rdc)@0Q||7l3~>8V_E>iv^a+v)C)yoF8{dWUP&_uGvd{~{&r*Rh}e+3RslOWB1* zj5h&wt0&>+efnu!NxTA{fR*>}n}*j-7B9xrDWV3S_|f@b}5`yp6rt2_}HV^WP^uyh>*_muI)=Uq#a`G+y;G z_77c>!_M74-o(C(5^&$S&d`kU9|7V@&eXkam3>W9Meq=yUITPOKm|{LP+2J4P5w^y zMaa1s6mu>i;;`thKC*Tj1}$(BDqpwF|NKx_?Em|3vvz!a(inS}#km*hdI>!-Gk9GujCYGkQ;Bx3i-tF0~NL76mnc?N+BX*H>(au&~lYlC<1_*wU^$ z$Cd>PvyTf9%JdQ9qyF1N2AyV%jZIXarRgt!F^T$m;E8`+Uv9agjea|IMog8Awc?_I z!eruRG79S>^kU%FR%V|{CA>&P>%ngBup$GboL`IUAJx#hSaS>G z0{%>F>qV6%Ur&Rh$Y@(oUg28>C1tp1t*Vk=^d4_VXRgVn&>Mfp(S6wSmG_7aHD6K*W~?^W9#Dp1FuJxJ zcg|5Ja%;A7fYsyN5r_q@GCnjK#|gDGHUTU({d|0)<|VkUquVmmp6o|OvT{O#XWcl? 
z&)s**%m~%a{D+a+fr;V7iTT@>gK#GwY$aMx`p3({)U@pt0Kr`<6O+?D>?&|MC`-ZP zcMt%O6TdC|dezo@hdXC2TVshty}L_~ow!;5w%8Tj=lw#+Bxmd8gI|T|KHjCT8r!G| zyXYJyAuSh}Lt!HkIx(wcsg=lAr`NeX)UEk{18Cob?c+u)+H;HdC{Tn9=VZ>=HGBDd z{%_un4WB0K#%_vR%Vh?KZ|XYUnA9bc&7CNot5snsunI0yANDSnarTkkFWecg!%J_8 z9Kx014Yt2F_!?X&om`nmW~&Nhz7HYWs%tZkeQ_56o}R!ea)C&Rhg)0p`evit)71%Y z212F3IbUt;M?!{fxDgj+Am$u!=~^(4qq?1pEu-EYT%C&1KwQkp$XF{56`w65XT9g1 z3^>|NH*IQ=$)J~b)4i)YU)Geeu=|1CrKiFXtiwDDr+E;f)5T_I(aZbVha!XBYH6T= z>u7xtsB7bi3EZl-$$~AYjv2gaT&1jj+x7kimTCpkh6!CuANaZ5f=jwWa_RVo-1#D< zN9XUg;-5OgPcb|8v<_ySb*h`q%6{1AKPW`&53y$u@bP%l4gxZGGNL4lT_vw$8?gK$ zQRK3A)~@;j)+_YhHd@S-zfQgo;}Vvat@A{<*B|ud3$Y=g;s^9 zIZ16~S449Tsbkpb)|UCsWWGN?*+vGG9I>AX?pN_rBU=h*)jVo!x|mx|)wmHD!}EfE zJMBa@mpVKK_XirD<53cd^pz@?^g8PRjv3;0crKfWcDEMzhnOcP&oyM>GLq}OZSgcd z4igH}vNb*B=kWkM70m%rhumJXz*e0I4z4R+qd0Xme!0~d!Y1czFil7Lf~3?jQxXW& zv%s5Eo9mw-1VT>C%hT05#rK2jDQKCuxjZK1rf;HSD30W4;s_8kRq5DqCi@L&`hyRv>pfx3@x+c?LlC9F#4tc#d^cZ`holE3vRYccm!=q9kWnFhak1rFr! z=X-i3msJd9G5DKCFb)LWNsqqEm0k>NW$$BL~|#Gc9xGbt_N~TdycY|EMqqQQ28! 
zcT#xvhFD(9TgvOCyjaS!yx!7A0fxy|+7Ho-gMb8Wt18&%qVjoHRS3Ch1c(0I8eqLb zu3|#{!JEbuNIFakrZI$XMYhrGI^;eO@q-p+l((g#ByL(8o^?>kKJYbt2sKwPXS341 z#GZCyKWK?_4zJHvOg)lg6FF|Ioee@IpQ7rctDzhlM=RFrK*(`=6lxC(aXt^Z6CG$s z!o-MarC`#-+wugOgda@Xr25%)0jO6ORxwD)OBZ zLTvdY-zcUO&O_#(MZJr0lSejQi~w$yIOZo>1}&SKDzqRzb-80hkx|6u(@N3(zy^4} zGtm*1aY1`Z--8W2ewtu^{ln(c#yM5orQloX??yS#Uqj7#4LOA3A@>mk)&Wbs-w)TL zHmtlfhhCP?e)EzQvE4U1Y(L~sN=lADKYPAwdVzPB;_4sri_t=a79jo48XoZ1CKqvfB}8Imau1~ex5TlsVuEP#hkc8p z{a5XT7B1}eS*OsZNExy|BxM1qT4kBu??^FpzSG4B5c&eBhOB76l-6BJRm$L{Jt$k< zr3f=o4`w2yf(rz?`Ygd}>rRf>mVB0R34!Ex-?^_o!{(yxG?OAX^C(3h4cxh_7Ml2| z&z?asbj~8SSmA~5UJ3mzDeo={djE2%Li)Y(JT*mbMbXp}g1-ZnOg=sEvdsQa_aDfB zhv-|X^&Hia$H2`#d0z(?HJ?+r{?1DmtT8NbO?hmpWYeaq=f^D4=lFK*_^Bkt zJ?1zU`%tAkXqDoz$}o!1he+^ zDy%=N$EuTM#6^d~M(5-7xF}27&ZMAmK~obzOe47f9|neHsV5|zSV8>YyQlkERb@Xe zvYjCz&plpU0Lt9Y~hdh7KGF@~Uy5^JPul34~L$7~0M@Q{ROdl6Y zt>GUZ_Gnr~a#RdibirB+e!+mt@y|o+4f8W@ogVkBnik}#P{ZV#VKC=*z=-x7Uc2u2 zQ-WJ=N&McHK1$ewd+5ilSGmImG?eRnOFyC3e@33%i|vD;b>ZK*%}ne&BNlQ6xW@P# zWnSGN;EFi84mgO$OOQ&Q{D%i@qsx_FF1_+v(ev&z7w!XZ09A|(lw6lQe`UhBa?Ga| zfNZ;o4mVAIb&~sRp+STRT>~lyEMHgV(A6(?dhsYsXRlDU_H~Aq19KsVZ`;5&ZRCo_ zQG?U61iFEc9s?ICTPV0^^ z!cuuzqSSpAOBOw+%R*9)cG zVdQ8(6WaJGdzpTDqa`0>$XyUANl)I5nw>on{1m~Y5S3B{P-rwo3TF1+ zAAOBjj}E)ix!s>1uaS^;g^z9lVWzBF)a3!>6pt2tz$^NR)DZ84<_n0BedE!N&x}Y< z9v|lUQw%!w5h_N-Y{Q2ETB?>T!%qzZYgGZ`qmlD4ygpT|Eos?z^>|9C29vj?>qvf)0A#NT>e{DYTq?CAGQ1o%^3-GGm3C48XpD zq;8tG;9V}PS_MDyqPs8$a6*x&Bl)b;X^#DJU&d5 z&1hVeiDv*gwWBp%%0z?_<9vvs{>>NR92Gpu;wQDot}51O(y4MHhWIT~(=E+@HKsbQ zFFXgC9byb5BiVQNs-8e=W!YllLbP)A9MjGahG2^i|8+6$EcXUrC<<5oAP}8q0JPm4 z69!TgeNUph@9}^b^2SfM(D^?&1`5BLJhC0=7W*ZAgtrvq*3zWq9A)&tcg3})jS7fg z8^=_T^rwRk(OwP3EyUD(ou?4QI#u0e^AU+9x1ht*#uu{{Fr$l+irlk zYXT+*0awg&-umf#CURdYbq6jwzP25_2ChS^?SbpP9IV7G@9TUgiPA-2ozRYSZ3NlF zxlU9=?W?!z?-8Q)1eZ8+wHouHj98Vt&QNIkL4&vSO|04`>hH;f6N*p)XbZ}FG-g{T zPC}4AQq%ko38)g-S53DfT-#?ypk9}cNP9RQY23Qh$U-oGSQnyl2;{^^hOjglsgZ?x z<{Y*Fv6>g_M-J^k#)i4mOEeG`j$r*+1odyweJ^#s#?HZUEmvA$gP0I4bVK9koS`>7E 
zi=_--p&twYFTMAxOOK>@wm=d~@lsVE;&`fXgw?VCFz;*65z}}fHvQZhJ3E5iRv(XM zk@c@}1WGXPn`25+x6NNPz_JB0V&p!V26N1r>4Ga;kO*{0D>@8J(#nx8st*>aI6N_) zYJ3oiw)>9VQ@-%TsH6h7a2<*b3Wu84O{z~bY+0&Qp*M3{NS%W%j63$(#oYY<;V|jA zOd#cJygCeGCJ#pVZHNDgcC*;r3pM3$LU8;-Q9_=JO6c=#u%d=8XfGbW zC~769a2kd0bI8(QhG z-2W=)%p4#V)8uj(k4|Q}e6cWo#wh=Xj&QnsWBM~~fk$aaeHkr{)F$&!tUjh zi;H+p=1iL}WmY3@hxMhFjN4E5u!ofIv?jF+ShY}TTBve>ob{Y=va>1{eRvgg#^1xJ zZbHrE&U5ntI=A3Rb9sn8u#isda(ZB4OdG=tto>-lW4~52u$Y%2Fv7!)e81yc#}Rd5 zSkaS&I(r@I9P=Ah^u=}bdPoN|@0z*M?$5pb+gNO>wXKbh?V4&iASaXR^P9^?cKa+# zLjMCtqsLcGxcSf5t)~`ODRF^6U5?Kl&%u{^FFw5DLUJz-Dv(FLe$K?A_}EcXAdQ-O z-~X$ps9FgOR}?E#jyWld;^$jT>8cED;fDGmQRYHazl68-7%(Msc!&=a7isZwzj~T5 zQJa2HIp~_2#_HIK8r4Ag!^RUu?~d^%afAJTH9eafN4{PS)v{w6iC$THoV3ymHc_YJ zpxa&CE<=j**JkNb-~_on7(Fe+3*vk%x!=ZY;4mA}Wuor?Xu*+>KKuSk z4f`x`3Wah0{_M0?-Rh3m!luF)yf?3?bKVIh#jLBB;pblMzV3i=E>=2Kemq9A{&@Vb zpD${qzsCfoVG4n@y|_hQIlI{Vdjh-$erFw?>&&$`tkNI!19vv=V!DeUykDvFCIVP)9ZXQPLe(|H5`cs2G45$H6YY(G%fza_CPoqd+@EAfrf&yq(*ZggK}!VE5a30Vf1r^6%7Lm#t7 zDzY5nYk6cR33w^m`_Ogtt-;q^%Jzk8)!*$D zecT6MzWNZ*esxQo_RF^PS`#-*N!$2b05#gq;J+tGNSvCWs_8cj{f`G##35}-3o`0g!ksaZFL>7S!6 z!<{kS?~@5N8FD6AbK8bbQz=nA0GULnfS{oMQ&Xk7j68alU0qAi5k1fR;V!nEx(_|_VJ{` z{NBy~K5zM|*Z&RfA@FcZUOX&{um*fr4R2vAb`_ZRjp6*^$6$XdTyBM-Xg3bH--4~8VS!At4?_HnkwPh_^iEt{FlBKCYg9MLy|=eF z=luMB@^_)@XNisK8!_t%XquP*_kEP&iwy#oqGZt# T-!)>m5TvZA{;cdDi;({dv4b$d literal 123004 zcmeFZ^;cDG)Hb>i6)eC2q$LDtm5|;DC?O4-4vX&YP!Uihlx~sUlyrkdOP4eXn+|E| zGZ)YMeq)?J;QVmL`1TLagFDu`W6o<{^Sb9;ZT#Bfo}IS#CcrHR4`%l~7n&6sxkgZBV@GUw=~7wyz3(V@|1;wZxJK zhUx0jAvB=@Pp&`q$LA@SeRd=;`_n0ZL#&B%=I6fNYL2A7ej*Ize_sT<+Qbdc z$*x^X5OQK;VUZsG)I0A_L?iUPIqdJ>zjJe@dH8(>MnFGIlDe10U zVVLMh%gErbPHt_L>#tRUPp@9R%FfPCW8VrFEG#U%xmg(=^(=_O4ZvMQMFDY}G&4xp_<7uH@y|;elO*DEUNpgC6`g(Aq zpu^0+-QAna%tE&>XC1f{j&U$DN}m7n__i%EV$vl>)h?@LS{yz0c6Nme{6D;RcXt~a z8hVNvo|?5-4TjJNtM7=B1vQxRQeHe8kYFz_FMqiI1BNX;y&^03u_Klzv%8w5cnIb~ zedWrZtWopo@Z{uzD!JFMU+WN@;iVIlLGGg2UO~Y_8T|F@SLW~*dOY4KX+QYIlkfg( 
zmfXnd?k9?Q4KHl}`ST}gVk}0USx^((vviOnFDK4FzuO0I;}Z~-{H;I@pE_|;68$Pi zJIeqA0|N=dM8(-()}kUJI^%B|p2mu}6{YrswTHviV`8$NpgK}>7aw6s;TE=m@%L=_ zHwRY6s&#+4>FC65F!(H&4yvD1tTm|jIX)_=KdQ<{ULBVFs}uEx1%vD(GR5>&De!}w zGMRgW;=)d$19)&HaSxli-I*G<4NKl%U#R@* z#`E->?zQ-{eYd!|4Z*zZ+sdp){!C3p>Uw%1uV>GmJ(83A5FSn=oGM*CwYXSqHBv@y z=&6D|OMUe!&y#Pb^In~b3q1imzw+|(Wy=KYLUw(9z37neym8OAU1Uy3J7KHWLn)~r zO-(ivbzWekt>s{~-t#>-1L(SsP)Qyuf`e@*Xlh$#lI*1+=rsklGmxh{Sd+iKJi@}g zzq|Y3;X|R@7b>1L5#h|`<=?U4Wna5~Jti@+)@y$ohI5C5c zOTPrr2Y)4T3qjmTz4CKTFAS)CWjMv0JinFC>eW>)lw-R@5@%$D}VxDs#r0vDtpY9mNolzQTLL8 zL1ZN2)%Pq0IB*MKEb-m4?(S7J)KpYCaUA$4oxqcY9c}JPk&MhC3rdf~Cp>@Nd^`jI zNx#f8_Uq%h;ZlnLy3P6CrpLD}0Vrx}Y9f_>WXs9Psj8`ASg5XC`4AK1x;fWP=4_y? z9UB)noT)@tIReOn(aOpRQ_21qeTTTu5JnvS<~#%StSJ3F;_ zC<#!VxNyZ~Lqo&TTZ<0%_7QGs0080PJSH8~Xf*oO*T zlijJ(xE}_|VqQy?F)ea4E8yG~tl6BH1rAU&UswZ5s7RaA8M&YkYgP8+aR3JQu=PwbE-z-5exn?0DY znwlEbvYcDnRe0R_xA5pV~~9@M}v;Q`CIPQtPzMRWy5XWjn&dnzBX z%ig9L4jUaE9o)FCP_XV?MGsgGU>K~3uco8J#cw={CmTGwyR@5E^M)LWy@|JbD zf4B}u)Z$=)u2eXd*uUe*}?4ZMsz{^-cGbRiMvz?6a znJ%&)jg4%x@%Jc$w^wxy4Dt{D)Y|@*Fp;`WO`WT!|M~HYRCg{`*7=cgo77CpLM=cB zNeKzd*`E>RBI}3WcsfPG-@jL>Uu%!Qw`lVd0?hH+0Q%rJ@p_)zVSM)JsGeJ8cRVBO z<&injR7aYNXI}*`Dr;*~fSD#FjLsbps; z+6AiMOJ`s8$B+9%6a)yo7w2yoFVEaN-4c$WIy^W4G&3mJ+1OC4HR7eDQjo-6zvw?u zP<$t3N2a6R$b9);6sPWh=@BKk6YjilZ?|F|K-jcTNKL>-S+6qZ>o1y5Z z`DjouQSVbfWSO6Xy!Q7N3qq*{KZJ#S9JX7ZZVAb=dp_cehyV2H(`j8b+|5XzGsc9Jm6f%X|6ywP5uG*w0&a1BcD5V> zR!xn_{HX=Us?VQ4Yg9jlwed~OVzwKt^(Yth0AOIdstF8ae0;ov(B_-pIjjd)YWMXk zS9(4Oo=IO|Zlz+cHSpP98iG*vu*&5Y8`~a?c4u{>K%^viQ`ZY<0N7o0l#`G?;1C2_ z1A}oG7Hkd!BV&3wvqD_jXj{juG^SVlXOd|3P&5M{AI3=;t~uTO9zv;3m5GSQwxyih z+w~V6{`oZv+r6EgtIh8jot&J8&{;V-!uHdFck3+*wHia|CYC0Db1fj2JCm)-4bxRv zzP~m4^a6f!Tbo0(n;<`%2Ui>1J+k`8j~@cJy{V#civ{)h$s8r!-VhqFjqo3fEIR{( zq=m-u&d$z`AmdK75&ZLTP}@xbtVuL_4St|8PP*tQM5U7bvU}cf{Zd&l4`!Q=ntHU0 z3vMm0uU0oZlCig9K0El#LqfGKopPH6U2OXO=x{%>IziM!!@KQwxi^*wSFi>nY~R<( zr}Q$Nde~ygpRnMcvVRhq9RLraq*vj53AN$qe!!2wRzDRCZYM04;%&v79s_e8M 
z8y42pg>D}-^vSC_YX9eS%KEh(l65S18XoTr9xy*`bX8e3Go=4fVK$SJ`06U!M9lSJ-(;c^thuR$ZRL&X}0H zP6TnxbF*7Im{wv1Eg&EuEIcBsDA z@=8EzCi?pNn8$KvW@p)@k2v+K`Wk~j6cns}eMJzd3*p?PQzdPLAW_lf;Gf+Hxyjm} zhSvUvZG{kP*aN5iLT?1w9H+|*oo9MFgtXNFetbe-(KcmY*T+&X79ASj@#a>G3{o+nJX$PR$o6r z0xI?C-BE|mq2lQEQ^SJGZ%LlEe7M=}-HH$-(|B0n$PFAG!-SNwHnEfqmK@?PA^VBnvu89%7c=1XKxsFD~Zf=9%-yTkRCo3|h2E z7Gm{`9~p0|chFN(Q(LAo2g{Q>!$3BOF1d)G#5=AW0C~38!{s)Hn2gq>udhRHD%|{# zm}n(@NkdyZ+mmew+%qX&TxE4dlhE$Zk2eD%UuRf3IMnm~{r!iF2fq`ebM-h`SIjdBc+MY>38!gIyYoIJC57MK6rh(Jh#XkO;1$Rim*}{%DP_!4M{}?w7LTq31Q5}v=3|fWO|KZ8wF5%6z4WOnUDRvO zu{e|{TKc~kFbE1(ejlTCoqPf5NuFW7fwt~L!pqdu2Tkas!pQH%S{zeNp*Fm_8FJLa zZdY%`3fO(JFD)r4iR>(-uy=6489_u4w)y6T?k0OO z-8h+0k9Gs0<|SwZT4}nCZePC@#sJsRDmD)0!G4~>;aFD>*EcqHmPax@TYp*dhhD$) z=ik~Czj2%LO`>?-f`{VbzV=%MK8HC|+{vjxc2o_Hz*ivji+b>-{>;tH%p90SWFUb@ zdU|^H_C+;S!18mhUbX)9`M$xx;#QnIhUD_)_lb%6$Ii0+K(*|44lZ2jcLhL~VtFu! z!x{NJv$3(kMTC%E2UOF6t}m(KjI^WiOWkd zNgla|Bf$hUNMukWZsLFLD)TLeFkp#W4q?E{F*Es)-Q+*{(zD$2=|K>Xt;LBmOw7!n zCa6a;V1mmngm;KK#rAXUS3-%!{(bcJR>1_8-o)5ql%8+T_ht_1XIrB&-`QV8rINA3 z4fJ9S3PRt!87i}i2M*fNVO@GUjpO$1zp%zaiHhT@dU^@K>Xr9_&zU*p!4FU+YG4ou z&;V%_bKlIuw$mN`C2-Tu?CqyB&652Ewjv!utulV+96bkF8(?i|NQXgpcX#W*yr^dw z(x-{MW~#-+E=LD$d7alklmACu0wW?Ks@#i;ixrsFJUwfeG5$;dX9(N1Zv(&^erNva zvs}Soxwkn5vMLryUfbF@?A@tbevnyehV)ys2#5p(g@kB`R#Gu=h5MkT9 z7q^@gq7xJIoF?IpzT4`lYj^M7jVR_>`c)`72Ja+9$bw zJ?o+vs3sIiYjC z)jm^>JNLL>{Tk8FhGE6MQ|xEb!48yZ3bIn+=mN9A$qi^jm<2O}`M2R`h zg_VF{_LM&!x7?elF%m_~%%og=oA@a}Oce*3E_$PTa5SGknp1nHS8>}C8 zb(#F~qP73!%Tsw%wLt|11?%S?RZ?3R8)p>7N}exd<7t;5%(>~j)LjoZG7q;#<9VE< z-zc3o<{>0@JEu9XVDu9~8CqYr_YmR+`#dU|oSii({T8~=^5>yNt%in%Vxmx~+pGsr z?}sB7>BRIZ_r4O)@vdqiBa^BXyaA>LxliOJX8D+pPVX+M;=X+Q_AMd@8^95HMsXx? 
zsMv&EGu=M7RREGi2&B5+H~Pc6a&vQMwm?M5sd-R(ynXfa)f3mqg7c2F=Nk4 zeIw9vw4ysH3DZvLUZu^~v5tW09{dsn$JW?qaX*r1F!hX}bF~&C30u2{4^bcpXl{bc z%o(r4FAFJod)E~iXJ_sBX0Sam=8umTJ?fNI-yIvPcF8$kD7NvwH2r~btHbVw4!3j zkR>gTu{=l@mV?4Aot?y&E@3Xc)GIMnIgGl>EU!EsAJc9uyh;dqu%)FX`@4ARd)5i| z7))UQ3j&>L=RA9-!@Vtj>$)-4}@s~Y0OgfM{>TmvSdL6XxrS z!eP5jJ^%dHynK4`AMM z_j>;PIlu|JG@Q-BOQjZEc;M%a-{OVgG0PL8`bH&}Q9oYrg9j zx04egjaa`t0|MOi^mC(lNS<@NX|&3%$6zV5YJ8$3&l3?v^@hRbS#D29z%u$4_xJS) zq$PtYUG_Nu=G&)~h&5(+#erJKu`BHurJ3G4$;sur)PC|+1{*PcnA786)T;PrO#|Z&eVkYtdG;>2Z z9BDCy%EGYKdEFjia2e#UBj@5FjtOBc1#$n^uS>tZK?9&|-NPCeIR-+g2YNl?(>XUg z8~x$K)2+O@xj7I-_w=fhwog4!!`x+EPvm_u&QDk*VzDU;1oWH4b29XxgS)_H2V3O#&WBJ7GE5w_)oaW-Ry{0maCF=RO~8uxvwI3m z6n5Bsy7@9Lve2s)oYv5L*GMvxnuO%W@uI+Av<|FGg$OR#|IkrL|8O_7@$Jry6m)bT)o2TI zzgEl)7qYp<4*u`5o4Z_zD^#G&&K9u%Gd{kdj%)WwWx0O+x@o6qs$%9NSy@55KYqc5 zF_$l2hGC^2hHF4-K8?c-B@vvSQ`XeXbh-$`Ls=L5cnfVHD0}X1?pjjd~DC$#w4V( zplnuFR$@=dxz~;l4&DQnOJ4u)_4f-D2n5?+F}v~=&?>&&7-w4rYy~j*GULN{lzdpX z@4PsVn>nHcXU@RcQRVl8HNZLoBI@S$c8ucVl)=S#yYE`s+PmPJUh6F^@L_9fzG}9v z*+V6_R@pE7|L@b=ld}V4cJTVYXSuKl|9?E*o~K2Q>%Ro6_xEY&Rjb$?K5QtZk_gQ4 z+1DXd!RYJitLb9Ys@>W}k^`#F(Q9O^v;oqa}qo;l_mY=Y!;u;rl7Z@q<5uD9Bm;WF{P3 zjTZ9#<7hlsI6wN>%=7l{P+MnFWVV>u$0abs|H`s^H`nJZ122x%I46||J@bB>DQ&{t z*+GlmE!?ygKOPqwtCsa?#Y368V9m|jxb*TXoKvwG8g-g{QvOm8{Lj-D*(>GB4`KAx z*h|%yt1TRg`YgX|sKwnWPLaH3WD|a*++kREbEWBHE8NhCCRe>cR4oQukvl+9~QcEaITqe~q& zPKU0cXy7+K%2ya>!k#}77F}$w#M`HOlqa=!=N<=>_~I`-R7H?uJ?mI{ltIP8!COw% zg+lK~$KJKmM!TywP$ga%zS5qixPzgaqL*0xnq5~rTh%SppJ!!C&(e<35zbAFw59S) zjMJPXi$siveOk)uaSk;yZWqd{GWe~BwdnmeYcP`77U5HsW#%)y3Wokw z8*IP@++DWU58!~uqpci{|A;`I)@eeH^Gx=;Roq0Ro~X>f+hv81AGI{o5{unPTOdhF ztQ8Srecx8>*A6h6_xJ(#(*{v*Xp-ki}l=L^UJDm{@z<$tSl0NkFtu|T-GN9 zo8Qh%RxiL1zOS3L9cn0G%b&TBxx1OcD5`pg|D=AiwD^p}|;oL0J&b|vhjS|q4 zbSRT;c_2u@dXv0`0{2 zk#)15OE_>e=_w*>M;geZQv;2ocaDE}npck%ohN%~jH-oq*FM?0{!p<{NPbX8BD^Qx7Nt>@npzca$Dj8we{c zqVT@-!ws4{I9oeJQ?RBC{}#q?BOy%-wzEhtk(qtx2N+_HbGB!@O=ifs3+E;&px$co zRu4BN_WATTKiQt2y(rWzOn+ZUa-}LJQ@To@?;YVA3i6g^Hk5$w;ag^aB2KxOAD?Az 
zBG(;F|D2`n{-KYAf(BptMoX#=YZB|uZ|t26m9qdxK>zt}xWUPNCmy8Yye1G@~8&DrXL5jJ}GbaaT) zqrWm8|JB5(JzBU7noC^Ig=hpoas-1DDwOxk7}@Sd$TOiTQvTa*o{9a9YGKW`(9B%< zF+Puln5^b5-=o@6A`;Z7=KSk6FwF6B4=F+B4@JMOAX5qaZ=Z0}JtwZ!ZZWe)MF)pT z8t`RC7A|#bSgn7{eRqD|!$+-%w~Do*t(p|5s)fw_56FnyPKl3zZ~fKv2vygs|BdV3 zo$ToE!DmP(=c?WaXPCZ$hqBJw{Dzy=v{dPrC%kY@E0OnmVm(~nmp>ieFW;tBX(yxT zs^76mrugp3gPOzwKK8zgh5e1hw>Y)8BUa(Smo1GlE$-Q&uG=dwz|B7RFD$9=o_?cC z50z!M7T`gC*Jotfz#k6xcbNAf?H*>%iEZwe<%zL8i_%j99BS>;Z=E&5-`h#roc3vY z7sP~j4z>J1W91FShFY4@j_1oTFvc&-087Tlo#%Fyjt+3TMGi`SuTOKx$4G4>D{lw) zvE((DiFW>u!n{uVI%mSn;Ie==VdXzWNGoqW z`LO@<5INJHG=MNU%RQlLci0HK6!7{qAw5huC&yxJ)&>m_gV(e9ShU(DoHhrG&89&+ zq*IA$T|#lmCY}4$mbw9k`kW5$qrRG`HiMs5%xyEfr?~@mrDu_SbU~0S?hX&&K*>UH zt*%N3KNP0GC4DdHDws9O(@Q}>t>(YEVM~fC5eC#g^B{d)sb?eFCObb{KauxUs3Y7S z9G_jynbiaXd-bbSa0o|t=VXnM=wsIS6R-~nLuB($RGF3F;`vXef4puL{Qf5+7%m~X z_z{J2e!rpxfu2l)+JhySRRZ+|!}_SWszq4_86-IixnZORL;xxVNllhc+uhlBU<$hL z10>{taoFexbFPU~8%s2k2)Y2+S&)Ecf?Y1nE$7uhVuJ&NP4T*)muSm%vh=L)5(L_hWlaRod64!Kk(Ul?JS{1_z1?5(mUz8=wnSjwXfDsLJ&u5{8-V=KdC*if3m~E@gnj zB6kf4f6F;(w*U(1NyEZyw3Rm#hu2j*IBCiF4%~-C#55OH4Y&~?2SAI9m7H0r@C-`kE)lN7u<(I_Q>@w8@uNkALBE4<_rC8j-uYkD{4A zClA=+`BsOSZ&SB+bi(p+{4k^pUBSs7Vo+ab#%Q}lg=G!x4wH?@xdH2_?roieJw<^5 z`#N)bb^ZoxI`Uc+ruU1Mo>j1zZ3qckax|3An*t*-W%ob5%_Bu5YG7Mdm7zgsS2Jmu zm~|o3TofgcY~Dk{-I@H3kS^Cr832%t%|1(MC;7;Fptr{y&~|5Ru<-Dxqs0^Mvu$<4 zW7Tww-!j7LQ2|Iab?RwDG%KOpP@kSD-6#PY)YFDQss!)b9r~A4-G0*Z=Z6;J^d-dL zrHm{b*CdvP6F8#HtPEp_ub{f>8pKHFaoyU4m&vZ!+RmC^0gpsoT3esqjD%KjpklHPDjq-e?9V+F+4dm zw+)+6q+#TQD0#v5z+908@n1zs^m=iLonvuoB9hU((zQLq4}gbd)J_$lBu2;(#yo&r>k zxh=bByYv8A1~EvLZr;4{ngsHj2M_$sX&|Wi1}SIIr=R;-0$TMj9Ztal=AjdSkFw=|4hS#cC9`pSQRTcW3>F66UImdAL? 
zcU#J-S8j+F<;(L=q)$O7?CeBmO!6luso;OzZQS*_h4!r=sWsrAeFy%smQtP;@ zLv`HKApmD1?^mzc#+uhyKx~+2^Mpuyp0np#@wOsE{P<|ji zLL&6$lnq2y0+;-m?^jV>LJt-7ZXM#S&yVNbGKnAft<{7Kiwa}6+z1R5cIaoTXo?dD zT+EuuSTF5>YR^9}1EzAa0a@QZsm|{h0uPQK_{j(i#gq>s_M;0x8%u`X&?AsM{0VFW z00;o#H3@`jGa4e;#SsKEAqi-Y5uDoP-wQg6GUb9u8h2>-Jt6o z{_pK>X)Xrl-T?As8ia%ee>6CNL86G{qAd(;R1u7zXQvQ|CQy}6P6DW|hYbo;x)HUU zkVfX=L!z3P)vRpB@6xTo;?IO!#Q_rWn%bSf5|IJWo>%|6dY+dO$Hg9?Kb0~iL;ou6j$uUBCqAH-e1Yd!o6pxBHih!SyKs!s`Ldon%B z`Tqr61q~$6Qb)Km)8ih!0&&ZaFw_l5OwIO^zV{S4?wm*Y`hx*D7ut3L0{{kD8u9=k z0Us}^DBusBJ+_63RFJ}sRP(buYLmZs4h7Ik<&)(|PD;_r3r<@messAMJo&pu_`vVc z`-r3YZ!URzkX_1n0f~$$W6bp{sOP|EF9gZGMp8C%X89f=FwT`*9Bed#Po98Uv7Gkb z-iG}b<8)(gzX^~9Pah_7d)f@(kW5EQiH~wovs->RO#JGGi4x7#uKMzU%XlbTY1k_; z4?=oi^=ZNKxbc+ye8m%20?>+wEGqsG&2q8+e7cSps+`YfHwwJ>H3`D0Ml0hwB~jwK zJobOS!B)Q}QF))T9&w)XDa>Bn1L4$jv(Kqp`-Mng3$$i&-GX4R_8An2cWXof6v~O8 zrw4qvf!OIQXRE427qG2vvKeK>u)4+%NOge$$J9T41WT>c1nY>>1*_G}SfK>38F&Ga z{zK3(7#0=Tm5*R#D6QWc%H=Vc4Cy%7=x!nEwbW=sZ*62Q)|`;QMbY?Nk#Kc$42jGa)e;Ep_*)#dxtXpH@9n)M!)K7jOUOl;gUs!=l+tpk1j0zfq{i`dWd< z9@=O6`AXpveF({eo0SxeU~?tvCz)n+2{jMbhyPZ+BF3(qF4rKoD(G!4YE%xq8)94P zd3MxC&W-iC8X-6Tl7sDW#&rR21>{uKlodF^+r@i+BuJ7dACrbm24(sK*gCxNrkai! zA4MKzckAAia{hn{jHLj)C;PE1XbkhBoX37h%59o~WJ5c&@1w|X%Kv!+$*FId((Yp? zK=l(yx{MNd&wFkK))eq1YgzdU3bj;z`-v)y4b-tmWc1+A-|A+!vd2m#m{+nq9N%!L38()bN zb$X}%5vdJXejP1sQkmQP5Q5onsNewv*8{FX@(rBvdx1nL5hy?z*%i*uAglm6gj~wv z3JM(9m;V!kWRQJXo?_hP{c9NHyk(d!I|8{HsFh|k7f=F8_9WA3nMd*W!3Ui)VXSOy zNg3U0uV@w)7E0;};pTwo1{FZ!Hzsx_Z7yw}5 zt*gBS&0GRy3jwu8? 
z-sFQ9%zhPG6#*@<_*@^c8T{aW1IbiDPz&m-p$h55%^R@3Nccoe$^o`B_q#wc-7XN3 zgM>2Aj2&yAK;SS6~!%Qx>Gc6+(%ff_DW3PMMdxh4P=bf+cu z=Jo1)LNDdKa*lqc^xNg^2DnNUTpN(syz~wzg{UrMgupgP>Xafut_2uhy zVJL*VJRXlzh*S0&;gUer|2H4YenJco7GsXJ6g`W4PGgsU2Iz{-1?*@giz}{m-(FYuexdbxp76 zBFWDC+(NIYBi7x40eJ&bk3TB}R`C1D4*(FNqC!E{k0?)xSCfE(1bWIbi2U_l#_y0@ zOkTq~v>~VJ>=5lfAHay{L|nV&nGj582oP@K21;opnH;y?fDjqVl;}JpGkS2cku^vh zda>+VMA*qax8{kb_AW8r`T^BWlY7&9ipgbiUM$cIh3ri2C6q0}5&GimpY}(}%^oFU z+Q6p6lYKHt*a5wT!|J!zEW~`BrvV-Aj<=CYSe}4P8WN2rIqoPOxd3(_g>HTbf8QaA zNJCQV&%+=ttF@nn<)MK%q_V{lleAIIZcOuo(%?w%UT-$49mIuUQU%%Yx2GKGVEqbe@A$u&cq=|*Oq zAQB)0gXj^3jpWMx4}qx4;->3;6_M=A_bL*c?^#)b6RH;j1l!W}++qNS0rfZsn~fvz zLRskz5!ZilUw1o$vc->GJr+bP4M~epCLQNxky$xfc``Sz*6ptV>pvKoIC6&aj{4)n zRj6EUKR%o|_S%QDcklh>Jl*n=5F%3_A?KxE*-*)sD0KL*3B590y4dM+v;mDl=zo*1 zpjV`JXWXMSgh-(=fT?S{^eeFejAwO4Z@JT!0ktdzA+=<(Z{i6Y88M1@uRk-pe{^Av z>0~ce!G3h>e5&0xW@33v6yYEi>i3TiX3=1(6(LZFFjnQ11BGdXYf$siGUBtDN(Vh7 zw{G1s{X6*;I_A2CIrR&zccG`-+~0rOBD9)?jsUk?Q(&0Vr;RE}uNf zv%|4PLFH(XL%R8Ngy{<;9>g!t0_15c1tXYg8^!y~7?eji$bc+(Sn=*Uv@n@KS%|~T zPv~PQ4LM`WeZ1T018d;4(8o&u1G*m064Q#f{+$eE&3NFW!jAd_f+fye$FVq4lx4u9o^jA3nk|1cG=o?ST7IWq*Y$XG;bU z#kLy*p}*0QjP^M?*oF0l%8p>Fd)83jrXzhCZbP2gLa+)MGIE5dujWx`8#+M#k%pav z!*YQOsw1A?MM^}VKWg#si|v#@!^_lZ2k|H7)Spy&Vtj9b(xt7-Xt4MFo}_>*)IEE89zazCbS7rFTR<6$jx;#8FMp;}T@DZ8W!KgP zWC{k@xbrs~$PJ;y(C1)^&bh39wa$a!(!$2%t8ui?XKzP{LG2a4~Y?tq1fNo2j5Q78Fr zoB2Y(^Ck~ru|I>WnN`VRU0a#sLaQA+$7#J@iQvMntD>-!R^MDEd|Va>@_Z)XI_4H^ zoRNk;8=XEnN4u|g|KzP&4;5E$h`EX)j{19HqHbW1p)%;-KUex6;0x27#>GR=@2b2f z{Y=lkBL(^x=;9YHoO|5GkNTp}JkgIcv`a)zkiM{r!3md+tH{4@4KYWk{p&W(1wp+e zU2*pEwLX`w$+}krJM+*#xM+Cu%S|Vkc2mfXyZiV=o^CYsD$7tUE-Z8;R61_|jLuZ@ zF)QS~J3IrmJ55+j&Xp@yvS?;6Zu`i%AN?ryNya-oJiEEE*a_GZHRjO?SmaW#%eISxvYplw*~t;NtWdhLw4 z5K{|0u5O3FgxwwgPJWc$+uG<}*h8&kfJ5KE@Ac~CUPr=BE`wYFhrJ+TG)LK;yG8l= z*02vSD>QTr6*w(%H6#(YgzPvUJm{Lj-xC?mX@V5(!pB{t-@Crk_sHukVeP+#1;b9m zy@TT3h7$Nf}jY8nA;1h zo2(^>=s0<;C!rDHZv#0EP)JF2XeG*KJ~wmEn_PHTlj#Ot-sa-$RroW0;-9{HpYjMN 
z03EbTmxc^F{o0_XxO|s`^~syx^{GI?^V2j#11=rYBu0E@5~+CqBqZc;GP0gP1Khmy z$5nNIqS{67T7`1rP?GU2&%Z-RsQ zV&&moPINZZK0|*LbWPsEZ|m$V_hM)blaP>rwl@uE-ufm3#b40aV}1KNlnAmBFeaL{ zfm1^$cKn=q9{x}Otq`PB+xGgSf#O}jHz_W;mzBvOd7$AdjdHItU!GfIC-O-)ZlDo~*`ue>VApx}6l?l^0qGA-@-!N0Yhp?;H2 zJ`wlLIjA^I-%u@s7SVmAf4FNG%Gqf|T$P=&aYRk;C>WQRk5fOA{`?;;wa~)B9yk|5 ziMl9(i+NkDzF&wNmfqj8H(<3#5>e_-IQIJU;}--<9To_5pASc>GEHV9xetvMzlAg1 zcu_Xkb9=pS zo;`(k{kS#J-Mu~jVdJ49 zqgHJuD|S{^iaSrG!q=Z07rQ}MY;XpBWLk+~sx&JXSCX(xeu?GXSZ1i6HSK(uY6pd; z3c)`*7J1y(r^q(-L7I21sUp=N4alOsW zU6A6EDJr}-G%;6NM~hloU$WU$%M3Q38-qecmkBS;#p@$4i;Jp8I${j9-`tX zgx6c{)SKJ-G0@d4N1I1UB`TV9a;|Zk3TN{t$BVBSLqBgTPauRp8w&v7;>o_rqPXJ}w<%?LEQVhaa@n`>+bhPJYF88Q$9vlo69YcwIJz3h0KxDLM^eqIYy)MODaS^a?yv+f#<+O5};aSlM*KAUn*Dwh;Fjh>}SX+cb{*<5$L5+?3( z?ONYXdUU4ClIfv};E2(Kl;=~D>*F3P!O3wlr>^*>K^$LGySiFCYml>A_V?AjeIG?A zA9$BgVVep{^i?Nv2ng8DW4ozFzN%SL_? z=D+#4(&Cl+#O& zglwU{{~Pi60PEW6^u1Ttq=PAqX6v9Ot~3#zq+&T#^q?~C!-t-eM?FA?ODiXojeh1y z1D;%;as=B2mB{K;Ff}>j_PBICs~*oN=&ULWu)*4YGA@qQrXRU51LOZ9izpG)1*Ei$ zqQD{0M{C46Hekab!<4wd6nN?F68g)J)9_SM zf^&D$DzPbt9xB(>BUwl#YSvrXFWsD0-*oqaTH4eA_Q;2TyuX*1X{f0=3glAJ%Si7bG}{eZBTXli=Xaq~U}y*`Eqos# zPbD(-$=9#Wh?{=M?=*Z%?(?a9`deA6B_?^DhF}9u%fozm_?FeMx*@y$TJW){3^-WQ zzy5HI3G;1&f=q&`8;SYQs)R=S0Ej~Ngt>VG%Jrc;Of*r(fs225>BpO^t^M#A9O0`8 z*uofS;eqQ0uk_D74~G0fIlr-?fsZoOTq+|`BTvVuGd2WT?<-s3#0WJxbF!YDL+O^| zKF3};BU^i6Sy{??owUB+uV0kh+9S!a;oB5Mb^mQ}^Mq<+TdAwU_>%_TUVf*eyJEeS zH{a{xTWSz%-Z^TQRWZ`0}(k2a(F}(ccm)b zw-)-Nk~0ojenDH|Go>VvY~)Z0b*Ty|pc7mrnZ6O9^+`hN!5bo)m6}hVe$4mk1e`%x z1sx<*^CYF3nF@V6!!6&pSA!?euyG;xc%nTwOoPu+J45V{Do+x>%UWb(vsj~irUe`5)>jE`6qls?@CyH>yJ+8|#W_?7 zRW0oTpTLc*qiti)zvI~rU=Aidi(BAy|3xBbUc`305~=IyrLA-hPlm{Ez1MFaYSRl8a&DgfrOoLeYDhn}=#s-{J=@PYU30np>$R@$IA% zglKfKe)XWV)alX+%~4!S<~Q{2H1}3Hosgpp=2v?=-zL4_pEVg|O*-DWimc0LM}9zi z{x#=U%Um2-nD`hqv;52E5?Nr!5Nt1-ysv1D`66WMR(sPDiB?NeP z)MD_bvBTkVcXtZ0*sn6QxFs;`39;j0_CPnDsR=^n{!RwI{Nve=tQF|vDO*D@8k917?c^q9 zGNSv0M8jdHj;-b0^`EgkTldCYCTf+$8g9sB-i{6FP|$l;sD6R|<*P#lih|ue)xTtM 
zZtK~9k9U)MkB@d-u*;yz&=as0?$HmejfWf`{v$+Ifh(9Wh=WbgZLwMxyR2|H84NWE zi)t^v&p8Us2>2~RyX4&C$%h}ZsB7%uWSE7VVT$smOA zZBQHZ45c_p*;>%j?ksN6ZgTX}F^Ko#0ozbtDKm~ zU23rlRr2BftNnoqR}g)mg~psFZkuv1Y-wUv1-V_wrO(?GJ$=OE$ceYW_V|0e6!Z=6&0_axB=>Jh&x)fUyxpaKf_e$*eu?LkSH!6k9w0mx00&}+H%wX$?IJTsXx+eIh5;M)T)YQc?zC-Jfwc85k_n z`N=u2hj0u}ZI2dxXyMru(9yhTbMgGb z$Yv6%=x{GSAWpCJe$ty34Tq=y4_99qR#m%23l<`&2uPQLfPm7ClG4%*N`n%T z(kLn@5|Yy0CEZ|vz@|aE1*E(C&V}ck``o*K_^BVRz1F+lm~+fA#zaHpv;6Yf!|*hU zB7~xTgPI#Xvd(2Wsv7=U#iVj#t0T@D!^(&H)fS<*xEAwQm@Z$YWy=1}Ib)VEwxg~! zw8Befnfs|GSI*dMLhEGGhh8^!(Nd>H0WEW`daX_X^l2w6>R22XY@fj`i+~^>V)G0~ zb8X9cv7&m!n2g0_C-g0zemNmTp}h+;YQ;m4AS@5P1e*m^AEQdTxd)TH3TU&xd@-+< zhxlpeMT7`MBX_(+{n`0F9#lyGkRDV^sDKzm(w07|@SC@?r#u|EC2 z)PY1*UXztMg1iFiH7Du zXx`ci&T<`7sc@ILH-QeYZ8uKS{HTm*_Wt*GSdCdK$@vPg{CBXgFb8mO#PnBYWV||Y zd^!6uGlxLH=_(Z!v(>ogNF~F51A+WwF;~}*8`E)QcxVH5fX^S2QMq^N01eap+v3@g zy2zbT-@e|ntkn$drpwNl2KS{KJ*0P?(D9 z<$UnfYgM)U_UX4cAAh=)#2TorP9I1?Kqtk&^!v>lakajc*+a7u+oZvYZi`4l@1>)o zi8Fn#=}xNUY)4)5^pFsLKE8t{dqv$UhsGCmdardew#fM%CWC2C2^dH&Ts-1R8A*{6 zVZ6s{zo$LHka*!6VJFNuKu)9&tS&u`(@)&kFMrKuzliqNC&tt}EE ze)r9=2iLDZl3&5WeYOL!T*8gjpNk73UEzRUHhn#G`STZ+KPxCJQ&Yvm9l_}@n`P(D z?c2FJu2yk>fdN|q7+t}X&8af6 zy$vPnwObfNRbiNqhuWglwTJmgyu*HDR~_+mB~ovUOlVu^YG&>nT(l)VKP1Terg$dY zRbU*v6mW}9S~^82O#H|M=NIm@6wJ=tY(~_4jmmMMDqlT44ksTe?yVR#cZyh@BnjZAY<;|4~xHisU<0?{v%c`Xgfege(af7Cj4Y1+sJ5>V#0m1@FK;h`yVv&pNn%JV!YN3A)!P>e?6@5D^j27 zf6WtGUw^hS-4rfiPIdzX!f|n9<{SnF6Y2U7&oe`EW9EM8ot(9-4wYH1A(g{8H<_e7fFQ^L_$hRn};Uu*y=a0)L8Ozrx zXNAfHUqMoU3V1gan_U9kZ>k-e+mEZbxYj*dmo8mu&T0V1Ehq;Ky3~RB1R_ja$y8QW z{`m1@P3?*+wmA$E$ZBzFna>c?EpJOFhA>NQtn*dN!ZKPshGXpQM{d$=4ltMGF9YINWwMmUrG^EpvghNLAlCI;LGq_ecM@PBl6 z7APhNfByXW+^V#z0~bI9(mca#WmVPRskpjc`Sr(3tO-3{k!!Q2SpTM=>v-C**s$sc^t9GHMv(v2mPgMQBM55r1k>}~c za;{)XcWN6cX+XL@p0lD%(J4XOXFOr`eEQoB{R!zXOa9~()P#Tje8)zsUfAPr?nYgm z5a~y!gQE8AOR4%)r`2=ml2UlYB5nAUFMldj2IsC!h1l}uoUMiWK0T0I@23tcrO!6+xG$?F&odIW5LQmFw>#i_)qpz7w>E zy=f#IX_uvDscpup)Vz$E0%xd4d|~sG4KYx$mVWG~SNqp_d|WS#jE?pVE$tiA4pJ(z 
ztvBqKD{dc@ViIg~E58@LS}T0qp2wV5hKapgK2f4{HAzkkw!CNVH9N_K=C=pAp79k27;I=7MZnlcI3|

H{-Up7Q)Bcc{T%L8z#x_vF|;jI5V?wP^TN8$tc=1;M?Q ziQw=!YXRv>>ys^1pO_ba{}b81LYS*LjZbT9tCxAKM9yB!z}D~F@M{e-DB=xASAfL?kYedi)FwtwH1|DHT`smfJt z3BDJ9&DfM#BQ&zj+Wb8^eE8H|%tQAn@AufLiQ8(8NwVuWad8$Z#FtArfe5r`;YB0!kjZ_-R$w8R@ zby5zCj)a^uv77tdJ7e3@YH79@W6H`CZbX_6G?6wXHoj?O6RqeeE;wXeCa4PI#!Sg*67p^@62x>wqPp}@CO4-D*ewS0XzYU68V zWLV=xZ0hW0ZJ;b)?^0ZqC6s9zhmE~FFdT{f?+a|}fEO6&an&w(W%SMEWHZGFcB!sM zBQ`n~$MY$REJBMol7-Rcf=+U&X%-&Yz(~ueIy6A3xn(DzRC^_(+X=ZzBO?vQDyN@H zN=B@h1}Fy(!>rNig31l@>2k8t7w8=FO(c3QprOYS@1?Igdg8Y9-G`P=Y^ZkBr%LK(RH}6 zxes`vIPk}nEYHmifb9I77i;bX=ksZJ_y=#aLd(7tuXXCE`Z3A931=X-xd2rHU{Y#(! zfK(nKn;~rM>(@8hc_z#$F2f8b00jOX%cbqLl~e#|95DO(AfqXK`p5@IP)I^KBz$GM z35$wq=RRuD67mQ5##{$OGj==gQf0SnVVo<>cZ|_1uAOu4E{ux)ch3U@4irAui*INK zYFq=qe^bET(aUcP-~H`-zHVhtv6*6C3jArM4Y*Ee@9GoW2BdwdF%~F^SZ_Xh)IZ@m zby=rHHYy{7_tle6q$UGXzk+Psw~f`p`-v|DV?HXwwvx^WS3f{6bNXJj8`y?&Z7Z~y>OyBV%`Fzjb&`ze z7H3~2sHh5UlfplTp^YLW?4BPWUj}*?%fZo}Qhq$2$M~rSGqVko#?GOwn$JZH;S$YN z!8Ne0+JR-qf~}dc6?UZR1T^dDfNLZVnGu7(Fh-TfV%!L0_}?CT2-fK4Ht%r>ul&^0 z!M@TxqMbGR4(naUT%4Mh*8>w7pE2&N!k(>8e+q%sVU3nvm0ayRaslmtz~3=6Bz5Nz z)CDg>0radFf2ULGZptuge5|OrGO@FLQ_OeR@eSH=#=lJbVd4Y4pbnH;@xjNFqj(X6 zd|klfmAH7fGmKDuci2wLX3>FN@uX5)*{)D6ADP@;vBF)?S`m0y*i_J#J zZh^Q13wt?3mxYxzC%4AJEbliQkh61DT+`OOCIn=BF`Ku57k>_ zWLBMlBqYv4+Z+z_M%hc&HJENMz zrf^8N(0#~sq&h52H#Vk1veIN|lW!*`%d+b4T!MAxPqNb*Z1V&{(Jez zm#OZn?O|Ape|Yy5>}8#L+MKWf<4f=^(1ru!h1~Q3lmi1tWzvx3TwodNAV{0H-i^Px zMa8pG#cNEx!C;Q(#e70xV%4{^)XTwM)Sv5NJ>S)oN}B_rvtsOiD?~-yr1GeI2mVeT z@ZDS2RO2bFsJMs1-}~DiZr9fj10di;F6Xji#h8-S>oUx7lbxH}wQ-XttXm z{TdyuJ)2$b9C;CgCr6S8-YjbBz50Sfd}WjKKmw-npQVxMzA)l2x_kC^*ADj=AjL~# z&qFDNC~Np&2w5o}W8hk=mx8k|s=2vnc(Ai`?%Vfv*^pk&iZ1x>Z)2W^y$K7msEsCxo6IyU*^6j--yp+X@9>(*|*_`zYI=5CS_5SgLz z^jqTVAl&7F46laipAP!`c_~-WzFj+?9s&F3of|3=!Po%-j}j%e4-wQieKoDu#c6Fk z?{xRuH_wE)`?b-VZxrT+%<&4+Yu1M_Lsu@P8rNG)_oTh#CcURe8W|qP3I&eCbt(mx zQK$04!fqHc42+`@+)ZPp^e}Jo>m*Dzcvt5~KIPy%)X{1R98>^^Gdnv+nDKbOy}Vqo z+<4rt@MrlNw>DUgW8k8B6DpGyK?o{(l@Z4U+}O?qt5&k=tC&dIe;@MEFYFTTyHR)d 
zp9y{p3i=|Xv-AacuJ`VLorsJqq8Lnqx0sp=7LWm8!SbUYmNE^0@SM!QZ3h{yrIQrPQyHt>w!(XGOg0zMfM$npLwTR%JhG?XE0{r23OX*Z5NRW*&45x86W&OkoT zeS&n~7tgJax2Wtq#^9@}HafkaY%}@(vDonRSX@piE<_Lm4WK}T@@j>r*uU5{Flnpj zG9IoLC)>jQlw!I*^cGY4j5051UHE5(B@EE6@C)WSPC6Un5yVgO!ic5AtD2Xa( zw1UUJGd$-w9!rzQ?vE$}$9f;pBG1A32#BWeF^}$(9r){U_@U_T^HVby6?(xGN8N`y zsvvmmrx~0@%rhZ5XmSRo~W5amRQB4Ion(2{d$WY1sO+5Dw+ zY0Z#$H$(l}$!c$P3|1beik_YxKy##4lB7j6AT*H0LN+bidv@@wa84SF`G|(|H7`b6 z9_^K?YO$Xg4|~~8Xb>7;(p(Wf99;i8Rh)jJK9^QpD0`aW!l0N?VYh=~ct_Pbf+2jU z$(Ep>A~jD(No8u}EI^pRcvx#uF4&-Q?L|+;V7yT7nJ7zK^Jl!E*rQ{%o`|Vu^}nS^ zlO-j}?|)&|C@`jEk2^Y!buYR|<#F#m^?Fw3%kMs7+OML~CJmRRAIdxygVVbIpCdY2 z^z!!vp-eD&71F71-#8w$5>3I*{eY|p4!d#eZ8XrB@c#+`=`|SNE;qVhtTj}*1ixGwI8LY&W zUll#EK5I<#r=z7s{>hIYuPk0=2JB0HqNqDt`E;1UsAqh{RVp+Jr87EM3AU`r?)mU4 zu#}35@iq*!6X>8ir;)GQjg)t~BzHDl2IEcZrN5Zc^?_>X%8J}WnY!$K%gyC4TL$jd zXRHDA)OTP7E(@$nSqwQ{P~II4acTE!fZr*r>4ks2Kdx+2SZ|N!9TN5)96M9+MPnfEn{U{IogQ@Ey~%C4yV`A7A^sT**;j;~^gvc#{1m zXJC9ZlRTR7?U(hL1_9#rbcrEPxY$BfCYxW85LyM9B+PU)GlZ)z|4lC{ZhWi~Y(nA0 z&ZncIRLU2oykS~>qCw~t&hZYG%JSYXA-h0Zaq(29Ncy*$m7bOC=nJz8JG+15VEKeu zx$e#n6-#x(o8P0@pDrq8Lm|81;@n(r$ll^?udG&s+WBU|{%7M^ef(i+?EBp8{6qO4 zoP32mjy;^SG*<*S=cCh>-d}kqTy7j+vl03us6e-tR-VryhBGZqoJ&5XLzah@)(-B# z@_2T}cGT&{@fLfpW4)|9J{FlH%E)g?R z3A8eA@f*!Ps@_qi^L)^@%W6lj81DTd)5}Cb;cDU83N{&I2_r$gR{1gEY(#C8TfT1T z9$D)*DXOF1@x3L*g3PTRme^{|%Cw|QsV{P;T3}~UwQf1PS0FvG8=z5smmpY%txfND zYeuA9tB%(oLO%Hbiv2XCfb{hZ61r97fzJ3}<>11@UUwkld+q6E$o*AB8Nav+1ZA_P!Ab1=)bifS8o4$f)QY!zKwQ}W83~pYkS#oV z88|O<`E6whc<$Q&&K_K+Q~yq{(EIXyvdLi1y~Q?M>Gq?oL9#eJg2~?wKQ%IAAk|<* z?Uqz{f{L1dDXO*iIh>tk_Q^jfsq%F_L`C|Y!UrB*#9-D~p$g=4>_Nt}aLhD!N}5fP z&80NEDqEUQ0nJ;@I{tkPLL4{;h5)R&jmGIqHXe?SHrWA@mYtx8bK{{&cPMr?Q|>Od zau~MzcH#a4W&pQ<+v+DfM;Lp_CV3fJ%K9A2W!QNQvx)}C@7#a-B<1_w{J2=Uh^HL%=R-HAjpm`sC z;Wct!dn6QIU?6<`f&KV)ZPeoR8!8O)#SD3OwSxEG2(>H3v8dz^H!MkLV1euQk+ZK?ywpeM26FidQ5 z-Cb_c1NU1^m~bO%(rSYIZiBtcqjPg%IUepugI<0RVXEezu26YBKd=}kampdNaSI&B zg$%NRdNIjA%7McNVbA$nMbpKB8lia+w1%jjLarGsAIG^ejkie_SM$~79?u$6qNQ1q 
z#)l@wRTOd6&s*Q&Ta&8{+WOty5_4D)ODS5}=h&joiZR-i3IgD?b>f^j!~N@oD=)eYdq&7TL$( z!`gw!0$N?plL8hA(jo-cI)CpEpSLzQ!~S041}`iuXRdyK^wi_|salFI1&r~|5U;-c ziryUWdiQJ4WuWCRB!F}U(wloi27Os73`oxEoo?v{jDv>)4XPweAXx2%Q>yD1NN8O^ zz_KA{7D%+dy$)~hsCqfbVh% z6Xu5Q@Bd;ZuyG+L(t}$Nxdq1xIIsHP33u^dD=|X9cp)q&ql0LqG9?YQDLvJ+X$cXL zkXyy%TBDS(V6v`bOJO|gdzf$4VizD%a}EYQ>OYPS`nP9~#ZbWgjW~xThVQbn3Og-- z=~qguRld_F+|2i4F7P1XFQbO9kg)EOibZsQjxw2*{p@16eZrYU|!>u|a;}-9e{UiSm z1dYv~+t<8$>0eHI-@RK=yST>o|>D(Vi3S`3`#XbVF(J`{Cps)i~4~-DF}X`D}_15_2Goc?=<@H`f7b@J&O>7QgnEF zAzO^IoSVIm-3~;E+6gkfH3z#9P?Q@H?#(rF#(CoodtFBdOc^UPSvYNA1a6Az-h;IO zh;s$KM%yDw!Mo$0I>u%?UgcHVH*fmAx$rrj^~cY~RyLk53L}*&HOHHAm!^KaGn_?@ zoUm!yGGNd%G834X&~!E??2%STAnFuyX6V#7)7C0A^8Lcsi`td&Z_u&H zVvEL4P8L;F^;>>{&=*bvqDe#CEtMGpPO*}ofpu>`|0RYHFZHqjz=0|_W(__7>#Y3wJglnXQ7ufzwe)}{Dk^f z5HU`8%W&nE!gzj_w)=h(zUYhL+u&r(Kpo!GBko?4=kx2=$4LF65ZPjRzD>oe8wYde z)TfUhN2auP;14T0N;J6SwyS9bqT~YflApeYtUUky!8Og7nMwSicX)1|jAJRnJ|P?F z3GV3ufL9)uZ1sEmrPQ~`3Ecwhd)P!X9B-O?(Vy`5!)Brnv&NNsDhlGS@h3-wOOY$> zHP5k|);y15`x(0BI$5I1AuL<6skhsa*=&_YvDCV|4&CC+lKJi#Y`>3Es|&I)`fmwg zP!w5^ys@nTK60|pwT~7pAK0gV<>2{h5xVo}Bh$iy;kG$U>+o_Q_K=XY3zmj5tmJVG z3*y!|5E26RM?a@^2Q``vu8?7_DL6f|Tv54iIqlPHX*^CV_1d4Rz)o9&`^X5fOL6W8 z8{Gsj8s@Pc#TNxK_*c;R0co<2|85BbKbu~Uy=+9Dhtl|_TB8P>w8%EkWEmT)CjeQE z*{L1<$=*m99oU!Hn{_@9@}V|q?XUY18YAR^`UK}YoUIVtLhQq~6QD7BSP~MDV^BQS z39*uj0wo_CrUyrtmnK^#N|q;aKGrzR$djrGPb$1 zt=t6$3D(sC2?QfN#JbNijx&z~Py4VUQGLU2Mn z90~ypAB;$LcL3xTnPtBrS}l>-3K19B{*|MXstpVgIPFhU*Nw;mR$7WrAylsvJ5LAM zU;ff7w}5ql2s7ca8No8qQ|vs#mS@%Q@9W*_DEdyVo_B-FmrLeByW%Y?fRbcD)r+g z{i@u1j_U^p&h$aCd@46@jym$Db#)1}HU(-)qYn4|1`LSdYE}F%7sE6wx@0uK6lOdm zDr=nfj85hxS9^Aw8M|CuNuBpe3PM2`(*698f0g9>oaZAdP;9zT1-UVoMI`)+q=rl1Vk}@I<g+^8PPyAEse_AmG%HYzI~*FN zvP;CxncKY^2PgY%Z;rHC(z4THs>N=?8ss!m9L2Lg`{rgBO6^;%kG68_c~%QQzh55m znHWN^;6jG3f&(y4B(|}w)Xp6#_g@(yv*37{?5Zqzl;gmvdsjVQ)h1SC0%iO$po7tT zM<>qmuD)aGV7cO@%X|mfm^8623%P-8+v^q2xrFpQwm)8Dwl}LgE(#xq9r%h*b4T5C zBd4;jN?0XHN?$rfCSIxOCobGYUBe+Hj+6-e67S~j{Pd|vp3x(Ja-qYPlB^aY*;0Jb 
z0=!O?wY%=U;Wt3^#wFvv#{-4r{8eaz($d!EzB^WJI3JJ?^uUQLgFncNqqNUPn%m;n9zbn5ku&t z97uvT%!a(%q}Y0SnQMB%@zs%JyHMt*A9kM|L@E_ zHIqBU_wW-u=tDxZExs^Lnpz-+j3v0=DGm;I*LPN~QwX_FC6g3BV7o~?aWKlv%E}QH z`0E$@@Wrn8K|!6+>CPXzCdge?f>vaoKVLU;Ot%6*6I(g1Qj9K7keaL%HjyS^L?T5A za|+ftA5fXOA72T`3Mn#Dd1=J_R^8E$Zj&Djk>zp7bZza3 zTioz(3jC0JZ>O&+;IGYRwDA@>Z!T{mAlaLypz+}Sx zSu(Jb-I z%9u&_pX$>$z&PS6=RgbW1~s;e`43Ra7paBMpnsD9Cl;FXXc~C{TPcv^{#)uVgMk*y zl>NC9t@Ost+fd-O_~>ruSgQH;;XTc+N!Eh;an~un^&YE1>Y}1m6uYSz7Fm7K!I6JB zbDW!#uyDClT((_8(W2$&2zqvo#*R3{na6WiE|$;XrqF#0)Psr&hHY+Fn+j1I2&aqCgoY8S~lu#bd;`@SIr! zd;5?nL2yVDm5u21moMrRmlnRs&P~%@!7XvdPRqyuM(0L`O8wWx=FfljUv7-rmtDxo z#Gh{R_rZ`B`!S>|jKk;ENM!MvJoP<72()S1+(ZhG`*YI_{{FJFmieBZ!r#B6D+c(J zb1}7zRLgE13PNob+7=XRW~bpw1>q(Ln#hTZ$HYgL17A!*;dEn^z-~s=Z}RC`aPVuR z_m9V{7~Ww*TadGiL`6a=6Lz+0dT1i}8b z{VKGM_5xb$^pr0Z#LJ0MtOnNZ`xh`Dll>1tlg|(L(Jg6~Az~5KZHr~Rf^*}M$Z>(vfM<1k?QVShpLfK$FYvD2I5_w- z`Do*C2}e>>kHxg(=I>u`w<_I9Z>%T*OD3H86x#V_pG6GmKI7JanFKYp7Fkn{t~9fe5hJO)E2gW^u@dxhx-9a~G#T4`-X|J* zddqqXqZFh@sv8nZPxTmztGN{wKqm#N_X|&>ETJf%$0DWCBd9W}jR5@&XgmvdF)<5s z296b?{!oyzXH^Uy5{RDleMfP&P?102=3-gwKx$rnNg&>=hmp)8a#m3205vu7%zA+s zT9+v}2?rB_A`~%VP`Qm%HtBVr3pcmP?L>UJ^*Mpfw8OzYB8P;Ctu6;Y^E_ahnsP4N zKf>~PRC8K&m&K~@QmXBInt;b)zH6~466id5;CzK126)}}Ixz!;RWoxuNoVh{B=q9+ z3bL6tj<;}jIXr8cJBc$CK2-27g^H`I@|u{_Kf$vX`vIM7Au)I4@01)7>ES_HUQli^ z%+e1z9_|X3hEgh7KvL1>U-kAHpXi#<7wI zJsu?acUkHoyh`votpv|8?_!Ye(jt2SGCE{xV3PRXG+T6<8n!Az!o>hGZgYx$JSHY_ z$zOUpuq)6>l}Y9orl)_t5?JdnQa({?d4Es_V|a2ZY20onn~qKPN$_Bu-aP|WZX0z5 z`FC(^{B1Tu$1&o(9Sf~%4J!Fv9V!BT{i|L_J`a#dWluu1wlUepnm{-WqhVzh7<1C= zzbh6iq{Zo<#&6fKJvv=oT4!1wB`1|3r=L3vEipg|u1M36CaW=Xqx1&jON6h5z8yiiSHI7BdK9s|n%WjDI zM=F$Q6ith|&SO2uXfg@=|3H5GNzN4S!9j)^W^DCVsi^2a(I1_548~|nPM@x zP6HL*9Rbd$Kqd}%^ z;y)3g_iN*H-Fod*BCoCke@)ZmGskB=QQD z!lxCkvJ&skkNAmLjv}i+;*OBq3>P0C_bLkUX@skkL{o;g#Z;?9Ed<*ic#VceDZaQK zaw4u}v6vjB@}U`@l+t^R5z^uvO@bSS;h3`j4{0%rH@i_;PXu!S1$>W=FLkQW|4DGd z?X3NfVIFF?j27yegOgJ~(_6@ZnL)zElzaNEYY%spNg!9`mFl8Zf9?jt+fa)e1fFbb 
zYqkYfaS@r@ud6HHPxdC*(rD^Wc1J)xE1g0^^=c9w2(sEOfTqhut$zZeIG}DKhy>@H z=+>itDjSBhTvcwX8*H_yj$pt=gF-cADxC{$KvW~%u%huISF;vIYHYGjeaodaBCEWi zaVVV2VK>cf;ZAq)TpUNZA_uMLJ?-00t0i><`y1-?&ml1*FXmetfU!1HyiRB`OKxOD zR+9>@4&y)YFO|?PbgTm)PdrMJY=UYnh?bLLXksN5ei6+cH8(d`ycd*(vl5=*r5&}W z4UAvU3(9|iIss$|6wA>bMd2{J)imoYkY=nM8&aX>eUcsk`?9t{&4|-7ZzlQk3NxFJ zeb_m-X=$4)RyqtM{nHLpf^xBkH1x;cz8!p@J>Uu?%xddXsPtbRm4jx!@A0TS5x^Dt z7UXbna6BhvNyP&m>B>B>YXAUCX%}o^;oPI4ZVSBpRa&&AaQb+nNF-`JLDF-c*8I4s4S(cOGfC$#S-qvE1V-h{-b`V*IGgY z8`kM*0B5jbapFUW&a{ffJLCH~KG&b*xym}0-V)JoXqpJ874pzt9%VE%dKVc9U!tm- znehz*QV`>O!hT@v2Vq(a_n5R~yf6z9OObN^ZnxA5zgA*n1&8?7%*@SW4GgcooS+0x z)J$XcYX?|sK*kvQQmTZsP8B4sStlFu#l>DGCa|AV!UMu-pK11rjddy4*+z_xM#*Q+ z8J&f-=2lma3RUj@_xt zJHC2U?=&P?_4W17ImpQrmHWc^eG!AZ=hy2<*1M0%1qWw1Q}H1~5F+hq%J?pmC$C&T zm}`Ya% zfC}H`z7pM9N&reu@P0vf8>_A6oGb=qmEESFF54|?f`32-?b$_IlAZr!P>n7T}DA6vgPmR zY)XW)t9sSzOrTH3E7RREGK!-yoY^_uXnW&2hZ*-RAE5vSMWV}>ovrw6Cp+2#A@R#* zQc}C)w5rNOAUZRnDSbzVVWf*QKv?+C&VbFHh-(S2^Xe|w#`aADdQFS9Oa#f{zwPrV zHDiKOOH+wPA^s)+Z6bs=wQ0iYZ~{5W35m?MMHzNce~xB?*`0EH(zraNp5M1r;2&vF zCR~}mNp9dZ)?d#)NzIzzEzaDv^s~5Vt<6M7-*acCr9M~l^nFp-{Nc_5Wb3f12j}P7 z7J$lL`5};`Dw}kX_|~A}5|(LXC_`sUD+qQ7O}^&%xu#5$GANRy+?tJ~kCcxH(L zZ|8Y379}j6ZD>m62^FYwR}#R;!0^ig8f`+hhsII_j0VVvR{w~c9UywOigu`gS&5Id zJ`z5w(VO0Vu?Mqfuv8Y>+{HD-)Ysp?)o^@J&hboGI3TDmhOZcNAybl!O*^&YUcR;? 
zl*bz48b;cpXsIXjhsfV0kH0txr4ZJAF~N#Lt!`6MBI|yM9SEocgLM%GRUfbIIIeGi zQ(%~S+U&-3V@l9Rt>WK8s%lyb+;<;;k)vglc3*$Jzpa8ORCYG@h~brNG&K?^`Xy59 zoIz{ej|2?jT0?Qgp-y;L@j-ead-n6<@-#hWGsrZ|bU~(3mdh~MI8g3eU=svHD+r3h z7NC7Q=id(oAg!=5Zvv+sSEAbt84r0yUv9)HTA1AF;a1@DO>0zM$;<)$M2UmND7*+% z0Pvc1LXNfNqDk*+6HI3)$P|WRu%Q`tTP3n8N84Iv^U&q`A;2XL)K<;VG94aXU49nhhdan(Wh&U68 zxM%ZSzy98Ky7+vcome!p2UhE+Pr08y-2;vv*YgCGh!>aN`1nAsQ{5WLhD8H?mLY4r ztw;#LMOQFlb#;Eb;3Eq-#rf^FV=RQ=QmUH$y;3!gSA>KE$Lg z$u=Qp-J5?|t-aQrA_ZDO#P>Sd0?d<!N<0(M3PnWE>T2WJ{)dw1P6GNNDh(!g7)s|mAWr#LxT72Cn8 z1$yUCU-^-8uZtIHX~@6;;q18cY_#H~*wqbi;zFDTPYz3ihSbm>en80GlibC3kYXee zv>rj6AtBnIrsh(9@>J*|!`s0UX=Ae-oHM8#P+Q#NCS9{g>GdwNde>Igw*+X;fT0^c@vRUUy2A{E?=GRFD8RardwQA|o3r^r3 zl9&0{ysd@tE?d2J{ZKHCvygzY-U*v8xyO80b-GZQ>pb^e-u?WXshXr?%Up3* z+UHF}wi@Ul#2pEwYT>|9&;QNu%G*=;J{za{EhL9RyMsmn+;C$!fCc(}_y8>6kn-ix zm$oZa2id0R7mt`Uu&_GBrSO~^y4Ma{9XDs7ldv0SRb&c#qWVy19~LG_Ci&?r5c`nZ zagh>|k?Favg5T7doQTV*yNkH^6S!-u3B)23MiX(!gw_?n1^E~d;IaN41L-`1d`jG) zy8{0aP)l|EgMt7#@F)6EK3>Eu0%(?Lr*))7l8&=f7|T@#YD4`?}^+ky*=eh78D;tk#sZT()K- z%La$)73LTtUzM8GwLosU`&*rY+ug6*JVYbNef{Oq3~W*FGoQibgaEU+GVT#Kw&-1(z;DO<@)$xGK*h8fO>nY zIXi_~>Bi2=)SUBN&}XO|C3scM`ciT+$w4dMXi|#zNXmbd@l>_nwNx`(R)%%h`~^FO zUdVB@$Cz;Xa3RZpIJBPc-uv*Ii-0(e{=vXQq#& zH`3GT2jH%0cbawp3@2YMXk)B99bU8g^cC~~^b8r8vnZ$uYLt2HgqQEeaBEvxuv5gb zyA+;0W5ybUC~E6(zE!4K6RHEHk`I8^{?nzRBH7>19jy{P4;T7!%IA~=0t0cev45ZU zXaJ56|9>|&rydZBU$9LD`#w$Nt~X~}1IlR788=)qH?Xg1KhizEZGy{ANB4Z}a86By zL*dJP*9HHA)-?I{zaIgmQ+IMrv3LwX5}@C69WjPn1iL?eETCp4;LL>i-{0vCaKtC} zh9Qv)%E{Tf(Vsu>{H|y|jAJGUMs4PCx5e>DXnlh^RnBvb0;tA(t=dIc33h*Y2IGY!j zl!R5TdfqGm-jM7$zWyKbHR6-&ON0KQjQ;*t1KY3lcF;YYFPj?nYol&WS46N3hy0(1dwv99?y3xCS$)uU79)6AQ~Ln0{%RTMKUGJhRUa}3K2-k zr>EEx3<&n{Fg@$Yj*We@HP@VdQUPeo{WeDt3`7(-nJl zBeMJ7G%78*HRJ;IZadyRsfPbLY|k?|>5G+^cpbDFAU=Z@e1d}R`|Ik_kvGv|Q}4RP z{3Bxlzv&}*W=-C@O&xC9Rv*_p4BbwaG)KpBhX}$_uqR}^7=YfMEf453fI(q&Sb(I0 zbc99Pv%Rwud=W;@?ie7l`24SDT_As;6r8u1jer1S_O4hOVAqiv?QVv0Z{p(ChHE|d zzgi$qE6o@EB)!+=FXe>Q+42Y2me1CIUYL~#4e?VB-n`!gaqJMsSLLA(s$1$Hu$n8* 
zHF(X}|We=gl|ORz~I$y!62|j|C5!t0G~n)C^h#)BjJ! z_Vqmhdg?`A>}!x;q-4y+F zwDVXkx!W+y+I+JW#x8f+q5JDQf735=QZZpi;zxE1J!xPME7OXOjt*p(KK|s0DxjJJ zeY1|oKB&(bijkT!(ylJ9z^Ak7FPR9zc~cDhpfjMk%=7^P^!TJ-h-DrRZSvZm^wJ|X zJvb}mVGVdk(4v1($G?94wuTx&SW@EehbvP6mC;bq57q!Zt)L*kjui=&$U@tYGw9&l zs^EtJ4`Yk6&sDg>=pPUuY`?3gHm0g30!jG_m_j=oO6SC_jnsdaygUH`nNp1}mY{n?Y{C0u$G(SHDu(iG2 zX3$*xlknueb8QMG*7Bae!KHpGjqICLB84k@H;H2T9JA$sm|T_mpAu5X68^2h zH3-8}RU(>HK#L0oo`PJgMM?bm=0~Ty<@JO&M^7q2i zO>YP2<{(tiEOvdTy0JUxbC@ znLai-I68*uv&5vCNXfPaUVC3*cmG4Wq%ON^Ez+63%He}centk5VS^tS9rmzh|7m#l z=VhHyh_a3CFb)njMg`8Y`Dm7!e-MFF0mCo)2{~aBr#E9(XGtC@?>)wb8p3uRGyS(d zPff5z>uIV!{%SivS7cZ8>(J9fb*u5BTqW-D^R^I^WJrYv)!ZEYy5}c zqbrbuBF zFOFfnbRE`qrh9w5zvA8z|M_}1@#jY}scC@CJbv>|Ok{6n!HC}b_S@0Ux-M5)&J3pt&L+LLY1pVbkJ2ynyy_mXMfON>Z}U^CTKyxj?@boGm$@ z_kVIy3b&(PH|VI5#Oh`BI$9yP_O@ueN4F&<4!C&dl_)Aan%MPs`z-fSd z5-q90qwQ`vH0-_vZ>8@SI9{qf$o2R4KgNZ)1?&|hB&ha~`P4lykm13>bcdPYdHY|r ziZKQO*LS0xam>Rb(QbHB9fvs-113Jo5Exqr<}+1)c5f ziwg^rJUGIrXXNf5JxOG_wGke)s1O+*O6W4%Gy7%!XlQS!`Z52|o^{Sd|Ik8fU0_GJ zi0ljZw!+|)NLDF$t)04IX_etI#nn%qRXm ze5ShSj{;k z;z^D1tRlltg|BWzM1+#!2A}-uDonSs1DJUjZ`o5`J$6Q>lA7h6H28m`Rrda&9&48; zSd8bad?az6V&uG7$$NErQ0UgGukc`w?q|ILXAIxMrxuR6j##Oyl~ED=XOS+;pF@>x zZl{f#j}_q@Nfu7Pi0n_CPMdf#7N4(f7HLao>tQ|9r}T46&2=0To*e!{zpGfDm#vQe z5SU!Y)b9xhc^;{j{l=h)AgO7u!KN7?RVN-Et*XN>ze(j zJtp_f?FTe7rrMShuN7j^LOAvtqNWw!QnD3)O3+}{uW?UuKx<)qI%90(6jyZNu~tuy zjcoa2h3K$o@vCqZDTUp?6bA?Xv|4Mm2_7PJ9JOs1;srhKwaon=s@^iL%Jumk#sVop zQ4kdA7Le|imXdCj*dQg{poB_yw^GvGp`vt2H`3C*Y531R-{14%xj*Lx;sN)4#l%`` zX0U>=z(#g%lRYB)+CIugc{&dZv2oU|7c`%FW-k}hw#;L+5vh%-Bl5d?U6U$PE& zDe%L;shW>wa%U^jT>p`iv@j=DR&G9eGadhCIM&)ScZh30%S}$%^$|D9h->?L)a)hI z)!F)Y-$$Z)%C?4tVTM;$R@T=Gz{=4s%plleM0L{x9jRa$;>ZSMf<8BC&5SLZ>a4YQ z%mgNzjQAv!?y}kOTh6Z2k(Tp~A32q6TfLs}pVrb+%as|@Bwp}ni(Vi3*4tAtG)K^I zpVbt80*Sd~qVeyrPA=|Xu+zp~vX3EQ)DeL~T3d{IXF zvpIqZn)l0W>;uhFPjpUf(S}q(mpC0-{)e1JAt6XjkUhKw) zD?6VrR~MDqBkk~R`IpBj`Y11e<3L7j{+c^rQGd?9bul4w)%K`-yHtR8{3uqK$cAbX 
z+2|EX9R3mgqrK_*Se|753TxUY>12VsZVmZag#*n7WzPoL#+k2Mec1lBs)%e%ID5)+ zpN{%B8QCL6hULBc&x`aZ`1q(9*Hr6$Z?JJt3Au#_5SYi^RxbSfoD$^= zm9IfxO{ndAoeIv%)!kjwTCkG9lR44TY5qd8| zlcf|dt6JthmLEh6xDI5$EFXRwZSY^DK@xtPJUT5ce{rittqPnC^?Hv^ywk~~O`5UN z4{$ck%sLsF#gXFNmild}*UVo}m>jZNjPWSb<0Bm%w$}d%dJf?9ZpHZFFNd=A87&x% zxr?%#*nYE%s)ZvWxAn;Aw^Fmc)591wfia`@#c_i1IBw?-JR(Is#{H)os=-g#mOl~O z(-@L23;ZfY6hpK%b~I;m)kqR2R)>?;BFN|NBDARkZYABcqqn!WAu&)V?512v2+twJg@c9cz+2AXus;=y7D`Xa|(kApnDf;vTg?*I2dlPnr$7*k_5oEKAD z>pEJbFW~SmePI5Y3x)5HsQ0;8EX$G-8DC&v`{^MyXSAQ+t1MO6!1sA^X=&c812oHV z>Pg&I{19bBU*JI&xf9;O-_cD~IkQ_<8!e7q+|k6D7ZA%jy>vmnACneV#jQQxI$Pm| zaVlxUpj7@D&CB=!3qK8^4^^+a60PpvkP&as_s zNy>(MTmpeJHyf$Y8yo>+TtP3bA2@bDoUxaJXUBgHbu{suDG|rQAY$br(lg=9o=}{7 zqF=!;2yk0A=L!kK!@NyDtNZe0do+xU@N-&4 zMNM2?`zw7=Bs|mPDSsPCvxw^IYJk!eE$(??#JFb3#^oeG{=j zO!@7S8f87xEdjS*@$5#HJP2ne^pk(PR1?nNaL`2cr<%P*7#SI9X|uZ>HJHM*N4Th| zpDQW#O{1Ph_OAfO>ovHb-c8;9YpSp($$w8?Vc~S8h32D2Ng#UG==qjGn=e_9MjwWC zGd%d;zrY5V4u^)uxCYJ$R27UYEVO>mq!kEad0|~^n7@i zRApKBX-{EiazdL!lpuojURUn4DuKYvxlhxrQ7_!&Z)YQM@PKtg!k?7noqGW$fCfw~QdPa+isrrw=N`PZz z(W}{kuhj8H?G!KiM6OU3snDu=@5pS?5hR^3BI*2OB~6`BwCn-&dJysUf_9Z6m&Xe* zOovH?{Qcv2jBk9G#}cM!tD6cR@#qW;$nbo9+LyW(`zaz*E=ncGqiRt9oM=;fd%KU% z_2pXF+Kdc}%|OlvbOOdJ-g}3R4yI&fs#%PGCq_erHf{Dc0t9$Avn#!pj|RQd?F_08 zx*d=ip3d$Q4?2Ef-f=&4czND8?ESqxIc;H&({Sc zVVnvR0xZNUn7IIdzJZQYQmr}bo#wl?G0tf?oFj_5$t-P) zy8O=N89wC3Z-cT*2vLu37DM=(qm-D@t_J$nfn3=KX(}X$HG}n7D>F@MRh?5St6yO3A_v3Y8k0oZ z`?$|q5NyBk7$)T9v%x9s{hrFhYF}tmn_YT^Er-86*bKbS98-QL&m*~p2XX`CFlt+( znR$44goTA!ShkRxliM?m`8&L**ao^_AN(u7A~l<;DTTloy2n>mI>MP}QboW>s8CV? 
zENtut-a$@LA5RM%a3?r<%%%$z4bYJcn zL=XspN*($lY8D%i@>uJ^xV16X-@krs%rw^PnGlF4u4Cy`;;(TL{Noxl|D%^9X;0c{ z%756L%anfw0T`KZulIr0`CpZpuXHDP_WNW)gw0<28b1a>LTxiu^Ji$t;qUhkv9YmB zSMz8)stY*m0!cS-MKEc~DBh8kJB|KsZb5*J{l--4Bi>5y6;qtZCHaRB5SuMzBoN!a zBUjDo3_9TT@oArHq2^1-%=CELJ(F5f8?oJ_x>xu)Xu;6maiO&=-_e$$qr)U^ena~n zi5!G?+S+Lv?@X*KQ%G%{S!WAFgQzB}qoSjgsz-l(sHuy@B>4P6?Duxdc=_VaT;E3% zDV9;58~dIQ2&G=W6xRQiK+MKNS5#y?|0^&K1MR7_6YE!f$ZtF$JLXy(l>yf!{oH#( z`Z;XBiAU4>9MjeHe+{nCfL@a&w)>+oq@?kcua+T!I8#g(o@XM<5Y_s_VHTM=L_I;H z*76c%6&M#_jGZl527v1l67mGPpa(vmD3dEGKSu z%kiE(31^Ea4d%|>uuX_?4-zrqKWb(;8$2(!X{H;zsSXW#zamUR-+>K*dj(R2^Zeqb zH#(P~Fa&m+!NB)hfEdvrGS+kx=kT!8(HjQ8O;;)GS6L;NKPo2^4yuo^y^wM{)%W*O z;)DrgKx#m}@XXB262E|1Zsp{80-cEBk;)&2A$DIJEJ^|A$bRKZ&7eRTwfg~qlmbrp z7#Mq9tn4j0)Ab#!%^uwe6_qg5#<(LgY{C2LBIi6HB2paOYbdF4@8dFSec}hEh$sft zng@grBKnnK=dR!ASnla;prBLQ{i!hII47M+kZJfaLiu<@hX45>MRSPC3GyrB_X-*RC12pzpPj#dKDo*84VvA1^=Z z87T6(Qy{{?*6$gfRXYrORL-Fz^;%^vPsHnJve9a$;1*zp3iU719(Sp8 zDOuWQQn*Z9>d`IfjvT%tJ?QmEE_($#ry&RH0juB4$4#2n$4ZUBo+Syo!aT2{dqLN@ zDrUCv_cLit!nccD)9VIY?Y*2@BKp8Q$k53pF(D`NHoIUHaYH zijILXTjv<$L_sZtdM6;*Ehd7ag^h`q(+;9{DvZ zwInOGzcTFbcUMQoob8c)YWr$;W?^9P>da_sWmx4}r|CZ~0vBHA!*^Rc%Ex=1hm&-D zzZQuE;|4|8*lb~71*9oem6hs_R?3av4AElnkQ zujGZs@J())TD_@;_i|AE6*mROwx8rld{E>A3B?p0r*`Sj;)d5TZ#5eSf1ylBI4Cp0 z8kUk7H`GH@7L^am=#HAoq^hvEhL-ylZa?v}S7@7zd1~NM$3Dq^;{lX9x@`Z=SE&#) z!(_WL9q01OEo4h@Erig{=NBs%8)qAuM`RPDvf1by^Yh3ZCCgB6?#<8eeZdZD|{uwRJF6(lK{77Vm{fdU8)bF$P zdw1@{OlU>mm$^IF-jHb>$O5NJw}=h4#fko8Vqw0Mbcyi6u99)V*l!fV6de^1&;h>6 zTvKHjqEKFDfJWsy$OV!-kl26V>m|4P8qsdyipuN;p&VuM$6OohDlJXI_{ZCcJ^5p2 z&x^(zI=`kNxMm(ja%dw@x>jNyzNqXLNJ_&Ipx}GaC8K&S@X1~uIlXC)3SOcW`ZE*H zdkN5rVI85@@?^D*rfK*Hb5fNhi)DZM6N18F&?*iLz;r-DiYy%+9q`qCo{V1S6)^F+ z=FI{f;OI0o#E%};*Vj)@dY==iNlCRr#nNn}JKefIsiYNponv(=zDjNBooN99hi#9g z)6Kv3rODHTOBms_U%0nCtv2eMGGA8xg8kX@tWOmru;1&h)GRj1ZU7bn>Rw=SD(>`K zF?%?o-wH1nJ~}wl3Aua4x}*rnJ8dl{^b-+8XUAR zuD4U%>>}mlOvj{w|LD;@BIzm{O}qPXiJgOHrW)Va%FDro?^JNOzhIwy(fT{$^X)PR 
z1|`PsdJLErckXz3$^=p6__%T3lgq}3OIi)_InC8P34$j^*Xo_4rt53}*vVn>M9nlN z34~T5+kVE=J0DrwomPAyph4MV<40_7Zf@SAHv{ziXNCOBHC)*)!>f&l|Ab{2e6b;M zDkv(+Rfa-Q!Ca#_W3yK>XTqNBZ^uSQ^a}vnG&R%_f9YW;jhM(%kEl49eCuASK;q4f zxA%I>*G`}4ESy8Kru8QlbQ(cZQ+w(OowvmD2^Dl#F?FjgCtfOz)kzL&Fei@WWo5Oy z2N^cHo>!0`1sv}VUSICm47NL0WfT(fgs>FGtiaXF(=1LsN3$D4`;>9v{<5wzZOn1g zY_xLW#}%bwa#Oa<6;Pii<;OTqy+vFuf^V)l<11eJ5UGHL_81A+?CUoD_DNhQ@&As~m|(kKPzbboT_r z5gp}a-I%E1y+_hX+W-B%pPvWL2@X#Ce*rCmQ`@C&(Nd;^Iy*FU>S1J*to%w%^W~{F zhH0|ve)~L7A24zlP4us9~rah+g`cW{5K_?5UDiDEAc~Tn#sV#BDO0 zt&%*eVy0$h4Ki$_Gijwm&{88h6D-@DVERu@O$}teD=>SHzbl%}db&{IK5^mO;PcMa zj&h%TTXQ&i9Q{du7Y2Uha&5k;CE+UA7OIJpTygT=#B)Edcwepb?%g|IblehpCN)v! ziCZ$`O)2*YBOTXWuFC|>wbU{Pu1MK0?^q&u-MN*NR{t_S4bDkAC}`VmqH4a(t9bMi z`en^zYEpcqo;QaUGrsKKLs0l&9OXOpBMM>xzB8O=42g|KzyYUJz&qD z)gph#s)^aa(o#m|b{~HctUQ`BVVGCyb*pRjCR>qYKp6#w9$_N5x&Sc6YKj;vuUEk9 zJd%;Sb*~LF>?KcmS6kC-*(RAC+pH}2y_?Or8G_o|<6)&eRTyTU1de{=Rdnjzu*qwG z1e^%3s!dE8B^w%Au8z#H`m$I(KH5@^_tj-2jr@Ha@=4-^Ym^RaaO(CsUd*#=3 zhp_pQH_x#vTV1(uo_$N`l;@vMf#$lmiu0*UAEq$N(!+>IxCrqVJ>+Q=?4x1h&EDQF zE-tPgZ|0g4XND^No=r;WDgT_ClG1%ycAaDG-aQwRl*OVKOLIT0WaZ{(9^Y|yPx!jq zYZ6UM8l9NbwK1i!b`*rIErJn|%AFV~{`he_nRoO;6KVn9>rplB&Asr4wVFk7A;-vq zBfCz2Ed6XtsNNGuPW`zl#P*yJp~gM1bZ(+@a!EB_Q|ZV6+p)E zl^%UA_K4O(b#fF<07Y9P$Y1ctgxgnJGQ)Je()c~njpT*+ufz!}E%;6>Misi6ch3i^ zoU9}Xu$cCdv%yYZUzT`ylSQ83N$S6jTEcGSMZy%7kLniTx<|Ex&5?&|5PWaXHY?Rj z^;54k>MJPY*Lo!A)r#MqE#Or*>SeMGu~F5Ykqv*V=oX@FW#!5=dr$n}u$DQQSA(B5 z+DpE6hh=~{cY*zgi17@>{{5)h>Q(j57HRvqlx^FrcPI(6QZ9%H%MXZ|aUPFL*8NN5 zWLqtMuP31yc4sm!U%zMnX`#h7!uvcy5>lOR*TL$7@z(lR=5PUEYdK7Q&3@jt{CV*M zcST^$cr1XdIiBo8#G&B|{jAnPpsB zxyAk6>TgF0L1gUbe?guY{3nXZTgJ1xk&3@t+w?y3H(&J&D?Y5UUCL%u^ev)m1&X}r zbiuiKdQvLv;oz(z8l8AhNSJmNQqxJ9-|g4Kne$){rd==O1>C<^Ke>VFr(n2j4@l3r z(a2a*nfbT{9e2gA$uQ&zC!#ehRLC9k)=#*s;pI8!PBu|fE5HyMFBzB4Qg#iTG?%Ha zWez>^l{!RtlO_aVh4HraW||b1zXhh$6A#gw#3EY{bWy1P<6)aZ5yu9b-_}2Uwh7CM za{&Sb!t_HMUKknIkItWoC!w949da)eZwq?G+iwzP>oa;7pbmvSbg*f(Ur}^(I|bm9 
zOvFo1SvmdK-4L+p`QO;u#W1917N#H`T=|JhUfoJcPHup)N1zkwSX0iA9l^&FIFuzD zC*jExv3&1aZWMPzn(~X0^&ax%i@`K8fzTr^wfbkQ!VLS12@hxDvV(2cjSNSWribGt zj(Up?n#VoE5zXa(ety_QEC;KDpPWL{Z=zcrsCi<6(C}sgYh!dGuR@aSA>Wh(wJ2sM{8;>1VK%W$!{9RV|)`SnJdzMJW z=^JweRKzl5Q!0dtXs@0R6Gw!~_Tr5FvKgWlZN(B|Ky3aAW?c z<#cohT-ZMm?sD5vI~^t_ILk*|6zbfDi`eCQ5IndwGb;7}C-*E}Hex{f%{oQX>q8r_`61H_;do zgL?@vWX}g(Ym_jtnHJ(?rluYT2jkRfIc<#g&9vv*CO zd^2nhI>i1M|FE@a|gi*&GF!J#>1&~NpFwGVu9ut*J^?g86j}~aF zzI;h(5$@GkNPBG7N~ji=M!16>ep;9`u0s;%-H_M+QmMwYZ@b8{y=L5 z9`JBJ)7zQ;xlc#A*sTK+GjE&8YS&>xu!QUwkA>cJ@7~A(=@!i(0VV}x&=02FlaJ8-iJHPE$#B+187&c>;I1e9q&mfCG)9(1b^EQCg0g7CntN0;7C-}1%3E_X*9 zpb(E@8za$PMrOc24UEDP-X(}}0zF7!TgYYzIGwW_dQ8F1(X9x})4@y#U76l|nQw5B zNJZYgJ@{OPa3vvmXl+orv~6|Pi9^n`GklkrSe`{SqHH+i=oR9>*cT1We+v4rK>HKG zM)gbIK&z&lTsXK87K8-`>NI;@fCae%im9T4Q@kCveze%|3dwxM9SEDA$lj`;dR<0b zW{B(3Ukjo1M5exi9mi?A)OOsMPJmAseAX1|qLww3xA zXy5BMPDX3Fo{?*7cM1}T_C$HCv+VzBP<7A3;e9(qzuM}JcmDiL+(f6`bkyhMQRf*; zhO+y5Rhp-Vpm`i#`c~7EO1-*Wz@RJEzkEEBuvm^h5&C@f)|ejy+7x#^)N^Qjk7sG~qx;)aBT zuakA})>59T9{n9Yg2#kU7;BB;+wh}Bp3`dlqfz5pY;1fg?9=@nO-ChSEeVYjOnk=y z{R^Y>MP(QWmb?bTUl1)SrQy{(@cEpe6PRHGDm0ZmEp+P2Xd(ATLs4TfMn2By3kbzz znxeY{p1xqN(;0uMw^#l4sZP&+(jTr9Um~=;s8A%C_voS%C*fexpTHngQ)mQvc#KeqOUvAwpMKGP{p>c(E?eKLI3}4*RU=H{#`Bmyt=C&A)HuEhE>0n|Z z!>25iel@jxetui&`gcVi0Dn<%;3Dk%Z=v2oPYum(?>jvn?qDm;aT#vApq*{Ee8GyZwRVKzXk6cbwaqBW!Td#EKmWZ zV)Qte5quL+uaF7EpeVNxt^>4W$<{dvB|)iawX&N1ih?KRQS z2aXob2xAW<6ECb#Zt?8EpSx+|!wPnLq$q7JeVH07Di)^a3&1ip_O>A^I)Z7la)Y7-Vo? 
z#Ctcds`k zwM~-4!;$+dG^WDu>jBd=;UKyn{1dC^SGHJ-7L$vre3E@o(U3vvCGc|Q)a6Gp_(u)LOUWBK+x29d|ZlQN}lgBYH^x9JUy%W}k3}!Sb zMZw3%g)k2nYhHUKCeHVU^9w#njgKL?eYFr$^!@qM`sa`7+R)v}YO=i)B-2?I<^s{Q zEMAsAIV{Zkd$0UKtgI{nQ5=kfRjW7{!oKxzZJoH<#$>7~#FhYyG^od zUD#F=cR_uz)e`JpOMGN!|3oNX^mWLi;p+8?Z*p5{a`DdWoZH5{Z_T#p`mkUcgUxGuDCcgyo&26&a2imCI8kV#$X_9kU z_QhCQg~f-w9f0G(zkK=_yG`VctHV=Xg`ePPX))EL?t6NurlKWqqtOZ74GQA$_sWjB z2&A%S-RVis+xO;|))l--w_gpp0jvYt+fLqYr|;SF6jiper%=hHb03rUsFrX3^7mOj~N^D z==>gSjkL7;hu`Owz`&4cV>rJcp{Fl$%L~yL`OnPmUwAj>*C@LEsn#s{j-@W+-sCXD z&o#Vuf6-3XSyN@hMb5ulCGbYjZM=}B2n}6~GyEKc-O)>%#Jw9v+vSG|377TNd$h=qlvj=h;KD0ofqvI&PggZ@qeD_1f6VdMG=_T76_Z z_w@x!_GVpt6_uZ$_wJhAMj_Lv&8*y&C7uij6J~IqZW{}qd+xt>MkJ{i9%^`m(!Td_ z;wE*zbg#=?Y|NBoKib`#gpntx$1l9fY7(G-b8h^Q;%(JRvxiK!_B_xB-@Jeq|U z$k8XJ@B@fF6$_I>z-h-gs?+3^sM9vBfq@{gfr?5k_RikCp(iJ2#l~l#r@gu77CQv@ zRlk0bY4&>1ylB%jm*7wQKz-LywY>a6ohH=#|Ja3Ugx6EY6dO2u2j&HLQ928F(jTMl zpU3e#ad98Ylq9adMTM@|S!NiA%fz$}!VD)Tr>(6mT3Xr)Y>dvK zr@o6RX>Iau7Oi=uZ*B#IYM8O7RdFY9NGEi)*42cEJnA@_jWc4*@H8k zQIVnvOG$&X@5w;hFKRX(zgoKKdR?58&MZTbGr1Wn@2kJK>qqQV97k52Lw93iN%J?Z z|L^)oZ^3Z^^@K)dPGOcUs{qGg_ohy@C4Yr?ACL?)y)O8q_Uo3<8>a3@3VP|3*q)UN zBMHBLL|RYf3UBU-EjX61k*A1tCWg5$LDxRbqOq|tzyaeE68;pC9%CkQ=AD!84TBgx zZGVrl`BKM;$HvCL?wd~tE)cgH))N!d^q=W3!bW}tJRpuukYJeBn#t(y1`Jv$nPIn2 z&FLh&ObB1j1dD_>or z8%0b8uQo3&J!mhc=H(TyHdo4yPb0K}rZKZ#?U}x95W!NrK2pWA4;iSYXD$BT;)$i& zOx@Pg-VPI?oY%NaGyA64ndX8~c}^=O&#ZYf`c1!gEa40-@WY8*^~NFnjgz;Rzrg2Y zj=vgTn~iirN?dAM^(5-kk_yJNHZFdAc7%F6=ZRvhnN4{2JFQ#Y#P!aPC~C(dgzcq6 z&Q;ji?PoPEF5~8%|5w2yTG-%zWDRLX;LUI#`ik{9k)O;ZhB)3EuhYNf__5NJ^2k&> z>Ctdr7ESZM|G&(dOq>ZC3CI4yswjuA=O;BF6JgRQ(5kgF@$eAb-22eB`OmXk!ZAm= z99(>vG3I!UifMFKp#rxxSF zAx%w9IC4{Dy;axPwYDaF(HK~NalV8P*Ah5BJ(t|wu3Vjmd>1^X3C5@HG@pu00d>)0}V^<4pXoYmhCZu)~d11qcfO@-dHm91so*VlKEEN7dMtdG&& zyO~iQ*2`FoY_Jp7LXsJGG7}uUaqYr${Ph!7Sw99sBCuC}e7EQrqa0q5(bCRr%~<`) zjSwj_;?stbcLjbI8Fod3jt*z)oEonSBXrcZH4S>jaM5s5QBBrvyMSYjcbUxe#h{Bb z!M=Ta_g&hTS2!KP8q3p!K|zGwv#8yic|9(hK^Ymfy;HTa^uS@8`wg6|J-T+eEpD6l 
z%)2{t1RX?*FX#RhugLl!s(gI*Sik>gPtnrRt!JAW0k#4#seOBSti@sJODPBKfej*E zx_;5lOu2a`jN{|h)J~zM_Zj-(ddHfn$;?d0o$`KB%!(vV6zzCxCLRDTr_BjNuahOv z)mE@x&r3`!{M~uHB{07viQJ&bQHsjU3|se4Ot|ap%Qm)i|^_A2B8^h9)ylg%^tZiqi~0emVtpR ze_(_biTu^GS#C>7)n(f_>nYuI77Bk|WI)5fkhWFpGivcl^!%cu!F6#_*6w5v5!cZ$ z-5VNA@>_xBTKc`%o+QzFzqR)q9D6yxlQfDJx6Hbp{_GHp>z&akntHwcirE5J-TcRo zyA9W5=(&{F4A%C| zywyw|?;9gk(_SsFe(#iX>xQqgTTTWB*XZ#B5uEk?!`7#^A~7FZ!0hhp)b* zj-t6M5lyNhj6##^x?L-6zn(@()eaQh&BG(zltzeJ7e}Crmh4@j@{5Dy^U#XZ4w1JV4jW|ODyJkeZ zi+dYV0t7r%RPLiiB8rkI%p*z4&o&j(GNXi4z#*^QRz`P?Ozmma*k zF2lp~Nj-n1gi6SC5e$8lLr{o<_TK(gmGxPI(9jo2Rq8>0zCQ)jV#e?}qOBia910>w z!QqZoxZYjq>O2|XSlvi*@UI#V4Gxa_H7TBfJiisSxmk!wkwFo6{kr%m|B= z;ax&Pm^15ne0lNd2zu|hA6>S58aUb(!LeJ9h)+y#*uQLhjHKi*%%; ziLo&q?Tm)8@YIGj6OHY}2VV=C*BM{G_6&Waa+#&PN4zm!AIa0r!9I}2V-qRb|I2wI zhdDWn6jzhl*I21tx5BJ*??E}uCx`^9tk;05C!$wTzUV_`pVbHMLatg$tDSF2h+93#}kmZh8+M8$veHjR!s)=mu5}x zzs`00I#G~Nu@=8C-}?8LV#@my)Yp7|j=qa29a|jFjQF9Xub(m-+xMp0&!3^v&1*!WaySl~O#tn}$!$WeNz@bOYUAkeoS9ZkwwgpupT?FxnL9h$c7~CZ(8B7C zye|)f zi*oM81>VBqsJZ$0pUFuHJ9@h5g7+MVFXOWhld07ci;8-u$V3Wtd%Jzqcl6GX#ZqV} zDjSWfKVrHo9%EBn6!Q+5u*d0vxpr~lXPsg#-t4f(Tbu0aip}cf#e(hwMc3cv681Q@ zhP<2EI~)2?G!6AU%sB`smHz3Et*WYO9*4u|2er2_o2H?`2SO!2f}5I`myO8@p@FY# z=(LrUGmq;#gQq&>$&W2p0#^DA8TVKW4arV|J_G;jKlpO_v2gER6_CJlo2{wgYwPP@ z+lhRH%+;T=%v)}16MJt9F}hk?SkQWpF!)9pF!5rK^z?}R@RWdvyOW9fobi8^dusN8 z;GBYt)VOh?xO$k?UYfYVTB`oVdyB3B;Ek^FX)Kjd3+l^f23gRVY(=ddfWhiGg7?(9b{cmG|8x%e#y#z4YwZ z8uUXmbyfi{u|SFw&ixwsc3IYX;O;~vNKxs}uMDH_bDBSgqg4Z8$vO1oEHfaTXkIC~ zhIPUE0F7P+Xwk;g$s}j?Df=DE5#O8!PlG0pTHA$Ie{7;E>uHn@$s-;$ZZF*qd)X#8 z%cRwd(`NbakVSWAi~tm{7Z#3RdXFkIfZupgZu+xyR73qH9WL~biQ8&1-x%tUiHOjx z9x);#Box-{yk&P?#Xjf`NK+bVGAYYnK_L!r2WSuhHtYHG#|sCw=xTa)c6%UCH*Vy2 zs^NfA@Id~+i2{g%y9{E>_!pO!tfp$%pgoyW*8lgxSUfyEV=s_}^Bi}ns1{zn9Kgm- zON-N)+L)-!+)Hg_=OE|cu(zJ>e4*H6e>KwrsUcDxc80M|UoNdoHq#WhH(t?RK0Z3y z0Xg^a(0yFKyfY%rP^*Z*9GgV7;RWjBp}p^P0of1~#lXih68HYJl>*$A@*qUvT;^&b z1S;#(7`;udx|Gv%KYTH|E!#ihJ{n*ljK(BJodZv6>pMZ-iP?XzLpyp~47}vMBfRDA 
zi~=|K6dyk1IB()WIFg|oQ1;FgSL2nCF3o&*P60O$N{|LC69Dnx;qk7~OdM_K26RIZ z^*mU6ZTe=&^ymkNEYG%lGqlMmeckdAHf*FwKMB@_^FJGLV<=y98<2D){uvb|K^Rgdw0=9D|I+W zwRtU$fwrEvHkN5lOiZkU<5X+EGQd)v{cL;409AMb2Z|&eN^k~547@LNjM&@N^S$H2 z=g?qubTREfTuvJ`cYfgu2~vd9gEbjy_J`cvAx;m-z}C(7k|@+;ExBJLu1L>8B~p$) zkwODmq|I!TB>Y-AiVIAMx91oD#pe1ZCnv*{nB6AMbi-;83RGawBg@VV!@&uRyXpYS`hPfbmYjm0}9q~H9XM+n_8$ez0Ph|`RGT1)MqOcI-7 z)1r_P&Rg<}Kihp-syU(QFS`Smyvy*cta{FGaMRZ$7ns6g_YV(e=>L|6kkKqIoh2~P zK#ph6OK#|KL)=x_v44?roRc}4GHHX$iC$glNADc~&K!Z+YIZx!b8rW|kv3#ax}G16 zUIKI(=g+Kmwv%p<33f(#IYI{(Pzo|_l89lx{lfwWIgjGZdbwLd`v$kwgjyZwjW9WsLFf)qT}L5BCdC;^8ol+mQuueS#)8NZRAd`{x;hiv5m( zS1NDuIn_0^oxc}?-~L-L(P@aNT_)*5)<(b7-q~D3OSe>TO!N>jDJd!mIFJViiiwT= zRsYrD=)fELedTF}XL#UQI7LCUnf1t?Y&acmTUdR7#Axn|ud( zusqLRuYVq9opW`%{!cnn)8V&{UiLwKZ<_z*x+p2p>HKzBGyWwk=*j=~ByhYUP=%?% z8|f-S^Y+e6mp7vn+?dWEXRcq4F^ZwKX+jfKYZ}dV1By-1ys&$*44%ftJNao5)RvV+CnMR?o20N+}K!e2}30oz&a+8&l77ijo4@5LKkYQ z_XupdwqF@%vfd+>xnpFc%Fbb*JM*McSw|-^Je*~^_y3NLPU|Mg9t9>EBk-w-u|M{m zHE^%~-g>`g3|&Z2v1WSi`e^aW;?8+r1SILpbv}DxnpA$$zv!L6FD9*)8FY5B>XonB zObUNpVV!=;^gf#KQ5yf|Sq!P=N6T3P>Kxs_P=k#od%L41i7E%DpWNuM6QtV26K*^e_p+FqQG^4+*mN_0ISOv!sc+iy?{GCNcoGqZ-bK1ULJRvnX* z#MNeT=x8L{POXL2G&X!8OQ(PJ>wD5%`tpf$-x~EB^b$Y(sqN(2NjWVib7FEVL?IWB*FZ4KT9Fw zJ8r!OCuRoLVo{BiU%w`OF?y8s*(Q8iH(LJvbKy%tytqUD`djaG$;tZVOE7cQt7Mlp&9 zG$Hj+E`o~a1XO%UakM=f7PfM7rHp7}St2Vy|8tj9W$XR*&DMdCfDUs{j29+0r|tM_ zX6|^7`WNd3@)`BiVIn*%Iv49tt65zm7e1SD>DXYX+-n)yBC!7 zrxRORS{4~L_nHR>)pOY1+w9EqtFHcJtgl>JW47?I*|i!Sn$bKGGz0u0!jM}nJSk}b zg(ZH<*W7%}-QCkJlQzfJ2+rDvxJH^B#JekW8mR$O@TfcNq0J=z-CVO5zCgX4l z&&eUEpB%B0uN%J;HzitH(?0Pwx45`3`Wsbd6jb=u{QGzOKeXQu9}tbVv0~X-OP_!D zWlg$@ki8|DNm4%e^3SVRNaDh&BL4=Of}_Za!5{3@=3l!%phmg^)h4#CwAfej>&HW| zog)rOSedF4RsZq>HYCb>DNio=*tdnk?~r@a;htITI!tGqSPp!fRQtQIw7jqs8=uVW zy}DdtA9sc<%kpe_e;vZjE8+W>z~xz4NU0|4el=a3` zZ8@qYe&Ao!A%!c`0xcYt+>)f#Zv!F6f5CU^K(YiyTA7n){I4i9IxAqwJ^mM{4B2Ssvei>dt9yUt zcZGaD*4dRl)jiw6k&=?!k`j8Sd!NY#l0g&+Hj;*r=$I7^LorV);+p 
zjYFOsy9_khKPdJ1ZMkY;Y-p;bb>9Z9L;GEfQSTI{c!rZ}(C&dlU`>V6X$YLu%!e~* zWySV*b=4}hsUoa*u!AD3+w>k|&gj~nxH;_R$5qJRKnqbjUt5Z)=^EJl-;06~=C;V2 zZepfnN<8=dN!^vpemPZ>+i)$@FRNBIL6t6}Zq0O9tZoI{*-O##q^X3;4J0hudA%wFCs({-B&z)D66 z52nhSFaa;vc`{SX5yPx(a?Fm)}Zu z&1av+)p0oNN>O3b8p3hcGaXz3q@>HIx3?D$4+Bu=-txze`ToZ@@MFqK`GAu%9Jr&_ zxETl!_U;N2lC}uvZ$o%LZ|pJkU_CGKS6*IYOOj|%!`I$k-HEir1qNu+DwF^H4j!zS ziv8v!LBOXfVd~WFF+EotU}h0`{c5qHrG*dJ$?rSnWMt}49&zu}7uci#+nkgNh`HOn z`0DhTIN{wf^+q)%FgGgVq86-GZ!3HVuPtlXU3+u5*tktn=Uqu$WMr%MuQ2r+Utqr3 ztz8sywhwZpXP8%2+M0Q&{cKrf79AwrlOj(s)SHg4P8}R{H@r-9!=$f}4}0>2=m)`O z4{|a)nZLvP5Y63g#t9X^ru{03JGkD4Bdxl{7Mj(8=!XgEX8qZ{jZcoIC>P_`r2?pm ztOG=5$&XW71|1MgGm<+s1kymj6mBJg<}{01x<1sebq}6ZLj#?pg+uddW^N7!&1}Xj z^Bsb6!gK@ZGlVMvuz7FX&oo<4qdw2JNxVM)Q7g(h2=;qscIKNA@qE>)gAcYCL=9xr7o zR+)H3519crx{@hOP5CCN2_TTs&`^*(-Co58n+UM+$y%|< zZIH!Zb3FRyHq;BTN6FDocURjO`#*Q`3C!u>OKr(WGI#wMlr38dJSwVqnk&p z>$_ObacVw}z18Ih+!ajMIOOo9$`C-z?NA+e^4<3i1N?kUAuMC0nOEeQ`n??Npm`{? zN`UQx{igN+}+P7nzt{Z(~L? z)pGnPd-WdbInY6rb#KL&$T)z&KK*vH>M2q0SYDU?dn@>}o98^1GxRKhlDw(GZG93; zC`waWTH57jsWID?oq1pGfr37D8UJu+_E}DCuU#E!9{26#7icXdu|LTuILBq!?}L|; z%+d4MON1mOiT&a;i=(ldAU>Epd!dqqgrsJ#D?zNG%ODtD{El--wtseI)+uC$y4|b0 zuU*wvjl%}7KcOs&-RL^<@u0H|x9YL#@P75s5WGg?v^uh) zIm-i?2g+@oeH3;@+&3f8FHi|+DiIggnIw@VxzPQe7x1c^1Wj~kQK;&_(6`c=QKvwMx$9oa zba3z$%Ax$W_Uo8t@3(mM9Ggsv(s7w~!A7eFx}DwR-@kc8jtcSb-te4iYINFNQ!Iby zJ0@M7l$<;{KR=u#^IoF`YYu-$or~{; zAxMAl-~oVfPCCweo0y#Hg*s?M9L2!8*ap7`Ack@&v#P4<)~)Sc4R5#)K$P;`j<}hd z&Y|AJD=1zXQ(et|wWR{Lf%1%EU||Wha$QSF+NXPAE9mj?Y4+i-%Xi<|HIXjc>;Y5|Ngc*6a_#6}usa z6Q1mtW_Mc50C<87If&ut(ksHMwtx=YZjhrV%Y{GJ!N6pr52!zd=`Wh8R!2@MNNNli6|2-wij z(8x&EQS^+=!}+k*=H9Wvbj^X%=EBS~MQHjpU9n$svzw|}8l9|J&agyZW+=Wm&d%a; za+aV0S$Eg=7R_0F-%{I~8|6zr(GfG<*;i28FcLI=2R;ckM$<+XgF=ojdk02$-S4JRTMl+GCXK7jX*rrU=Ve#R1Z{nMEg1Q3k&UWn`=~!8JFP$w#JoTJ)$Cxgu2;sP3Lz@(WF1U|5?5jNso;GpBnI@%a0Hnq z^wy~V@k8}zB^Yyn=)b$_C3zFJIc{N6B{Am~|B!ko94x)PC53SNNy9;684A+d_3@3* zdAF20ZzI!QbmQ0jd}eAYDZr}@sEQliqUGUP2o*75edoKl>FQM zIB0oH!EeuibF)?e 
zGjwQx{o||y)nU!|@1GJAVL;V?i3N&1>gmnk5bzRoUH|;KSM@~G6|}HI*+a0IYG`-dw(#A}(H9cPoXO&q2Mc64o%do>Bf8iUl9V8IphW^ztnR^An{4UcY+9&~?-r z9h|9V-IYa#x=KYw1!0{j;&d#YQLySR4G$gd%Jdf9i%MXGdGATyRu%0uiF*PE9Ew4p z3;V$Rm10h_CptaZ$u@hc*z-Ul^d6dfJehJ^T0T);s?WRr|S7JIM}(YMoJVY4W2|@&=CQYD=v!Dl-;Q1sY`ogU z@DDm*p|2TPIc7-wMywdg6;)ROnY5&bH9`YqNhJv;KSopz%RPXKFnNCmIg z^&p7BSou;_q86R2PC}CjFYi>k3Nn zf+4kMk8;w^3=8`Z>%=C|-9~xdL`SD}(!_WYQl?zH_^Oft_oCjxU{Fo_};NPQ;_iX>BSb##0p?I4IIxI(i%C#l^n^TMul2 z(g6^K!%a<1KYsie8q$!G@&Vr4(( zrzk8#fuz3md6~13)%I)tl~Q3xoLYykq>mDEV0EB#sZ+sn@BOlO0{Dnj7t(TCdv%%P~Hzsi`RpBLIn<*wxHXq3Eg1-DCIZ zgp^Fp;_P(fx{VcwpyV>lKA@EnPI6n1#zaOo?mYsqO^RT?yF8T-Sf|D>SvmvRmh(Gu zJa$9CPr%g2y~daIlXoiC&s67JT2pf7ZFd(5;ByBmeYSpTTW&-z4UpbCFau`?7}a~U zm$ANS!if9z?v)}&5eQmPK5jgAe`~?uPhi61vbpdUKxj8`lTQ(G6FGUpz=6wpvjnha zi!7#XruUyVcX1uF*6ke<+$a+W_b$eV6N&N zy)i*fDwvX@0>VfnBI)6*H|c*w3C;+%;AH|=7aJMbm;Kqj%isYgRenE>KXQxeX?fhW z%aq|xOW%s-Z!cs~Lt^U?eJ&=pEYgwtM{T{Cb;uGwUk)0pC%6~=0xbQEcv1`nw+%63 zvu&2cU$lzMh_Uz?w=#ZOi?$%a)V1)gMGKSaYW$}S|jxgnWjqdD= zC=>DxVjiyjq09g<^1k$m&SX8*SaBB6AM4cQlXw?X?h@;gc)EONB5r(a4C%M ztW8Tn&qq+c4E3KjJm&TP-W-=QxQ2j(9Z>~Yl3Q^brQdJ|>jhL#-Ox>BSbMa_cc8!yUWKH36KEy=emEVEPOq2cv#pi3^?)-Varen z{CAcN7%y{vK74QULB^-J^qL4#=k!uZs(fy8BMn;qZE2xn~-6`GLUblUBG<^c@$j zHG}q)9Y+3RT1RUZq(;JZXs za>dWUM6Hh&>&sB_MQcGi(34SDP`3aXrAa@|>Tzax{5&e(5I3gYIMM5YRu8K8W@>yM zjkX|L{ajBr3ub!}BQWIfLJ(P`f3Jks#|H6)M4R+j4NjE;@Ce{Chawvev_70CKPsjE zF0Kqw7jwn$5OPAdp*Jec~MijJ-5CYhV3Q1f3y_9TdgWyXz z@L?nwLiP)~Oc8 zc5y}|$j}{Q)FS_`Wccnh-qG{2q7?O8-j#PaEZDrmqf?e=+MQB3|C0T9E;EefI}w@u zZ+6z#)IbG3Ma+-iYS!X47~wqEgksKS!}<^48)t*QmX+gCVKw1R>$ZO^Y` z1}dBY2oWPYhFVtT$v6U!9S@O5S8`xxVUd@Y2k9Ok77RJSpKRoFIyc~X83i*Dny&(( zs8dQoOW1ZU|3hqKfj%kGLy94X+w$Z(X9n~1rG^|#x&J*)pkf{QfaA+E1F04CjHD<_4DbpG#dgQp3YZ5R z4T6CVpoVF2`BZ|HA9j?=gt%A);6JdsyX-{?Rd}-92ct6$p5Uc zv4!~JJBt54f+zonM zuNnw&O-MB_KkR>$OdC znf5xO_4=d$6&hO}e^h4A<&Dmp+yukbRsp^)EI0Rd?3oKlQ%f2fqNu0n&1q|iL6U4(&|i9Y<2-ss>! 
zckc@{-&|%uR_#~0RJza*Yz^R!^6Yf ztQW2vYZ(4MJ}M*y;;Zfyc`*9LU&>5ns}*tAxZ_q~4CQYoYO-Pzp|6%mvjhC52gg_; zq7r{ImM!|x|4TslytQv?|HKX3-I%ch$VV@#9r2q3J6`Df$b2gHJSew>onxq}oW@(~ zUKQV5GT3m9h(2mA^8%-#d4zkPV~^pd*qhtXoJB)qaPt40!6;AmK`{S=CZ74)@()V) z;G3dKxU_Dv99o*QZOMROIl3gF!CLJf+-MpYv3D1qUdRLMbm`}k=fR0u0(|SLH|Gjg ziW}V)nj1)uhN!0&IpD9UhKf>U$f_ zeau;+hvs}3fc|qv2_?&)L>}Uot}bF+W;aiqIs-b9DA4}JIi)QgNAv%E*iOoW$lq8j zOKMHldW^~u{>Xa&+dV$Tvoyus)E%tqDlZkKY;JA79hyHXwRw2|`%2f#`DC2_c*EE6 z{Dp&#U3bv&FTYP0;-}v4(T;aZqcagNr)|~m%fo_H*Kg}E9||$@cW#Wa?7RcKfWxN^U;dsHExxt5y>o|4D1g81!P*A~PqK2t0_~I6yWyOhYL9Oh+}hZ{e6gg+Dkv{4^?v=j ztBRTnUpOTL0c;_zfyTYy_3T7naJl3Uf|)9l zPFgT4;QPPlH^G(|T}gSlkGHqX#mwz`L;LoN+YuS;;88OC#j@OH{65YolUe^N9PFTN zaPXG5_Z}*fk3Ne7shJWV^Pwe^3w;Bqb)xvc`Jx{x5yh5=l+1r6Q_m^mRadw+7=ZSh zfTbmLXzZl3i!C0LgkG>UqsD`YihcDdx-=63&Dxp1XBFc(P$gC z@JqHl^La_hfM#sW^#w_8f|s%u%RLVIj9FU0ej=-Uf>wSy$rJo=Y^INu4Zao$5cXJ+ z-R&*7{5DI26U<{}^S*Bx1Y;xO@%=4OpCsCU+? zpyECipdF?3bBw2sBgHZ2YKa!2zwuAc&0DvcoK5>atB1BDcsS=Sj7uJe3qKlIH^NkmBxP^2jj(-JY2nS*Mlb~oqEjeD}? z4S#GAoYSl}6yeBEtE|c_!1Um0BQ#e|w3MFh(8r^;r-~(H=tWz@MXwC&+xKpL>H`gI zg|l1-n7SwFU?`q#hLQGXI9-v6Z|i1OJICLBS9(Xarm@P zj*N^98kPC``x8?M^72Mho@PRMUIQcf!9vr=Ha2&cG7WdZKTJ&BuMJ`8>GZG6 zjgOtn(;+VE7daUF!p>3Mz=YS_zZfDJkiS?1^l}(qNWNA25c`tBc1;hBdGX4^$%wF( z<&Kqx%U+C!)?VitoYpNW$2#j3@t^C&^4jPDQpPZHE3HtY5YqGG3zh_;s{|{@?b_kt zXt(EJw%+vqi_@A>qaC=&1E;5C1vNPA1Vi3EXOH&R+5t9o(wW0_ySAqcinuLd7Y35W z4xEXvK8@vQ$X1OV^vUF2Y^~T5y-hbgNs68qFam*SmnoUgSoYV8+!7$unOJv<*i#O^ zC2Mr(cF?MaM%{j>)Ubxn3TRBT2+pv`A^lB5x&C-p4y~>T8=h(1#m75|mSaRc&2GW= zsM~JT7JKi*9w*sar}Gzlxf2CWhNI#Bub-9%Ip;JnmPT(p@1431%PB$33jjEVk?Trl z{(JE#F6im&mz9+@(b3+{#>SSTCjorgYh**J51>}u`;BU9HQc@igAq8##?HRFzq3}s zRw^+3fO1~p>|L(A5&K2+*?t9Kt$WVvxxzvg{9s0LoA(ePZsG;?4Z&T$TmnFqMu|u! 
zCP`Io2P*c(9djasbCtdWbJOD0&C1F!C-9G1DSJU(wIvHj?;6!y6ZEMaBg64EmWuw; zmlCQzWWN`eU^%EcB_POGt|5;vyRn>4QkTUy&owiv2^-izCIiYsAYJ92{fZau{YsGrU%*~n|y z&lXwogN>>du4WK%)F1Gu8a?dGsa9{HdfcP!u{P%vmmNPD`zx(iGrv)xt{v5 z!9Gk~rv4?PIDCgfQdf~tzyN7(OgvEj8r0pUfQbmd@7k-_<#&nj*~I$0c;gVh=OCM1 zsA<&qQQj7o)Rut;I)G|5dqhDq1HKRDMx!pzu+I4gB68wy49bj&#~yKUTD@A!65vxO zDCvH{u{inT#>gxPrIL*a@T#jI;DMr}h=`Vmhnri3X&nD&M#us1IpBlrkfMlSWEzM) zzNfkWoWM4pL~TI7?M7$?BVEj6_QYeV_HcDBWzfsCb9UJ|=;GhG|Lw0441##kMFXcyu(kdve3y_A)t@On$B zKWm;O*V-GHRuG48KQGPOib%4R>kSMG>*KYa9^*bOU44a|Dz)ppZGBZPey2>nHWxSd z;V&QR8y-KzBy4Bn6-0f z>%SNT%20d{3@sx4#(Z)AXs_|3!;*2J%<2W~?Ma69qjc|Lyx$V@ow$)hZR}@A9aehH z95&8{+1H~IT1*v}GCh>8x;#qED!NVYX|QJRP(tJw;;7P_QJOrLZikB$cP0;8k2Wei zc-W|2>c~#bs=k&JgUScxBR&apM<3jnbq3$kx3>R3lv5giPtNu3c%NJK&jl*3(azf1 znvO29spmsvWU1R8nwmP^3R7m92f+#)_Cmb8CEw|A;gpRRjurY`UJen$FjEEoEloYW z-w~H?EFlW%K>~1f@^HQ1cD_7_%X(E}L7b*~#P7RxQ-(18XWL8A z%jzml4R5=UQAEvg6X)uS8)bZYX}C0&SJ5Kl7o+}u!H>$Yw_W&JSG8j4($o=hBJMpK zU$Qg6@d?(vOkQ}C(*!@YlF>Sy+}81nsZw>Ey~}|2^W=D@p99Is<^)M&ZZ|T$yuIsQ z;^32WTS)I^D|_v!!Gq+v%*f}^L z)?p=l3;7wzrAuF_!VuBU)bwsx&{zm$;Sh~NvDc3{Lip0=9?((0zX7of;pNLTn1RM8)jHNpV8dPHR8Sww zU#Ol|BU0WB(&Nx^>3dU>i$cq$+UZ$E>9ow)Y3TkBnYdq$#P=yIOK04CEBI`Q|$*M)v!JuAd*jT>eMM>4>p(4B+7GNT!2nO(4=6&?WW;n-#SPb?HnD|?l5g^Z7qSv(*66d_fNq1X}`!nI5>bY`2PL- zgky6u~o%gWVT{a72(R1x#~66?4Iw?p3bG|l}|1Fq1zG;c~~ZORDmkDcKWms;OUPB zgTo~8f4vylfus&_D}m4!XDlK-yk6l#>VwEfc_;S|4q$$_$B8{j$<4iUPp<)-?Stru zhYZ?)!`Ccc#Q}fv!!9phNm>XX;M!bx)Oh=ngt$C#J8zQgFP~WiQ zQ&o7z`?WRw4A!a6S1$fyGDS%|VjAeIWA!boC8i2HMfAcl7#*Si7QM7Ft zLc+m@_pQ2IO#QDC?;hn4sC@ohy(E1>)&CbsCpZw(BR5)<_!t%3g_-Fr5W zs|)Wz!@%Iiw3MIt#}3#&YicAONx)VBE-FxK*BsXd{N&z7KL8A`o5=Z~%n(T3m>^Q- zCwD9c?ih79!C*ma2w6Slwz-^17MIRX_rRY~&)NsNxR71F%Ik6HD&rd;9}mj;fB;1t zt1F%<&{6k;RpIvm_MdyDGhepO`})mtF>Odx-=X4<~EN=^sh?y zX7^40Pqq`jzE@^%(R5gSer^n;_|nJh;MnUBp1{HUyx(S$xi+IgBPahENtPuh->L3L zdcJ1H)U`6li^BB>0xPP{Z2(sKk@vAJneYXyD#X4mea=_f68HM`Yp^=B?9ZmeQlOfA z{=n4;Ui1N2IW?`p{+`7_=0(jReADJz@yke-*{Z9y2~W+o&zj)84KaWT-G$ 
zKnR<#=8Lt(p(0L-`m*b3|D=XquFLw#z0axU3Jq`?pDryyqah{Tb+ ze?PxFM7_Z1wrw7C@CuHC6$1M$)3bWf_PMSH$ zWnQ&DqrP4d(Y?BDDrTmEHq%gT!ZPDXNa5k)`alaw{jK^x`1SGginfA#1+-$jB|{Rq zWTZI=o6Mbm8?A(7qJl;@#hgpdeQI`U!kfHSSDKloEt)NE?#U`L@} zDKUrl?oG;hDmQhFNcL!pj4#%k+Cj2ttyL^qobJgtQnbWSZU=yH;rwK(YH{H^Fih=D zf4qk&-&*()UYAo~8z<V@BM}dr~h1B zsKj&={BTV+<-8hN@+`}fFTBVLf$JQH|AO4#kN z+-OoUVX#p1 z{S#^7{6S07T?y6fF%~{S=ed`_q|@BE%O}q+5`2l>y+<@Te%ELS^DgUS#uU7b-mJ%( z>JmV?!PX2wBpC&TnmcEQhcI`L z=n}Hx-^I>=42IZ69|w%oR0c~?o}|YstYn|Am-%w1&4Pq+s?~x71g=Pq=aaXR4$6lP z)*X%y^%r%i*_-q7N%b~jIUnT97#KdAVxtmv{-QUYcEpk;Tj>&YSsch;K!BU~w6(M_ zpuK}0#b&cz$u;pjh$WQz{EpTYj2Va(d-WhmrVPU`&L)+54?njdHNwqPYuXiGyZV)RDjAIm&ZZ6 zURnb_E(9?@nQJw&FR)F{=$f|)zxPx0s|Z%g``EJ|rkWfay!9#&_-YpKetBrMN(Fo* zWFd!J)kiz%^qAYetBTrqIHvXyRbY9vdZ<{Pmk%J=Mg|D zZcIeX{XKD!PD{pRsyRl!^8$`LTNhkz_=r1dW3r{bJ{3$uL8`QhhY0hP9giife$%n;-O%*?z zuY(ytNBU!WkUxS5tUa1%$ph4b-~>_cN6`=A)_uR_c0u`L%?IQJJs**7^v9QyrLN`q z3!gGGL8!(2Exa-dbIUnUh(X*xmV&eP zsHye+TDTtHEFA)kX2LGBWHQ{xR*2=$VAp^2RLNkHw+k>=NUfP$<#}W2a&AcK$b;j@U*7_zdlM5^{nK4D111Ff@0YM0gNESu{)LMPXA%4;!4)%-a#vCB`(YBI zb%KilUiZTVE;UfoEsc*8UAq<&6hvbh!(ps&vz_+JxjMbOYfqU;rY9$FSLHJa>=V6} zj8SWCX&LZ=YHHaEJ|vke2+y=oGCuAXDb8iAt825t==iakJXDPR?o~~AE~2N6#=sdl zU{3PYECO0^^hSN*K7{}7$ ze*DGi?^P}D%lYW&S%QyPXNj$PVa-Oamj@(KIM~|*txS%GP>-Igh9a)tL5%c=bf5e= zGt+Q!Q%_%?h=>Rb&8cO4l^7QKbMn{JKoiy`9CDG~0Po^OBkHoUGF$#a@Q86s+qSrO zuTK>Vb&7ROXSn{F9wiOU4Hl%@>qE3rORSO=_Sp&)j)Ze2;agf&A6|Q(v4rIcy8iCo zUyo*3XTQPR1p=6ZV6?DhFrDIIB?RVOej;t;nukG63B$^l$P4TGhLpb@ic?~}%2ijZ zlHkBOH`46B`elMLH4ox{$X7sd;dg8sbo`E#>;3?|my~)!Za*IIftZ=#?AgF;yW88! 
z?yK3f7I=YWa416t60Nm2TZ1)62()YS#+h(t@Q6#*cAFUKv zx}WP4AXVjY^aAam@~vxRLSdv6Xez>@ezm0P;cy`~@5LbizxGkVzf6oB=oiy|3ottq zI8INex#NvjKIu#HrS8x!E<>$9dKyPlbaVvj zmPA`Ck~0rUpVMtnrP_2Ph_xJcD77ko6Y0r2wqQD*2d+>}Gl9jn#_cianwv9;cq#`1 za~B;=A>gpKx#|D*ZO~d+MTH|fEd0}$q5OFp^3@2ar3l52huBY!Lq`KLFJljq^C)tmE>lT)+t z@OAH*oyDZQgk20)!lNc7E^OG;+ju$Lz|(8jKN9{Q1)M88n)%2`%WjInQXUiUzWWx6 z6DZ~v7g^08|A0GnbadRbO&9<8tSd>TB34L9Xt02|PA{*xm}jX0;22d^RlU3?EBVH4 zF^$6K&z_ACc5{wgYHDsCu5v4n0g1>a90Jpmb@6xyJB|&$;WX=38J}jZf|2g_48wBj%qu_1zd{5OouK=t=_(U8~g(n4amM6 zXF6UK=!`?OJ<0sC3}m32x)6)_T%?4==H}&po;}M2P_-e5?xx$w$d#z6-!n7dDM*V( zP>{U5yDrb(wzNF(@Hm2`0)CP%=#im>3KB@=z6+;2c!Bcd;^7Imss`q#CSD4(!=0>` zPz{4`Bxiph@)KANSNrs!hWz;QY=>Hk?5Cx{7?6r5hEx*VL=3Hhg1%iOt#~7umMvo=(F9gd_OF2SBHjr!7WoEJn}YWRAl_5u>?jX}0j%$DV=@GHgpKg9WDU zlej?y|5I@Ns60DKE5UFDBPgPBJ>oEX zA7%$WK0cI5WI(~tKmUTlO2UX4&%Y6w=ru}8$Q2@|#Xkl*@=0EVeGtG`SncfW?2jg% z1{14#i+_Z!CHrj;&O0st6x+P$pATaT2r?s~sO)|1O6Q^&Vc10Q@bFqZA+iYTQ8Id- zo|#z=DH1?Pw;yrSNZugK0?j)-ZvL&{d~4-Wadnj6I2LPZVbQYJ1;tIkWlVr*w*0>&oJ7eSaM_0sAHpGQob%QU);8hW zT?l!>W)nw3$-kwg1z54=1N$4wCIGz}nVFfPLKH@aj!vR$acas4 zHVH^vjE#*=O=WhbUOolAf}UR9^WF>)WzHr$aGQy*y_rQux#BEnu6_;M10FO?q zDh*_Q$tbSxt9~caO$Dn-?pTfRwzTDpg3S3Ve__t$zmqzHMKS<|`r4Uq*i7xd{R`q% zHh>4PmuzlMwqc~aFfP1toz(th30*BAiyt5>f!%&vjxEc=wP(b2lvz|Qlbjg!he&^5Q=+;zwRFN?l> z^hcFQJ@rTHcj|~8bQ^q-`Q?HL8=Bd(yKq}6k9B>nEGlj2`m6=oekP|;$Tx~PY^{=~ zM-)6dFhfZQo8WjdK~~GZa-&*{-btscyIZpyZHXxx2rSyILA+f4y)xMUCvJX31aH+G z_&#ta2Ms?!iGDG0Y}!@+WoVAMA$jpPPV^GcgIU!GJ6zbMvMdR02dG7eK^wL_p{~0~ zw1Rz)8ypVBUaaiWX%7{2-i%Q^Q|QYCj~_`CUeo^-n3<&oE@y%S<@dxyZEYa*S$emw zSmhwH(|6$E#oCjp3H{My!3Ghu%^Oj88l0qGdb(*|xo#`%{*sslRqQB~c>Izu73*x~ z(tK&)7Zw$rc9`!VkY4@GK{Nm-axe6DB}JEu~Se(_fe@~KisJcw1!{=2wp$% zsv0={`3kVo8L-cS3tGh`HMK$WKvFWWH0D{e${dS9YZ-1Yf3zPS{W%j6ozKt9%jlyS+Y_U)Zaq%GY45v3>v5%b8~YF3JN&Xhq#cCw_yT& z>K7N?7e`7vIy&GuPyUPo`(Rb|p~b&w!VmgkFQVjmfj!JMdP6>?H#$n=6k45TKe z;dm-6x7HQ^imOpQSD-^Xd^TKqc@fZ|6m`V_HWXuHq^}vnl_4-*hhAoyn%%63BHeQ= z9PxP$*U_$cewUwL8ZLoJ$Ph6y 
z$jkEpZh;>ua1BI+gb?(>{{eN+4haHRgE1iK67$^Y_1zwphZVA2;j9@?#o5YtQpMvo z>3)wk6+FbRvfY+U+7~9e^Vkp!c1JWCw zs|ZXS;4HI3_yk+H3)D83#XtwJFQcPzmx9425IUvD#O%Xl{Qdj)x%a9X8qjff(!f~Q z`3d!q+8OdG+Xa@{A4c*;AQv+U-;9Ks3(v&x=f&9udz-`7u#9EOpClOl%THv^{LvXE zI78ec0M%^(ryUQ=V)pS|P$4A^hbqANxw$S@+zbjtKaXlMG()Z?K{jS)S19TR>IQ^! zk@S$$7Ma10k*SJUD1ql=a`Fa53J{j$N$T`uYGt)x6YKVlxh4udY}KIqQw5lhyeF$CL^iLdv~$O5KFs9^{PN+aK1VJ7$1B&6WhW+&uPinae3yf_eshZ{z|FtP~6Q1 z)9tr*wX@1~Rt9DU=1ac#_~-Cf*Q}FAS{j>Fm$6VIy1BldOM5vt*ETj(h4d+ zMWeDx_w-Lz-f;cJCn1+js2@#fv7+R6C@jp&&;4T2o5F<+K7q>ko`_5E;WrV{IN{<* z88pWN2?UT)-K=C8c*ie%?z*tBJG@g+O@OKj+7HQ-j~71_i@V@K$$(aF){z?1=z29gW8WRbp`A4oEcYr9TDs=WjI{OCCde~kxLF*)jD*Q< z6J*9v8wgr}5cj2r8b7>9*>u9^vG8n=;Xp-3lI^^xB~sXl@_QA}Db~xx$hZrrvaGb=MIqvRcEoN{lb!uOI84y=;#a&Dx9t;Lpw92mBy`fBaXb2V=+5a}838lc} z{0f$%u(D`UYnEU5>o^T{vOe}kYD#%2>CF0V`wW;xG=(*{V&0XZ3%*@|x`7(1*665Nm5G!HEMp4q528Tjc71+6SjTiHny2g6`l9=uhTGx`Q!XTn!!f>f}b zRKz1NI^JgKVO?7$YztVdkV(h&p)G=*hs665m48*AN+(4`MEFi5$vbKWd^>f1xHwVk z7Rs}%!!kv{X!Ut2+uo~d0%VRXVa4q*BcXg7vt5=Mh(Z;&5o1bNOU&Hd=p*+- zphx)HUsxi`O(_;gkc*`L5Q!`xi%-#g{x+@DR2hg#v# zz`*O7imO?Bum=;&^jmI=`jk0vZR5_;o#E3ibw`+^m`%D=R*JcK=#v5!eMOs`IZAyu z8sKyQo(F`3aApI_d6S+V{1w3HxThyXh8pBx45LgPyv2!I>AP1n@>Q$AeK{?caiHU> zC#XNe;cTdYXOgMz`J~c3E7D?yY)RxNP@IjhB^6r^v`|QJn{@mGxig?>P|?}yRjrTFOXin`g}r~Ovjm0j1KAv546hQ#7RHV4o%{5{g+Rh0#UESZLJJae^b)5sy>Sg zveH#d7zkj5(F;!$z=7Im_2{vw3Y=K%kmhi6FLrzclM+gMK>Wt&QSL|tD7u4Q5cnn- ziCF7sB=8Scx@^Jmm^=(;8`n{IUBn4Il+p>uZVuI>{FjaoRpF?7OGMwrdWn<3y@UW| zc9Ht=B~sF2^M2O6C(PB+Q1}(6Gvw?(Sfmg;R4AAICSDaAt{oJ5sjJG;ml(gkBU?cp zE8r@8W53Ma1z=S`31Mi2)U93jHZ8%f8R`4FU)_Xz7qZVrN?@s==H<@hWzLhwg^nH4 z2w7Zca(PMagRw#l6)Y8EG|O&N8;i$kRiW*q>;mhlNpI@KQ^@FDxg-va!{Mzv9PDmD z24On#bxm#8ONmLORbVdH8-k7N5e;Uhu%~CdH`?Rj+<6@>svtuo7*0?7|s%9 z*EAqBZoPGd>S}Fm#4YLF(HQ(6i5h_LDFYE4WVrCB79vW@*c4{?gVo@p@#0%J4&z9r z+yvAUF!Fn!Vbe!aRxYS+s)sU40_t}tLR12K+3>KC+y2CWZd`l~l>piTYSf@+=R(1; zh#I~=1nm3(w&4S4LIi|Wy1E}@;-G!RIbQT_P!9?u~|{AwesVT;@S36O$+O3^Fc-J?RZl&S1M!RV55E$L$AD{|9YUh5wwN 
zC!}g3=|?4&&FvEi8yg$pDMPJ_XRU^hGR2Szw2tSnC4{(%FfD2@ryc-k1Ya{XH3fGp z|FUl9glOxaW6hZP@2m;0GF*naP5A&}7<6>R+_T(K!hRJR>nRbv3A@W;eCl+`8)YDkKL13nE`qHjwW_G7vtm)#`xLM{*y63VF$_=sC{x z-TVS-;qnhV%lGFhwTGbF?|WZ9T??ed&POWs8bbCHBZ>KQq#(!BCQgEy6SG z%o|#hPpX9HqLQa@%pBBK#7nH^qPjFb1(lfw4YhBC>6U-NVN5nWDLatuN%MwBn(2gB z2fIy)WHx$bt3Ges#^~yqh+i$|bEfsF_9EVHI{elk6sLf|#-TWoCYKMy;{BjzrkT1?kV=+7GkLd#2oz&n_Ar_5yf zk=Qd|`BhZMu2D}dG%$yhw9*9#t~SsG>;p3>ub^-^6>a^%wU=Nr6jxZl{b#|r_!}4g zvUU3<*J`Q$xW!mZnY~Hx+(o&bo$g-q>33{#OjX+sE8cc>q~`9sI$qIb_N9-aLW;tr zNaaGzCc6$@*lm`64Y~6}t;R{|geqi<^YptLA?8mm@T15p6S0xivcdl3H(Q99sNA#P z1X`vw;;sZtBbVjhzHHeVMNQjxp(F&B|Ft{+d<8%7`vFv0=tfUU=j+Q;UH^}%w}6VO z?fQpDQ4Bytq)|dba-6VfPLAoWSyF)sL?&jNr&;9-1S@&9x zKElkLbFORe{foUXt5L=aUUz|Q`2dv5`eZHa0@$B91Ljp{-=~f!7}*13zifSboc$V@}pNh^H5IJTlR|d=ZF~q5!0(%NCGqd!Yj}yDf*MDPi z7`48#o9$;pn8>3C0f9URp&e+xyu3Wp{w8=LBEc%Lhg0S4r)49Brt`1D49W_#*2?o? zp13zOI{Iu^!Bp>&KrR(?_tIUsB-Bo{YF{yMZfDR{6A}_~C{lV@h05q(BPHX%Z?T-4 zQe147k$!4AQiVGcE2uJZ0di3chw*19ZMc2W_-F8zmlvhWj(hb`y&4R{tG>Zk#=Ez4 zQaxZ?)jJdiQ~fZnams@?&rK(x&nz)95eE4cfUF3jaX|;nW{Uyg2dsKgQ+e3@SIe_v7MIS(ut(2BW_1TLyCC+%dt3wE@gx zffPn$8jNC}to=a2|K=l%R8yQ>UKTWJ1z%JHK!EwVIcQ@)hK2LIEVWc?emn0!sSy`!2W~b=@9@!Xv!*$wv+KI*n+Ch|OQ(U(8$WK(K$LAR`J|md2cqu5? 
zV7y|C<%AF&xT}p^Az5LraqKL15WPa^G6}Wmc z?uP2G>&yWn>3~Q)=#uMOFqX3}d#-%;d4 z+8%7L|2c`fTeg2fF#Q1n!}QO_y9#Bh)pO``B3swI_Eig{Jl)b$Q#&VyMlu4?v*2P- zD}7=jUYz>^B%VHSm>x*}*rc2=TlV>Q2^8*7x#TkLjk~N>?2VJbR4(t-^z=j6Ah35( zaF3yge!&+#whZ+-a8InWpW&*?r#}lUjr@)~JD!KrFII>08=Kg42twPO^m;yu9R|22 zc?`5?6IMmeg~q;K`t~c@Z(?8rcXDd#76HL5$Q$+*op_!l*u<2U=rU&tbR4Ai z@oL!`t9H4nw)QD_qLo$9uZiNd z^Cj5o9VXn0ii-!E^sb!QPW|dvuB$y+-CIuIHRP6XZb&m$z?WPM~tx zrK63`C%9=^KtQ}5F0^E7%gs8w$LD_xO@T1D*=jUMUSiQBdGsgv*Br*|!jk=#dwz7l zA&9VSOkE!)-K5@Hh-ghTcC8qFa(4$czmnZRkTn9M3xr#mC(cZ*hUz!@ajk~(W>fxIB%yOduW5c1&577fIu5twX+Ba2<`|vuGiv<1Z(t|a6)t-hdl@_ zg4pgAR-e4RyT1P{{(;ylM z0KUxDvNTYo*L|WM&f7_bK=g3cJap%jgV9X@6cgIp2M5mFt%=MR(;)>PuKunMpFc~< z&xbt#94W+s!CV-m4u_ZgiGrg<7#JArD%0UesOC2Qsy;qgMgUqMcYQGcoo2#yOGe>q z)8^(T&$y!juyLZFeLsD2o()mKPSJ$E(tBc`gXP&~4?xF}j^dUn=}rDBnus1uQbAn- zpFzN+Oe|u)SU2G`9|IYcR76^>u&T-fLZGW96xXCX818(D6Vr zB#mprwgV_aAcwlPFJq(|utFn_r-m8HmBj=Ye4#s!eVSs(svQ22guQhE7|&)I8Lj+B zG&D4VC)+)C+#0H?`%p-VJ$nX+-E=kS&CbpOL0|C9Di7vP!EzyKRX}PMQ(g(MdAJYt z3u)lQFZC6037<@yoenrU<#Cz!KNXAO#CWBrH<_zf8_KMf2SDlG2ZokVxvF+^@Tdv^ zyn#$i5=cenp=1aPlw8==F~+O6K;81qn{8MY^78V;H?LLmnm(L?`Bz{x zC(<^U&z^qTUN&5)1d~*9Z6Pj^9>-uW_>KHhaDI$Az7 z&CU|eQbu1Q1l(cqUgPWxvj?8RQH)6BfKwP`>(0*S`r;C&Ls9uM&U=O3Tq_8Ioy8tw zc;4uaU&nRb7#``SY^^gsfwhd~@*MA*ceG;GQ2iUiMY3aM_T@PmKV=@wd)B(?x4WU9>0hE1x+x7Z( zo4-?2Q;lPtM9mb`n_n37PUm+Bo4Xp;!Bbzj6a-}~2)E7)-#^UMd<4B3id9&|^8F<+ z3;9hBeGH*WM zQE9&tcBWOP{X)(5K(cR^NojUC9ZFl>22Rr=vOvUPn=9n(P{I_uSl3O7n4q-6>6Vrj@O04N zeut1t;kNz4GUM)2@b@kg?9U@uxVUoC(wy%8`);PS(7FTBs_#IX9hwo6l4wr&q;?v2 zNg^KX!oG#tQ(={@lHE{MHN4$3_V@3{UKfll4UdkF=;`Q|s-Dto^qFv~WNWgi=Bs6= z%L+XH2qQoSxOEjv3X^95N!Vq7G~L_hy4>3W+0p?Tbs&BpgPLJBAdd^QA!GLYoXyfRT}rmbPBk=%AS?=e|b2SplpvOft3> zC1|97UYHMOS>EI5cXo87p`~Tmd&P6j^RN_3p|kx)5#U2#|MtEH1*J+^bm`8@hy(B+ zXL}xJVu)*;LjTRR3Q0~$DeQzMiG%GPIVOC%4HpyRmn2m{v^00Xg9m74d7K3n1uoKn za&)mZ-6$ZyW5$4_g?5o?sat>*mS}u1@5!64Dmy78c0uWP_TaQ1ZYP|J`+QYGT>L%~ z$&4VbB}q2V)y$oSaUQmDet`nuy>gz>oRRvVGF<7Af@g;E)LHK|Vk&eyx4E?ECv 
z6Ipzh33^W5w{HeCzo0Yh`Hgq3t0nW&rfMaaNwPDhT|E+#lI%9DUm^Q3@ZjFzPx5F5 zE%wC4m zA;J?tG=erG^%5KO8L~fMnhOm01Lsz%k~D-!n9mHIU7G;J-ReIPzzSRJ{O&cZay3^Q zrP3M#-`P*uEbbnfNde_f_a(zq+88PaP+!=SHz0i6=aGvQOyl577ft*^@8!1BrviQ` zJ;{Ehc@mQ7cS?;6bU=4H)!qWme^+*GZ;MD5|Tq!xZ zus9cI=g4qVI51qRL`_R8%l4t8W3k}4Y(^iD(%=Ok)x_y~L)R{x{%xK?%t;cI!;qS! zA3uI9`UF7pM#Ljv>@wMm6?Fot()qvGg32l5qy*tGI0}HTj8i{iSsTbfbW|C;1S+fo z~?sc zrvp^tMcWr<$4YH2Eeclc5gb1lP**(p`FMFj5fM9R*=?lo=rt>VN<=+t7Yb;&Vi6`y zXhbNQa5qnUHK64HsJsw@4(u~{L7XlIK+`MCQZ0K4ohS}gx33U7|F<7>8Crw79{huQ zlarISbFD8L6d_HKav1r4Ie0uGhBh$pi_kEotPEeJ1YI)$m0Q8w&y zF?x|oN=nT#Ao=t}b(7Gii!^m7ka{SAv=tmRcQ}uUXfcJU@KE3qmtWw+#>c5qlA?w#qu1Q|C9WEC-#xCl@FV!IU@pgmHV%!|eV0S~~ zmp4RAR<*pR9v;yB2PKvB?s7y>P<%5j99*t4>(=($EXOSWR+(ez_Vcov2*fgaJYDti z*Pi_355qo)TpXQPzNk>=+zUv-*UUK=axNmO3(Gf~nIKtjrYL8isb`iwd&sy2I8_?3 zZ-Gt-sc(XYZ1Daq0_KIHHr1`h!oO6ducyZr{2XRF7$I%3hu;6^<6ul(Xv|It2=EAI z9};%b4lq2y4sR9$vLl@ViGA%_$qSEWLxvrkJN(;l58+fxcX9ELr*dEq`+!5%&{=WF zKA>b-EwTr$fk@qF=<())XatO?R^}@!PSv{TeUsYG6{%lkXdJ{7f2o?E-aW#PcxvN(W|3+egxX|w6sW- zz=?-}X~TBv+`QO7eYxh+V?jDDbb6y#SDs9kM6Wmft`|&Nh_Z7-0|I5E6L^G)dndsj z4i!*Eiy({~O})Ln2?^9EYMjvP8`5Rqt7GS{{1I0ci{~Mc?8S~+TnChVA z)}`{3-I2o&moHtq&y$}YWkNU#t|(fm(EED}buKuC(Dyf+a{Bxk5Mi@MR#w@I3SXgN zhKUF53ZpAgPf6cO2cPqO9ynXHj}@lr{e`n}&VxFF3~eo&orAXS?Z7UHF5vD4Ud}>* zOV@6!I$X*CEM<2$#rFE6N0>$}z1Qe)?{eDdufwR~**U0BqGsg#yf!k@6>{4b;q8d= z&}%+-RO7#SXM>*bPy$5nS(_CMdwXAsF?jPh4gR<1%4Kf0EM+o6LZkBX%}tvaTh8K_ z)4(%9hO5~A%5WL5&wY9;oNL#9#+Inc%s^0BTx76@b0W}7UklDG%f-q6AxtI2ym!uk zZQ0tt3A$?rK9?!i7q5>)i&A{#oMO^-i3V#xrDYnUVLy{d>#eZOtqeTW4RNyFi8NOitRY4#YNMp@W%}AIe8gS;7NN!N2CQd^BPg$t8^SE zFVDt8rX%sCg3^Nz2wuZdl3TafK^;@L&P`;irF3%K=a|VPB`$sy7uR@{27zc(ws9`} z{}*NT4Oj^zb0Ur-DJ*t()xP(^b*;iZu={*ZT1x1C)y$L+*5i-x_}@(N9C-5UxE#); z>79Dbo*ZiH6gbyJ&j-5@wmMlI_9*O-g~9l{{lT$o0s(RLux3mr6+~}fv2XS_=V2UO zCicg*V;|#>`GZUy*gz~}a=Z)=%gM>9s;aW-01R#SnFMAfe5EZqaROWOx7!YjVhXU= zAhSdITkOji02mh)8R-n;eq#*4#|UBhf6*TZsAnQbX>svR-)%E2ES%=HHkdr115zq9 
zQY8yPKPfIQuE*|>amWWBZ|?Y@>-bWosyOfBOd$Orw;yg#$cTA}nJvT|P^*jDs600acnQR#$VBM`c@DIFRt z=uMc_D=en^Vc&71fh^(g9`b#8d7aJ~BQli<+t(e2G!sD9tSx&<&` z1yL{@lWPmN5w-Ib7Vh+uxcvvS)4SQ(bd@xaR%#I4w|~gRk_x%Lv@}hyfQKbqDUA0e z-}@%koS?B}6936!8t(Su%JfK4Ax6Uo;qMd_r7q5nyW6)PYcfaMM= z(4)tSk1(*p&W{3;8$x4w36R&oX`;yUZDyMRV%-N<5G$28JQ>uQ$1)UD=~<}2FY=(a z24k6Vk~&TP>FrgFsRhe=fVWQ`#N4K5u-6yPU&A-DIq2Tl*~v&sc3)eW$>#=Pg`EG5 zczp;ZP$^K&Rls~hSg(IvW?)?}f(R>TIu98&7^MUEK$Vn)#{1^uy2gp?ls-2vU#1q0 z#okOAE3>{F^!Veui(kG%;vpuwX|wfyHoCXXY~W)25Z~7?Y4{IIO*lCcjRMID*c}&c znszLH`(Y#8p}jXuR_{~PaAjupZq&vH5x4NXj$nth^mJv}V*Y=L^zlcI%R0xlBfGoV zD*@%Z(ltaar-h0)D0S2-%tq^eI6D^WFd*eb_Dz?iT7s2Wjz+p)Aa*@r_XWC6@{`1twSa)YkEA4Uh>{{D z!pZT$6A$O%ngFg$p|^yB3hHFcf$(3PTiSh4w83fMTE;V}HxqJmJNNspVq;qz+7@wB z)HS99XN=hOMW@|473ayX=ap1jJM8KvQ;raqH4$(%fE`G72 zHa?y5>AqPDYaPFq-Xk<-a@7eX(0@J4-gN=rrkR(6aT9(8{z)qBd!Y410olap>8YiG zKg8f>DcSypXNhW6PPJ*a#?;iti`XZ@&4V5bo;E*D4b=+ScWZ;)y6@@lS`KKA=eQD+ zL|#L*^#I#DtER+7D)an#kS`Jzt5(Hff3Opn=;CNzc3Ct?&dI0u1 zWlJ*GZjz9cfEyYPb~51v@<&!e;x(6(`~_ePqNB-UA`I-jeSF;Dhv^!;!B7DHS~Nq% z`o$z^czJo5Ut_g9+0S*AG@t=yZ3asMsWmf?dG@-^T?x=SAP~G;xAY~QNCW5?8Pjuf zbA456e#ySQFy8$HJ;6Gchnwq$%^3er-M1ve*Ki2_{4tBeh6im7r(NO1Jz_{uK_%d_38GgH@oHHwK!jzsF&<^0DP zLa^&vUtg$xTJw!iQm)&+K26K754bebK`hWlGDXhP9|3>^M7#|#7_c5t?si7gP-KHD zxW2RF2K<}9*lQR!2QTyhmDW~4`{i((TPn~~29OM>l>lP4-`m?SOaYyODC-CiAY`i% z<^^CAKhOmgZ6}5nPN%cmn5u_=rN-l*Mf?8hXkIN^Wo{xR0GdDQE;uoi;Jz0^RXl+%Zu+H zJ~RSw1U3xFTId0&t*w<&3zfGss!E6AUA{DJH)6;?oNFYR{-@uM40M~NeSONDepa?} z*~M@B5_D({<8iDA5jCJOV@Ul3!ne)4-inzjJlHe0SQ zzL@;7FX~uUQBv#M=LXM4W?TOqigUP#s2(3w#56T=!Wc}S(tq*%m3e-;q{Udey$qnE zP>8-gYCG#&ts|oQCwjvaG8T+)S}nbByK$2uHq$NFDi!7_rM!->91fK5PPqxKDp-lO z2e7-ly1;1XQo_n~r1C5DgFql~j1)*_k|gF#={XfwIN%$29xT56aEgH;?W)(bFd|IR zj6#x5&Wk*pSDNc<9LcCeZcr>BMO4~b{!KXV+_o?iYWjE;4^MAj?8%2ut?1)tDV8 zL&I5L11FiSl=JWQ5T@yfX&kaxFruw64>{JGmz1`MkeO>4fTWptLcA`@eE-IvUH3^*+!)kfT5=S^v~S+VOOc?oI}DvDomQUp+d=5a@U$(i}23mBwft!{;Y=V zD%X0W)=nJCkO8wHc08-Jvo%<-oE5Ns4lm4j=SqxBOhD@UaoE5W^8vbla<71Y;hYVD 
z?^#T1)g!y8S5jn>Jx#eoOY3Xc;(sS7lullLX~5{wTh(6+b5~pVvoz=lnNyE)p{uy* zi~jFR7_$+ZHh;Pj?Qn1Y>Lf^IS?Bn&p%;9(CTtcgF3atj*L}5#KjPFLJ!*c1{7xTh z4dyXw#Uc1Y5^Vde#3YD^Xl=kHw2(=5<(4iBczuCnR#R2I@7-y_IX<2_2H#A#BND|c z?q=8UA|wRIl$)(9$?&pVT(;P|V*9MOF|R|{dLw49^YU?(d+zT$)4#iXS-<`|#*vg1 z>UE7CQc_Z~m-RbI$-q3r;&*6$h{S`8(%d$hShC}M5VFbCEaMU|JEiR=Q&Iw9$VEs2 z;Yna)XD0+|G-%Ffb{vucsD>F@c$D5Zb&OX2L0r!YDz9U$7+V*9feGi;8Jgd}>xK)a z2H7Bqv^9=c`%%5GtQjiGS#?nPd;=SY;0o?1GM=XrzN*EY)in_jr6no9`V&9c&X;n&zGY-&Gq3 zt1*9JmTf$V&3`BzeF?qOv$XbWggjOs4I1lOkbWOOMjd&1`GkeNvc)$a_;l?0C5Upa zgUH>zNShdsik%(%l8TP&i$DjD1H&)T%ZvJkQDL*ZpB;Ks^9pMhV+b2ZHEM0zj zn6>%Nt3H&4TI9^g9ODZ2Omp}woEB!oF*8yzQmxVSI%WH#Z^=$#5)B65Yx(Z1o;SAm zTl0Rc(ikr?+_B3oqux5mN#5F;pWqK`m7bjK3&OgLf^u~8Ts*prsGfqz&BSAyXA4ny z>2$rLp*Q`{5}lZ3;j(de#fw;?yjO$g{5U zMfUR9G}S)o6DII$YCkMD9bee~qhJ=t1THHh%L~0+ZsTU>3llL!Do}P^5Sg9Qsy;#GDm2w~Q)`Mlxn^DFHkvvK*$i2H5iG-d%+wMdpM+hm$!+ z)uNQQOrOtGwX$|gE_$}zFF|7VNC7I38K2qv=XYu%7=*}O$W(h7*ad*FCjYp+04;I zYsE*JRFNg^ZO&RT-rQvFeYbmFnLoPa@$RygSJeBH=%bXsM3_=`%EL&=g46qKFIvN?;47l~hdKzF zTFY7Xd!2fVl0Mu`pV-KCZboc=7Fc}_T<*gj62Ui35V@O$Ow+Eoxj1EM;Q!D}S{E0c zYe6>b?{3Ot0!W2&#vsRwk`}+#TBFnjNTm&parspMseC}Yl^f4XMm4u~f4p+It!za| zL@0w5M=j$GkwVbUqxsi5L&0neu*(1Lac{`CJT+(4RTlL$x6jpDmICgsaIjHNqS(vq&l#W`VfS$3BGLudO05YxJqU2MI6Q7$t^Vj{btWUb4 zNeG;PO87uSa~a6kBT87P#e+n`fy_i zAn6yT>D9&8e_uI2onfYWN@0y;Pn|YepZ@6RhJ(op znBaOIa-7E{s0MUdm3wApebHqrB!q*lzUKa?*Wilq8==fX#KYZv9W%k%DkXEB|X|B+E|x z@ja2V3#+krfh?mjdHM<$S7Bai|AC>7mtWeNxtr@z|6XLPPWh`-{co$7-$>=g;-FF8 zLt8eR(>lvfbZfBbFWYpiV#80W#3xcYuEx%7^i`~b`A1%XBBM+?mT(W@-TB)tnka{p zJxQ4`E&>13@H5z+@ez`;9fjLKkw?x|o%M}P&l)CQNV52SiB0dFYPo9peuB6J+bhwX zX3D;bi?Xt%E|QX@jhEVQxOCf$TkBxL6gHoS_HR~KTs zwnU1@Cq???8sgSmLlE(DKG5*})k9s6R=`=CEB7)etnkM# zMSoJ-^mI~p{#-%+E%-Rk5Uki=&z)1XKJqW*!{6hFsJ%D?PBr5ptMuOg68y%NLs;9< z#jozET{9HrN=nqMvhdgSS9la2On0QE@DoKzh@%!(vNj2bc{{7ydB%%LF3rvQ4i0Jr zU{M*hbG|x;B7a^(o$>3}r!SKh#R>hm&U1Za(&T8xI)X2}34W|U*JeM<(VLropP8uTzo+gLUN2u5xMNQP- z%)!z_sB#=KduH9QcqQ@QilB?)lB%nZc#{;Sq7WF@6L-I_c(x^L>bi8SrkGb>AK;L) 
z%Jp6Dj+xY&8vR|1pLQ7%S3daoT32!1^mr_UgKAY#RE!q!fOajGR3Jdi&QBD2c=aM$ zT=K-uyQ}S?b^CU2#o#2As3X=<{9f4DnT;?es^8^il23MZxs(2UyZS(G@BADcprqr2 zowD-7{3gjl3%rO3J01cw{Hp8xj#Hwa>yPO$2b8}P5QGISMe(yZWfkxli1VEerPd*z z0Ke)X%QCH(|HqpI1Vx)PJUnb>!9Sj06#;0)1Lm2DOR`-p~A+%=}z~dnQ|5I116HY>^Jfl4i0=-G;O0EB3OlNs&@!tyKWH>Kn5W&D84I=orxQ%yv>3dnxYuJPz zXEp!LQm@3y@Z0zKYo;F{v|0a0%c(7}z;CDuoUgxXd#(>8?BNBM+d}PRTY)5!{^C(M z*o`(3SEu7W98RqhTI~Lo`}z?O>+r^0LnqiI+iD06BI3VUXh1zk4;pE|UwipVWYHV1 z?SGS`?EQtYbgu}13{)xu;$s;flzpaW#_&85xftlu^&8sjn_b^}9%B%q5P@?=|82gb z2k3hHkHZg<=VgSp2^|*)1Dcme7k%>+z0e_5R|Wme@BFt;AS%QP-sDws);BJaglJV7 zYDR=W6n*DFA68Z!*=3Br7t-iU`P6wHPIcSG_V?xPC;V(lcN{GZ-fh3^ihfmqZkY0n6upci z54Ze*(SIx9=qX%0IGY9h`n=aeBjEu|$717wqt-#g7NTfOyI zoNWeIFLk-f8?r6fS^8vg?=G8G5_;Yo8o#ikCltf9B3qKIMEN2!! z|Btr<^>`2czT$6Xx!`Bwt#ACaMJef|VX_Yjf3fF#C-Cs2b40S12DftN1DBGdO` z20%6yM>?2$td(-TE;#9B#s#5Pl0ZFGZRj9js#55v_aJ=(<{-MW89DrJ2b_Xa{I@|v z`@wvPa{)HO|_$bCh$)uort9;ae2&tKBjRmE8GCgQ69^JTo zy9)WKoo30`S$Z=qI#zK%Vfih&D*Y2|V^6mgC$zzzR2T7;h!I)55eoQ>9W=E)yM;W^ zZE%|2UqI}BI=?Tv{ICs_KdlM@UOCiDe+}>6y+9jygVgzOhGANan^I`3(6r^jSq)TI zy_Jlx=pKon7u_zEg+auC$mVd3+wJ>=P4+CK!%>wiEaU~sGc0X z*D&wkrdN{y_hEDHZSA7y4XgQuh3Sz2o3{SB>d9>&d+zUPG9b@3TZb_Iq=8JX)SjjO zz3=l~;mw(W%PU%1G9|)&JlTuOUo>PTN0T`ymccV{fQ!8_3GD|S&_Fhp5Qh#Dz0jpp zrAr8y&$)WK4v;*vBiLTKIHE4)u{m=BJdn;<0w7#jhP%EV)rP)TRi{{}wc z3ho_H>w>C|?Z%m;bjAx!{f-l_XYGb0BXd9L=!baqZY%9Pg>Gu3*09y(* zpVO~jDYU8}0P#s)zOF0LY~a#^p)V`1QNt2Y(OgBi^pE%1tu*k^d&|hLj>6+ONYI!1 z)%pe|+ur&2Ze{Q2S%T3X#D+)eqS~>8MRMeP)BtB6)wIRP%KmN?6Wk8EibGISV-|LG zFdTEj&=q>9VB%$(mN24Petf_2P!2bVf!shYl zzfiq8w1-XN{~jG3%${JHTA7+c5tWogy#GQeq^b3GL`f^==Yer|rwamcbvwYH7-C^HKI=ITlB7Uu9ijly2&;<-((2Xu2FmxBEVTZY zDLmbvh|6daN7p4{p`o8hIO^VFP*XixNw}cV9lm-3JQ{Q@ecvVFB!b`!aVnmKWOMzw zA*Pzz3%%^nlPiZmK)hSa07JzO3hAS@?`Oc?GqEy%wFfnM{{WYmWA#rwULbrWu8IQK zkc(GD#){_e8JgEn-8;ltcQM<13Y;E}Kmm9n1SX_fTmH1Q2&GtT$#JBR>8$0q=d;3l zjRZ%oOitqQ8@*{-=zJfNuCx2&$_>e|-_H_(uRp8X|2a)3oZPPz64`ug{eDxw+2?Y= zRja|BOS&46yK+sz=<@x)bi~2HLXapr7HQx2)Yo^a=E^f^|KUCi^nE-+cfPGn=&Wpl 
zf;mv2p)zZ<+ND{*SR$`|+T0x$@-{WII5)L8_dW<^K!C3MJli6Q`Zf9(sD#iH%d}I~ zR#4#~dO#x`Kkv(@%S%cHhDPXkMUoD(uGp1bDZN1mQkQK{6XLR(ErqLSoQIlKdpZt* zLI;7DRM0$4jjFCtMnjV`GVJX16T=h5**q=I$l4OIq`aKqu(O?xt>FCW>r^mZHEr3@ zL!)$M%t6|pkIYFaq(95o&t-?}BxUBe_f3%8!YcDJwec8qjU)eHlbnNG!~&%k=ZE)~ zgTCGq>&?EK3zg@1S#dGU*MG7yd;vlD!!(CYMS=gdk${-ky(@9TPG;NdU+mh)V2iT` zXm3ttC*djxbH_@tgl}nD*2FRcs$T6H{iZ`iY$17r6!a}t?c&bbS~m#0mu+J?v}eEw z^MgV>BVSNr=`ser9%ns{EjB|0>RF2RF$0DF;@8UBS~#)r zkRu5gLeVS}cp9F6-p_1bny42UvmfOi#DEtDu!-zvC&R-$sCVXesF8 z9GM&gE3dh;1ys4OR3zAclCJUj+*E1KS@2c@4hf7AdNlJIMRc+vj952FzE7z8C?Cy6 zQTxr;7I>{fHW9ja^TvZ zZThWXWgNxNk6K%Vc47SvSh*@x)oeYh2bHfW3Yf9s0Bc69Aay@C(}?m=y%_t-uvGsE z5c@!m9QAy30pe{35yAZ3wk{Hi2(UY~7Sjhr?|ku>jzKG!5hPqL-~!+)6>!^BY@Z)O zX}dg8GuB@HN#e2J^MT<=8vd&xYIagE7JUXve=zIve}0GC1DP;Ln{eKm zA)}*pp)6A?MLyrYUA^|==n{gA&Teh36a`+C-ikBm`{;$iP@TQPqajJ7u8#Zn0xZ^D zU;G5uAW?wPB2$Z*z-f+0><{U+e(Ujdy=LJUJZrdW`uB3e(JpmkJx|6bsXZP zP)I9StsdW(pvMYU7qDAxJ!~U`ZQ8C?+Uq25qdcF`1!{RCT=!$q50YMZE{?@=*m16# zsEa^hG>6qyDxw&`77`fEtx(d~;ek%KEy@jyjW-VWcLP2~@2J@~psBJNh#n5~%bc?l z`s4Yw$>ykpE^^-JO8e=kO`n~Ol`{APn&Il%gnNn2ZJ3=wS~bfu`=AkMr8q<51>`dl zcK+?!;6TV>U-xbk5dz<6+^mL?Yj>W&5H-9_4eCX*!J$j5fT_+7RmO4xKHkK`v!fuS zK<4E?d-N8dP(OCIl@-7w?7Pej{&?>0JNJ2!au!3>*RMaFTbr;4U?uwbcF&K2$nZ#N zVQq3=N*GJjCV~1NeA-EUfRtMvp}#?1ydwPp8&6wXzdI>vYv`ggG0wGWt7wM($1Ari zy3wnMfz3;lbKRg83?k9X(ZOy^-y9*dF8%&g*E6Nlfj zUQejbc6J4tYu*7@C7PmQxwcdaw{nACu<+tu=y!m#@3gteJ`# zkk~mCEJo=1vkCC&!n2yv)1z3B8gxb-gvg4Q%+rdpvgWzvSsKsK3^Zg(ND0fLj>gS75>sFxW&kX&1#fn=(9_c*Moont&6Ofc8h<6?!wF z!rX;Jn{9d?czp*W#Uva19yb?Z_VC*c6^D^Fwk-l9OPUj}57JeFCG?=kgvX^2`V&n( z=1`FE@kkDBqKo5*Ew26a6pCZT2)U}#U9RcIA}Bkm7IHSv${-zplv=Xyi(g-Ax0BIR zM_ptE*}BgeP&_eLGyH7uS8C-ch4Nh9TG@h#_x0r(vj znB7e!g9mGCwTwmkp~|(Zlx+44(CLHXx7dVp(`d5faf?>1N0qI4CH}M*gCReLt%y?V z^!4i1^Md=KttjTuRZIgBB$5OBo$I0LmvGg<>5TNcI$vs*tbPpYdopIPYTh}Qn>soQ zePX_!%Jn{4=m>V49rwlF&#M(m2khn_&QX&60=RG?Z$c`?S|*C;z(O%)s6?21N5q|Z zTC?h?7P9v{9wy^c2y@bO1~-)1qtZuv=GLnzD%L)6G<-D3PT 
z3~`^o(KR+AdGxsUmq?`QlZEiE=C*ANQ@hGKkN7HvvlO85XXp8U{%U1bQ7XChvQzw< z(McakxC`}~BgQz9=gPn89TglMd+t|!85Hnp&i`%9_wNCg+~C;M57@bdr^h=ZC7JFL ztmjtWIDn)jczpXdOs)(MHF9KT=%fSyQ+?`BPB;T=baQfg$YpBUAS4l&7V4F@;%@E# zC6ACzo9PdR#%|f8uJL;qDV*aaQbe;xN`@ir7nn=JSK*$FQtZ-Y#^He6y9O% z8-z7A2^u^Y3O`my)1n^D&5{tCEC3bx%_nN4ZJv#d`oVJ`vCXg}f?w)Z8_IXLeN==- zD_5ZE*LV9ZZ6_*6*u_uC_jJxm?L2$i&y@*g4Z8AjQaj5oi-k99QeXkdPUX9P)k)d; z>$g%hIJ9@L1QOm>tWIYWc}7KsIY0ssr^@i{Wj&nJ%SE6PJFEEVKGZxotewh{!qO%! zb%NG|c2@=PDJU{Vp_XBS%mN#!iaFZ4IWv7OfkRrs*dnz(UIJfsrEoGFcmR_;Z189( z&C7Fc@X8_z+jX(ifb#IZIpwuK*uVoYcAJgJ{2np+6~0%xwPegJTtP(deZ~kMHiO=- zr-!NDsYYNkA&iluRe5ZLV8FV;U0$C2=n#X+xTBKbVKbC1k1Gy3Z;+=ZzV_OC3%v8@ z7U=C49|YLsCF*#bD6QB;CU#?mooigVPgqH1UNxb$BM4a=ZL?V5*E4OnX|BINv$8h9 ztv$b^l+l()Zs6wQe*=`d8?)X1UjfB~C1wlMbeR)4$87AOF}iMd40O z4Xk_=Yu>Nkhx?@ePsr91JMts!JNtbB^M2W$iSwqjNJyV+#>My-H^^mkDqqVS+^^Ui zgVLzq%GFkX|K+{>B}9HLIJCsXLkU>yE7zy|2MQy=h1(vA=cr?91Z?#! z8lf3&tZ6Pv0ELX>qB3^55RvK+oAQCM#FS$n`C7p(L;wC|KiDG>gKX^L<~DQwt9N;d zMvPed@W?GFH+!#L-JzGk!OkqG2@fyz6LeN|2>hmy^=xTvARG4 zMLW4>xzg1+UFw3&Ufb)F(;&$DA#Ksee45B>3no%GA?<*qg|^Z1gdsC0%4?vUo&N!> zjcIdN5SV$QmMfUkH&hDJ)725Fey(DXu7RYNPHtB>Ta0b|KcX(Fgv$sKcS_D|XB32U z_4XB9?kxsmqfdk^W^TlqTngNJ?}*v!)&l`zv}ab5gN&M3SM|ctKE&t75p^dDbS#M)S1gY z`S_EKEr|9z&Lu@)|L@5j zs`#s~um1ik&>Mz*e!rI(b1BF$(Jt<|z2mD}gRA2yk*KMMfmv-8HSDZ!SN2NuAQedRKz)82?{YR=P?{7G~|ihz09x}L^aibqd=ii*gO0gR9Eqvw6qY3 ze4?Tz4f-?LY&Q_hi5g)yzz!#N8OH_)J;)L9Z^4N#cQq<$6u-(a>Xfbdd3tf~g1BhP zu+M-Qm@E$2(m#7tZ6Rmp@TVJ>D7FSyYG#0CkoKJdBFlQ@4Jr}5t@xZ%Yz0V(ibemi z(flI}5Z5zfT*XMIDFe_x8!hP>41}jo&?D~OpMBzBprbB@#M4KLSD$}^z?5X)bjGn5{zMKp z*EhK0ve^YS^Km(4;z;C}P;xS11hw6%>&9ja<~VqM;?tzDt?keVp+Hv zyO7Jwz*qrC7rB{v!Tj&oh|q)T5zumA3>R9odjDr@FE~_79(`1Nq`2u&GAJR_+$*Cc zdmJqlSR^l}r6XVJ8bf*aD|K}sCHb1QHP~g%18e?hO}OOr6HZNGFZYtfm_A^amSkW5 zokUZMvlRZ|yZbZlhT{W5Q^JP|X_?u2jAZ{Gdw=~F)f%<|!(#!8Ac~5LfGC295`rKt zARtmocc^rCGi*^okx)unx)~6L4iS~^ZlrT)28MdC#oo{JeSg9G{_@QBI5q=o*4*p9 zua5IV6|^V@)3-JRPcG-92^CM`6#HAq%ZpG1%{85YB*`Obj+kt7lWZ94uSrTD|dPnC 
zeJIZqLB2?p3&{(j;5xVtavOpdC?46WwE<@c9avi_#y->|JAR^E*f&si@KTYnob!Yx zBn44=mt{HirC6}?oqxoi_-;FYPItQH@-l%!w0uh=d*PbUVY&0d{TIr&wX`hFEUJ$W z$NSsZ+~iU^xzPLuYZ>!Ys(twC<&@n3Z=w&`nhxhQ3uSnoLAAj7^C__o#%Vas+TR~6 zhd+tGmy!Z*u^|+sWtCeTroBig)wrjo_h{f#VDIpR3ZCH=;91$aZtaD0E2;E?P$}^6 zC>WSRJ+2Xa=rRt`$i^rLuRus^;fW%MuQ=Tc*#a^j%fcksZ15W^i!OE_FhA|y{eCB& zQsn?KK0-#fLjGD`8Pu9KBt0kjo6z3m<|z`Beahz(WHjh8b;TB)m#5}G5#~Rg9);Nc z_|KaL#!PY8)n`!abL+Kz+HypuUT9b#+jsAtWc&Phvv2O?SW!l$+cdWX!S@ z72M60mHmT*y*XKc8ue5@d7BqZHIgmM%Ul$D@7t&LIn2?_NZ|_<%ng_M6+b%BnRbTW zu=zo*-7gX-=_s1!mzAdc`}gHP@ykyxv-J#9p4Y!o#Bzs1%xC0YR)LbhVfk0~YHLBF z=4cbWnK!4P3LDC=BUzQNSu@8hgICJg`9m`)$+t;{!zN*6t8z&rH0-0p!rAD=gIqb3 z3NzS-rJ;;0OuC3feF057gq?GM_#pf>1>N^OG!IeZqq-s7$a6>O&zt@xV za4!+!qvjD;I7ADKZPwv&`bxX|65g**pJrk@u{$yT%j*~q^;M4X=||Euq*P~wg-`5~ zNxYAU;34I_;M-#e#Y3-s|JeuA;m_Ijirr?uFbg0A4+Fy!}?Z$=@;<2m({LX2(+Pjbk0h4m!WE@fRr;h>< zf&^JUH}n0!s2^QyJ@in$j;Z6|U<6Bwpx;ml+h>j6V9(P~)KJk8qdctfe7UDKao8QP zpFvK`HwkGuM#EB3UkLqxDGn3<;y=`$aC}85@5U1Z95Ahl?uld)`R?6^w{Hix7;b;~K-t_D@TI8e&K>sj7_6CtBpq(VLNB4KL*eg| zcGaXp!un`bzMbH;qb{ROPf66>(Ir5+AsJU(y zxBLwZxIa+W>MbVW{`I@Ot0&ZYLS7b;O<;?Za95!Y>s#MOD0~(9-#RsUOGTk6A0OeD z&#k}2YU%>d z-&&*LwIOfZ)wgEF_>w>Y4GC#x z84VB5>eisc$skb@Vd2klR?f$jOJco=p{h{ScaYr9juMAuAg->y$*X=(%eZ5wwB*A( zQnF7kwTj!+lpAW+*2V9y#~#a-v&GewBW0k?ibxULX{V%PxrPBt1JC4VD?kwHKvgIU z>_*THa4xF7Z}Mwiyy)ARYUM8k#pWOieQ1yJJH-JhdwWkrk+Q#|s54lGMJmb=-4+r$ zD)lkZ3sbs`{pjYvsPmYr~PP zPQF&2M`|aD{R+4sRx5P4SBO-&LnwPikHdp+?v|b((XMc@Lv5|7FI(lF1jt4(cHX=h z6()9E7!pB5C5Vin>zT64mr!l)T+K!f51x`j4(I&&n>T-+mEGAnEi8PV@>StLlk(S= zox8W1h=RU6MQw@eG^<5^24DnKGnrl9@!ZpLRjnemkDj6)ujhIG9@=D?4hvJ?iqZtv z%1hjNUuYsM`4B+)Q9t;=p58o$xD^$koRgVORUP&KeqTiHM)Mn_ro+$L(H_XMD_h!! 
zzRGC^zM#z=RAk+{K0EmZuZPh24;Q3m<$R`HOA>-d`3yxEsIc!q#aL;$*o*JG?OHO^ zNX6BYr+*9phKiC)p&H2x>%vDtyT(xCzGZ*yVAaiK%-N-_9;|XbDp-Qkt8u@1lL4E2GtyQGtH8ivWQQka`olKe?66d*2vl( zE;a$cDab%BKK?_e&3SE$!3^zddj5=Wp}~0Qwjflh77tAQc)u^d?1Z66xL@T4kMOcR zdPUssXLk+^?_;!Oz?05>=-tPwZ}@9uctAB)+0^3f4f&V1*ho%ty@w$m2Q6nT-T`W* z$~@%zGOe_NX(0`uRCc|O-Mh+!>_Co)0aE4bIs>^tBgqEgM+g|G9~P4~L2`0o+C0dZ zQ9TjweY%j?*ZZD!WNlZj`AeXU48)UYi-lqo-Hs z<#7z7pNUW^mLU93h{ck{76hrgLxBmD<=D)N>-G^7m4S1ESFm9+eYbB?!8UtW525I6jZEP4$YR-@=(7oKW`~ z7x2P{^=IEbNxVu+>@xN7f1WGs>;>!#&vgAc>da$5v?gr^CDo)(qV#$vtL)=zPTbqq zLFlIO3%~6tUC%_v`?)SewIZ?iA0__(LHu<|(28C}%FFJf^zQDcUItD!7X7NT>n>>G^A(JNvN)7~yjQbJc%kJfupyqpjh$E(`(VxF-^PeL~bXWaIjrRA2E8AbV`% zF)vp=DfsNL^ic3fNX>@<1;|fMe!@Czu4LAXWPV6 z?^&EBp+64v_Fnis`8lYaL#5G8eF+NferOsFE^Gvl)vv7n1EnWTP zGtUFF8>MG>%E%9_sInrr&QDiNlDC*opd#O8Mejk}T~(#m8Sf}Rcq!G9H`f9Xd;u0A zy`=@Pq-n`ryRff_u>h97`{JNx3VyYL=Onq{@RIy2ihF)1t^PI@tN)p6lf#4Bh~Q=C zbg7GMCDh_up6EAx3ejSyZXk-_U;!J{~m1 zu3zOn`6KL|KPhAVebZj=V$o*YBdPU>} zT_|}fdP{cI1z92gs_g1c&0O5vGJ2(&7`l=%jt;pJ!26m`?$-rO{+dx!BD~+^bsJ(b zTMWE{Wr$6L7!x4T9yUjP0kVpGI<}1Og<83Egx;a=n6p?oB0dPE$B5mR$X?=M53;=@ z=?aI?-G%ybIW$b$)#8=tO47S^-GOgN)x%eR0TSD_KFO#>S%e&988-4nIvO6s*~)ah zlUUo@qUEalL8Iv+X(zn1atm2?6u#bvZi=S4dsG-kepoFtYCJkwcpLUiMXr+KXMa2W zMjm5n<~7v4>iT+m%gz>vTEZh`F&szbo`c(}SFsWGzxKXy3S`B%df=M3{$;#0EMoNS z&aY|%9u}jQbIt`AdgM4dp!90shf1ejVj{HC;IhtGwv+JZSYROOqL^b|{vczlcRd7n zSUy0>qgX>~f5-I+h~Y%p_P;{tuQOuPrxuXUy(5 z#Y#40r=+Ct!9+yPZi3cv1K1zaC8ebj;^J5L+VNCXgWG?ikdm)Xc%&itc~p0=WlTOX zApzi8x^vF=q!lbGjK|u~@p?lkFEL1hovr?&gJwkR#$36?mG+@m7C|cJVg}<58tXx@ z4F2L7{-FcNcJdJqevbuKR&5W@PqC;|UqH#&a4oENHv(GWV*o+_JqqPc*YuMwY^6wh z-#Bri98vL>Z;6JZGvq3pAF(W*!*-|>TML*jH1A(K5IUCzV`9`W@GtGcCd%L-YTLR8* zc*Ew`i}|_$RxOANFP9D5<*;J>b7hBS?1FHa!`v*w;UB@Y_N}PD%3)SmwTY|jAQcA3 zzVFv^3z$V-e4zhd_Lv8%=W8#C>YYoGkHal2Xb#HbaRj~fQnJyDajG!AB$p3<7%jJ~ z8PTTea`eJrmJ3WeyI3hzn)}=J_dk{ZEV7qK5Nh#L!53eOo2!Yf2+MjAePOz^+>@(8fOCKnSX;0z0HRY(U3+42HmHy#Ky$j?835CuM{W(UQ2eiR-kWW2pNO?y`r&Gb zy3xQEzC60V`gf!C)Ade(D=}yc#sco!daOZ{(%u2m+Dj 
zKb32Z6|%x2Z1~rBemdpQ?vEB!AYOXJ&kOurZeKD!uo;(`fi@+$p!CoHd&3Dp1~B#r zu*WJ3Up@QSijH(2gDGfp9oY-oPQ%HL^{8(F4IdjojB;ZJpTD&CDuxYMZxxnDoxXLj zymT%syC1=xm3cw(gLSw&Wi##DKfr^_rkxsybT#&!RV@S{T86h@`CqxWJ+De(^8!JO za;Pi%OG0m}z|DE_!t@$|2VQ?<{jH_O6;S5^%v~Nm+M->Sru_iL^$xg5j!xL+!SXM_ zpU+w!JAX?;CQe9u>5G2%q{;oa6u=~bXRakVii1T2+zVS$JO%cRm=fJd=C{VlTjJ0H ztq9W61@G7jEjw?&7TMPjujZMmp5*5Me04y@tpKQFB4)HNI$5H^fi?!tcs1F$n6SyWptNADCSb^QtlZZUv@ zIMcY3S;Graj?a!IIaxWWyg8{?3+o!^UXmFK#!b!!Az4W9`PjQPmq)Zt+fIY9iByV4 z_N5y_iDyVWR9st@*4Z%tj0^C>Fttq?y2*cp6#&Zq-MbA4%ZchOg-~j^Q_x9+9if-o&x4O8s-NIA8fx;SC2sWbn_Yv zK}SH<0a9<&13wHFC-n;23Z2VyTao1N;%u&m$A-3r26SAb)WUQ}cX`V9<0%p{O8;a_ zORBsB<4hPs?5b6E&{#ypnvFKd$q(kE%sYcj!AO&fyb7&W^yXU6k7`p-?1sd}RRQ)U zKqrYJJ6I~PJ?0G>a(!!>_Qux?kcyxOJDvqW%}wJgZ`6;CqdJrZ$vyiH?%eH>nMzYX!*K#>P%lED{Jd?J(v7Lj+!+=~c@v z_k9IBg(=mDlpcJlB23`4rZcMfiGhRc%CC3HRII1zfBqrL*5yz|L=^iysBYhMZ+wPe zt4GEFJ|G|?F9R4G47Nk=EnC~%oQ*y_fScn-(N!S-Ri@(Z1EV{D?G4a5-!CwD8Hz{# zlyr?PWNC4&965!09EL7*7Jc#wbktz4cqadG1q4Aa_oh1U4#+~L-?g^#f{}j!D^e36 zF<-oh?>5D1WN#!TB#eXSgUkaU>jhOsMFnNQiS5QoOsuS|0EoQ&uXQTNkZN?qb>N?N zP3DaO$9E!tEZ}gJ#(wmzZ>Xmd{oep8V*6&jFaIH-=Y>Z&4=hGN-UgTO>Nd%@h|H`U zmmAim@Hc|$v_{Psv^Rtl;tU8b zVLm5OkF_DnGZl5}RbpzRQ&ZZ}jkLdvze*Mk2E$1HyclTAw7Y=BVB&bjNYcjv(}a!1 zaH1ZITUG3S*eJ6(kf#3nlNj|ar{lZyTEc1QVm)ffNYFf@&flPqb_vFFE-G?D!Go}- zG;!XY4BN^xDC6muiB<>Cva)g_Ksg}BbtLv}#oXGxti1NlZq^xYWNR`L^VyR&B$3({ zSt%VN1riR}r>380P!C`!OAD4G{F<(RFK{rxa{Q*>*0RO|pX7SJ>z38*82{r>ZQ2XR zdJK~4VPV=DMK5!dWpBWp+OC5WDC6t>MfxT${$Z3GiF&wU43hg4>?= zQ+^R~5Y^I?w)+aSZ8pZ?laV^g%CZK^{(4uRqSA)JiMI`OfaeD#nht07?JD#AvJIst zpZ0od`GoTjr!gx8+IWE6JM5;GlHmrqTu0_i?J01$&N85`Uk*}YSy(d(tEqp*2uH-` z=9C)%YMX&R3hH8cf$dJIOGd)MdXUI1!qZof``A_h~@CkRi=diR~ zvWlTHvePS-nX+c|i1-;v$4tHht;o){BUVQ762dJyk;2MLA3y0s1CkdRbbR1ovpd#Z zqGkzaQX2}(~G5M3>B zV*E(l!4P~2hTpZN;Dif?AIt(TeXG8JPm+%-(jcf!^(1$0Ika=HY7?QtNjUz2 z;V^naK+9~OBF#1F1|s}m0k43-WaFw_^}AlXyhB_9Y?vq{PidhVJ#eX;dI5ODyiar4 z^XC3lxuGalJPy|J9E4+}3!4R)Lt-H3Oaj4BfsQ@)vLC2r;6PCNkV8Ha4#co0u*p-X 
z4iGP}D_9m&l0V2T2g}2`8E_SwWRdqrDBzIVxWLGnt}3AC71SKqyCK94$)BYJ)Q?!^ z+DP!%Ur$XQsSIfRGAn|(mQ9pn!?8(_0^Ed>{r}||GM>A}dfkD-G(g%E;~i5UL&PNa z2_vKF-VzXxf?R_%NL1gQ%zTI_Huo#WUEw#KJ+c-6Vm@DvhO5I$V~#bbm*@5f1<(7w zX?z`c;(73Gp!0Og4)Q5}mKE(17nY~7BLp)|M%QupzSxGL=rM`SobB%_M;dF}ayPH# z6Uba4l-_+%M9v2tWT394qDT`NtGLj2k3rXe1bRuvV`11s)c01&sbnfo@S&a~YP{!; zVe=Mz_p?@rWrB(w*ia+w0lC)a4Y5JUd4}>pW*7(r!ASrt@8HW`pTPBqZ7|tqv7>4} ztqL<3jCBmaN=(=>?*jzelSudvdZA34uON!W%RX`acr`#OgW zLr4Q9qE^3Eg3ZxL`q&lksEcpV8jNW;jWQoTIw#q19xy@-T|l-`BtQ%xdtMbBNvw=1v<9@+({r^>zLWUw`Urqw(0GR7iXHv?A5Ohti6?36_A2#Dic{P{iml^mo8*sQ`LJ^u+ps%x0I+A#i}`+GvN6 z(kh6pHTSO9LiNy$804ABgp&>Dj;JF>nTOl9p9F<`gnsZs@fH%k&;`jUC)cWiqyGTn zNJUzX?#OBb=g$J^as{6AdpZ<+KuC!JhQ9wP@}B%;aCEE%K7Z?&mq(lWckTb&4|`-_ zQ}RP*Wask3qGM_DUCV6i$wV%Iah@du;lHAT{SVPX&;GUREy+0_9ebR=Iq>H#@JEnm zHxo`O4w{vntnlzy(f#VjkimPs7o+d@$kWyQIR#%lbXK?&u~=@v9)Ltp@_Ym&2$s7X zBH|v~g@*>mLCj9rBl_XgD#&BQ?fXQ~_z7kU<;|Cqax78|puwho%8%nW@_5fVAfcPF z6rhHpk^Lu;G0L&H6!2_*j%=*ti8OEJ!^c)~q4zp0Y%Cq!fKS6J7B+y@JKN8&ROy4N6x9M6Sec~HaB zCrAO@46%GDUP8pG?eDQ6BdAHZCmCBNMLVnjfuF}?O7u_ur7&<2AQXlA#<C*`%$@-<%f^?l3D3w&?@ zLh0-0mgr32Ed-IvvjF5f(0fuK3e^v?3*tOXwz>{r37hQ6f^2|DE-vO2s_`@Kn#|Cy z`ZL!;+mg3^zdMZ(2{)fb4w4G>2Sk$m-fVtWw0q1x+!usB^qhe5R|AU)?g7%aON_U{ zA^jZ%{1wKiMeh8PMWH6(02Mpz6gr@=$udl0u*uCtY<$F?{uemUZ^_S%ji{s8Z<$o5 z#JCA}Ud@4sFvOPy7waWUPCcPRzSaS}AE}|>obpzsBWoK(1t~okAQhX-^0Q;m@`_MS zUGRgstj#+ht7#tyKxps=k4TsE{iufa3SeOWu?sWVY<7p) zVOli^ndEmDl28gI&)#qi4gE_!C1!0${ukF1IA7qeF1sl3UI7*QfCGpRf-}0{GT5sj zXZ7(oq#pY~j##njvGrCfIAlKI~1AgF{wOHg!(ED1v{UI*plWd zr()Kg2*G!IGu=OXcExd_BOOZmhRf`sp6^4H@i8ttkfa~ngju$~7Z)skY4Nf>sx4r) zY-*@?vb7&eESu9v*9Ow!mb!L~IRZG3n66lUeu6oWxxzXfYHybU+B>EJlO6`mk!FYi%mA}HU=rM>yN=lu}&`Ey3; zUiRnc*ciwKZaxp#IF)QG_a2nQI9^Y|Xedd5MGm;#tIf#FEfMT@FhZz`St(By7agu}u>`Wn9T=%K#?W30?CwNMpul-&w zGH~8>4Ad5YLcpj%+_X5TGp}SwWlpB_HAxNMds}y8*f0zmD(_EpH)KR+%tr!^KdpTl zJoxpi7G#K1W;TC#D14sWa8kP<_-HKXhCZkSG)g&E@VaEV~-_Y|iFgpI{wUvlqF8uH7 z-wApD^S0?|EB<@u#d|bQ;=gz72-?Q~zNP`(;eX%!h(5^GPQdcO5MexY7JfQv@f>mS&QR 
zzQe+z2=OZ~ePpN)GS2{}*KN!};RRSuY#Uzz66&E3?UAg=y^V~CwH|vcF8&S)gvd;d zk3X=TtOK09BRt}*Kw3&v-*tW*$B{r9gsFLz5V>y*XG4+`-{@gb1H0p5UoEYh3`Ft) zNe#JsO20wiku*~)1GMu%AwpVlu^RGc|0x{qU@};q{2El(A(zu3ms|F!|93eyhTjPW zx=KP84K>XL=uZF{o~aEYbIS`^23^_Qzsq3RgbA@Q0g#!6MH;!!G!JCUcod## z*QaM^i`{qDVD3kzHX906_!fD@Qp1i{M`F8Y3Ypeh>0a`A*9BcsGneXk;=l>-ZK`?( zl{Tm?>|uisti^3(GYU@tFnu_iuH}N_*<+}-*RveQyae->X*+4)j8qZrydLEJIbi6&VHIl?O!rYX}aDILrhm}kBHL#Tj+0`dtI40yVgn=BeERY%HMxM~^ z<)|PdqJQ+n9dSbuVPS?Vk~|uP*&UwEgs}*noBx+KH<$YJXV#_~3A<+3iSrXqqx#|&EJ}MHk@t+Ech(G|7L-n)+D^hTJMvkycb|4M`qzGgSoBcgDz9RbGs{v-s zD?)at>JaFniT;a_eah2i0;Q0jzIJAd@p{UZNvSXz@_Ai;Go7Ky&(KJu{u9|6P|3L!Gm-WYC9LZoq|t#n=#`2g70TSx874JPb0@dZTF|zW*NCCOZ|)!1Ql`p4cICG;FFX z38q{)>6qEYLG>9b(zkkahv<+G1nJrKSI4~!TT=dI$j7oo!kqg(&yX53FUFTEPLWa=;M7Lr}>}Ud3d!l zv$rjk1?2@cKL4l+ZXUR#16d@e(Ml!d|L?MLD?i$YC@wl~4^?38FbRZ0c%%Lkq7RYG zF?z=TayEj|)b`};K?~1zFPy5OFS1W|+{6Eh0H9#x^?#Nd>i=B>_&>!A;bEr(zzY)7 zoZI>QhJ|C*s0%V1AWuzS-XfgcGG!BFD|2@k7+r^78(*Y5mpau+c*I+9K~$-GZw*)d z<=i=4rLsF}{71I?kF%2~1(Npt*g}6}#CrE?R&+cQ5_3u|P;%G7dA52Br;W3;Gm@UF zYR4{AY3G!xD6coZYdqZD^J9sNDb8|CR*5%KC1&=zC<@;(KiZgFc-D3=w`Je0goK{e z1hYiQT69~&@=@bgtzGxx-Jni+$r&F*6Qe23pN#hXvW&sO74|Pe2dGeFN29{MTsun+ zR}-`R4rQ33Uzk7sy1dI0!K&;)Q3Yeh?^G^1`@?OZz_LFV31FjoUNz_ID14!Rd#6;5 za)b6xjd6&yN+cvfTD7<7> zY@%7_a5Oe@>8vX9TNU|XSL=3?P~1?G*s**_VL|MMh0T1(x_bTaB)1PZ$bjX>J=dPo zuuCUc_9#vJ^L|>!PO_O#RO?`YCGXtl9OnMfDBICAil4PLQsg(1!&STYvbIhmo)8%W zF%+-|wrt|;XU`5M3?8TdHMB}A(IM%b#DRwGx_(%ClXW%vLq)z}BP_+Xv+l+ioWXXZ zVl3l|BCJF3>DyF~&>`MRpcsE=ghQJ@mYmXI z@=*bzFyGF7xR-2ZVXb0oKvJD5^^iVC)qEVUzXh^fP0B`Mwh&Bi5mtQm9>4tU(yCeAzWVSl`ib5uzfKb`LS(6%(qbanbT-8m*~>mN@z$7^})ogue$J(gKwq}0h; z(n%M7{m_R`{mgzB*VKfr>K**qO`8hIPKlXZH!h1k*Akd} z_%<9_Wt%|rwN9}TgsnNO;(*D!qU3t7aKAm8p!h8Su1*Oo#o#@bCa+4O)5<-e2n{)( z2>Y%J1~RS%3;UK!e8mNyjqFR{fE$&2TRhic91=Qp+&{C>G}a|yDM|2ow59x8Sn=8wBN$#^ zSfNOp$gJHhJz%ygd|0U?-eZV!K1U~?nv-LZ+>mMNS)%2zWUqgz<}nako1QIkb$xuO zt zgl^Ixgqb^quf)XcEh{LmOZQJ@$_@$o>?6QlD6dod%muqilBLf)zQR$|?L^pCWhL4z 
z1hixW&tBo1js05GdS&r?j!RGG;Zn`PYr8)#O;=~)^A&ee2%tu`H*MRQ#Yoyy92#?I z1{^{7HFBbg;>v@5RG?aE?H9hoa|>ik3`-a%Z( zoyPkR9<ph63_Ha*`!TFERS{nl_ul=8zWuIk~BjE=q_LHFcr zrD8n7K>afDY|iD;>3A$>@{vA22)?A9^}&7r?dpWtu2l!JUhh|+Yih}tu7^9fY?Q0_ ze<%Y|)jni4TdBy-J<;?V@*XJCwZ-upP7nC^g#u@zS~p`J{A|{nQ5HME6hd3C&t_^jin(CF(LvLM>0?L1_&xVaNrqitv%xLL3QM$xsAMirmvdh#>9 z1wJXrT~8i&IQ}#l2qf;@LUNBVBE8ICm_Lc*IapNGM5Qx$Jk428%&G*Hq_!Jgi5*s8 zUY%WpL~<3MaKix*+o0dR%v?)>$*^s%>Ms7}ak%w z_%sOgD!>^XVn*nwp8Lv81&%ZXl@i1p05T+{Ke*^Rc;^q@T}+g%Y8J&y;PI)xXCJsa z1GuU~S^^0}GsX=(q*g+`z1ut&2?V??hu|2OLb=df++wUNV-I|#acqykLh|8@t%1!^ z(}f!(^l(u+=VJv|ApV&e*Q+I;<)$awOt*q@5<RCPeVFa)s&(Xs zv8utje0VrepjRSF?r47_%7{I@87vG%Z8^iM(V`3 zaHhiwS7k%BD_v)|4d!H$W8budWO!C+0cECJNL-dy)ugt<4}PxzGSRg6AOl?*BV^d3 zp53w*rclIVKPI@=ZPr;jmBk?#>$V;;b%yh0^V0syc)Zmcp1K(OiJIH@Gc;4n z0DC+0LKoFhG(6I`!0Y1^qtCyWvAx{xY6T1V4`K${#fXO!JNcFW=z&sIVMUcJv=yZ^ z)vo>I#Ipov41~Gu89kGFwuR5*B-9D&gK3hIYB0UoQ@M_go4Soxin*>)K}Gqk$;^+n z=c_Fa#GuSon+3xq5Xx?Dhc5e6v8)+<+DgfGE@H$Zw>+d1Mi&$J`DTYMn5|@ZI&Y8r z_^<(e?OQZhaHf-QnOcqGNo`R(VpE${RlCV+xBoL=TJC5HZ}Qax`R9t)R^|9g3mC14 zVY)>6ioeC}@eb9;cf(D;I>#mK>FXH%CcyJ0g*lXpro6Q_2*>mkJ|8t=VnsWSuZ7L* zGJ99l(v|jx$PYK9Ng0uerl;QUpMybdosz?f(VIusSG;aTTaDwG4sr&1(6DN zDkSz2IgncG>5OYuB+%@_!U{c=vW8x5<+KP!a*KIrO*O+IQ)fNNVVYjzY%tL&cTMAz zZvpEJ%*YV85RY6~!QhI>XXd3nmvWy6B}1EW4{p^%yDyuk>tfGGX0XJgf#|&@=kH0Y z+vokMQx?=#!j3_1MkbaMjPp*+p9rfAs=ZNW?@`IDq1EK?Q(${PTZi^Z)DLnHnI$@b z#6$BsCqxyT|JDNks^GS5!G61yI91uJ)2Xh=UWa*AA*XNkN8CG+x(a@E7$o436jdz| z;Gt{-_LLhZtK6}rTyYLwpin>SyO1sA8*>4k0ee`vg#+9+#YVv^C_JGWJDj#1hl z`r0#ZUz#=#{0~Rdq84l<5U>yC8`zJHQKp;f_N998`)k(&c0l7#E-;0h;;Bfb#U%`7 zW$gUkSgn0dGl8Ue+LAzPa&+9@g^!vfT53)|NAvak`3gHRl_lcPqDz2J1IxcUVde%))peqp)}*=8`Ofi>~TdXYbqxV)x73u`RKKnadszyK0pyUkb$%-RzUx9p*kU z4+iz`a_!pZFXqaqr$TaRV`x6fj3y~*j;dLfjJb!}J~sp-#BgsJ;v=ZL3N0P_I@o5{ z3@gx&4Xt_j0!;1k$vE2G>biL%0^OHT&R=(>6urK#1*H0AG_dN13gpBzx^4m2w)iW6=GEe(>4RKL=e;K9 zy*`&kqd%S%Zxfm(jAD11C^MdoL_4P!WNFBw${(Jo$mLnCstJ79hZ&nO^1 z_b7&b`CUUv)0v=#UK}qt5U?Kkc`BtHLiOEY`j;e6z5N%{}Grw)N=emOC%TbN-}^ 
zt;OB`&1qSZV%jw-TM#pMIN0tfxY4>h@aF34opSZ|R!H)H%D|vkCQP0hiN_ePAd50$ z8Sd`8sCAD$?0k^N3{fZg9C>ZiQopK(E&f?HjYo5;t2f!tQ>>%T^xcLJQ%E=q$bb7z zZJra_FPkQywSJGo;cUJ64;aFXpha!Ciz8AXpVy%>0u|yRBp_*Cj;}X`i|z zbC_ei&*t(IX=;&)y>sib*a;;u#aXRydI?u;#Dku5a;JQ5Sse(sX!pa7*u8jA)wN=v zt&rYjhrvp?sLX&Vw)*E`Xgm$;?yu;vnp9oWfNjTs7y)W`HK(0fZ_~hoxy3Xq?hXFP zET@3us2{~xC-Krf@*Nzdm1KYRj*`{N-}z*!(ko9E=2n%cZ%?15+4&fGrKkOhE>_*2 z`KK1O1^v#K@vFU<>`&i~ET#NSY3BA1MvM3o16cYKhRPi_&pwf19i^+({Jp1NbV)@y z!=OB}O0Z_nJ{Omq#X9!RlaD)JiXggt}KL2UEpR zY;gElj@07fb?-OFP+wV9Sg$#z4m)gypG-Aurmr62$krFzeA;NGRpt1%lkH(3LE3-p z^^@EAh60|4rFSrz0xZ4dk9)IqDBK^V(Er6$y6`<3ZQoM}PC1SWYLp$_9(^?+Jbp6j zmY~yA!VVncyN*bup@oa@uk-g)YN_8CzjqH=hmOY9hmT(}PTh{5zI~1@pDA@9C-$FN zyM#sIj;jJod1)DUN-Q_uU>h5as9Nia?(ud_-S*TqOv(88tiLRJbuYVLSxjr3ZYeK{ckFt{?Y*xlwfafz^1!_){za#guOj)F zFGfakT|#;NdT5?j>ACcCV7`Q^_{!g6*j~*u^myYs!oYWaE1iPH4wq9l_XX>_9r3ZG zmCtcosjE|33%Unee-9s}gpUk7*R0CASFC1Od3x)o$G$g}b<&45C*5q)OFI=zx0(%Q zDw(}n8KFLD8aMd;W}nwX?1lhd!^~O+k98oOH)~92^}ZHv5I$|C3kA~c%E}?`6I(1d zb4b6QgX>>yG&%G^O;>Zve-PtqP&|`#b(D}oX*yGwDLS(fKP<^uHF*2- zyH&)E-}wf7_7loBv}u5!tZpQIeB;yiQG=URPrH`J8B6FoK?Q8c>HQvSnp@4%scT81B|ap{ansVfn1U>Ojz z>ZkyX#+qKOcIHdnx!fBWWZ7Bjis~B5FD|&2TtPN#eQPQ{8k?RxGnSXbbJ!PpHQwLB zv)AOMnV5y~ogbu_q9s%FXQixv0E!| z`eRR`pn8?Tf%aa<6yf5jVAU}Bp~iO!JwA?f^)sjked5#gpDKY94T*vV9ciDIva%}a zxC4B_4~9pM-VUX-|Ct(wpL%mbqX!i8#UTE0r5T)6ETIMe$ zi7y$e*|Ga$Z8qnS=_C8StDBQzxgSXEj%lVaZB$>c@Yh8T)%btD&=Gx4pjTy$pmN_n z*LuiYZ7LaRj>V8Khnn7%rsh@(#OsxC!TE@1yHtAWyY=Q?aRv%}y#Q)&Evks^oaOn) za#wc_%piU3+gRzu=Y5V1#C8%(Lo!funEW~F&-;^B@~?4Q`>7+RQ=C9+ReH=cWD%P$ zU+*z`|K2T7J`JnmY-6)w%vsRZ$iE!pzq$xd`{?cWi29;p)4r_S-9Y>45#iSSoY$Ei zgyiYUzw|g7p8Ej?_Y5twQ-k+dKYqta7^7zAlMP-32#91xboGY43+piF?_~3eet?9> zWX^-~Zmp=PPRY^3U{}V+2?I|nRBl$<>|Uzo&MK*JN~(AF+?mf^$_#62cRJ3 zKP$qg;xHgI9<{a*NOB|fsM=BSS9E`3ApW|O9(O%=PWsu+yud@kuc!QndnJ_d_)XO~ z-RKg{JCm8&vDA$eV?_by{J1f?W$X?4h1^|X%`p+4eM2K|x#=vG>tLXLE+61Ix?;Jo8?SMc8v67_2n$hO4ix|Fj1A=Geuvx9#J)agu~7ZJdpBNL zSmM&{AleJ%97Rda5;!TvBJQsDj8RwX5o@N6_bQfWuME>8F2z%u^q=@z$rU^HdL+lH 
z;SmzM{dzOw@~7A|!F4t!?{k=J&7kcLD)o1wzkpxustOm1&I4XsOPLjNW?J3_S4G-MK za%W>&&WrT`0bD5duKoV0V?o$SXq&y(BU`*&ggBw^}pxYY>wj#ZedIhol-reQmcwwx)|J%PtG4kR1N zSr<6ExCC6W1QJYjXW*Pn2~pRMR* ztwE1cnCD#BZ*_dRRpZB_?HF(~?(-8nOPC^$L&R7cPmtkI>lwJgb;j5{v# z|Hx-+JP>Z+sC^AYZkjnmR8(%e(sk*f9S9J~J*x|0Ta!PV%U8r8Kdvy)vO&z>y}PWo zv*CxncBbGOw~}s^qy9|4-MPID>lJ3u0fY*+-sHhuAOuiiQ@RJK(s4WC!4ddt+~#RI zW#xMx8>h)Co>>#xPAh=kiEOb)>|!wy^j_fd`;lG{^zrzSZQDkm^EdhBIT{ZR(YWI2 z-}@HwRbAIzL&1t@J@9D&7VK7OrQut2$fSBrMoMR4o>5kQ&)K$zm5q*X5t!Y>_)9l% zg=S8AsjIk4n#a-tojX|mAA~ZY0RD@2ZRs2Jx;EcZA169lUE1E>FpnKJR(ajpt<7@* z4?mn0n);JTrBm{H#BX;n&Mg~@O|+WL*o9+$7qByFm5=$ok~w;~^CwdnM|D|8Yj1mh zfp)}$3%V1O#p~e-7Q({M(O6&a{&Pn@gXWo?*k4^Rmdq9|YiSmJ1I_)!Y&kDjaBlEw zFDn~c98YG-cP`fxeO(=ti}seg=CYe_`bW|8F&km;$-1L@&;p;WemM=mj)2@G56d(0 z*!(EI-Z8uoZmI6RKhQ9yugmLv*SWD{82{O7wB$j$U6gP) z&eVJ!M^n6%|1iQrPS(H#{{7l{F-oWJJnIzayvsDIL3RCtE7)(ny}A#&^jg?rE-kK+ z(=Fo!+*rq1gmwlgtI2sb56gO+C?DD%2WYU5Und3En)2F2fPV}HZSRK8gFiSK@hV2E1mXSmKPyp$HC?UPlEGd&UW&+KhdMZ>PAlPjdJtjmo6Vi+=gT)FcE!p9 z?ZIuq6lNNY!9Lnz>>%nNJZ6Qqzq78uozM2jy?l2e@H)x;1FH7I7-Ah~!4B*`7*3;v z_W0}k0xO+0)jO|zlGvs@HG$1X4=2Pv=vvLlV};TGPkUGX*Tl8O2e4Y)t3?z|gRg8# z1vG31Sv~?n5QHaHL`#UK0W}K6L|Fm`sX*02DT@iAh^zu4Dmw|AD+mD!2ozami!qR> zfnZ{Qo&^0X-n&1|CzF|T&pPLx`Odk*r}MTp$=(yfQ6Z8(+Io#{Osbp6IY^)?-lIRC z%9tz9Juw3d6r#S<)M$MA6wYF~kF}apnv-Y_A0ehEm_~K68F7dOvZK zgC1a*U2iFb6!oFC+ui`&>Y-ou3iCfCmW&e6<%`b#J!XUhov%9PW<;!Cotm)09%QFs z;P#e$n)c22Pk>x)?4jZLk-7oT$8{hyYugZ_D6=cOBFcV{maHc^O>t*884Rzsk6J9+ zh}x=#m;^~yz&8(0)NgB;+`)>nOEZJwUoH0VJa7U5dfWB*<5se$*w&!()P=(`tmKn> ztr(QLP4hK~s=cmrgYIovvwh$cy3T+$S@22r{fZdBZMgdT+X78K9=I^IDeT$^zqLc~ z3_o3kI`>xb*L@Xtc7*9OU2;JOwx3*D2GCrbz|NEg-rG1UFjE+ z&HcTIp27zYKpesqb8E`IP=mZqT71~rU^8>;vQN(OF=6?^ic(1IP{?HP#k}9#ei8;= zGg$a#;41d3kt{&;8Su4`NItpc=C0JO{e>bow$aM=o|soM63L8_oY(seY`98;YDmVs zEJQe71Q|j_oSEUKR>L$ajOE(OyZ*O1a|3^~we1bdb>=teKZU2fMbU|>szOs!D-Q}S z(5t=Sg=8IlF}90{60&S^QzmpDorEoy!g;l&sbu#(t-H>Jno`C*-K|roK1jkoRF_Ua zB&Vw~3Syw9geF$;$jCMK4kik8OE`-`e 
z#?a!(wc#2@tN=BBqTJrFr`N}3aifNAX29wEE#dXqB8a_~EQax5$;Y=E6a0Yu`}7CW zA4QLdK13}7t9+z>L7q6G4o_jA>vn%l=yu4pijSEZhqO#Og!3=~(fzxjhfkyqKVQXQ zAYKd6W5ugSKAvVSy`O?&+Ed&YO!xnfK1)NVpsWm3?1-M#_t8X>bOipFdotD{CTsXi zB(FM3$b>ZCF0r|#1!K`>Ih+J$;F?&JA@!zXVTqYTRAo-CWD^zrfN+KR`Aq})*~^(W zC6_1t=lKZIl@PB^U>R!3wJF3*sw+emZJc-Gc~UJ~mEh7WOZ7F#4pSjyDJgA<9#~Qy z3Ege=_n%+rMDHooRO*qyN(&NFC!g=dww!}%gvT0dsBQatZ&q6i+*csX7^*AM)#r?YZt*^yxsD1nXkuB&`%xB) z#huoT5a|J=ZljUlPK29V>*%u^NdUO}5C!~Tw-ykOm;8+Te9eXmzg8AE?pduy?3J0h%#10xmRlY(Y`ZR#pH?~{avg# z?>MHiMO#JN87{Dnd%2r5U!9Plko)7K_l_UI<8M_*wDtAE3?kK&XDM|xHR0WbM{v-j zLP^5f?Ml4Du60C{X@PA6$EbDWw!R2B-Efsjx#Zg+E#^*p)BgkNx&!J8$UD%equ{kN zR6N<-+{~S(aOO76r8~2+$f=_P!KVV|hPcxJDonEoJOsZrSz+}e&ybpz)MxxVRsBeh zG`yoBHRrCHwz;F4e=|0tU6u=tT6BF-$oLM_#s<^fDOf$oTM{$_s$R$B9%T3h)SrG( zImHzhyd-pfrMmvcqY_NJujxN&(r7q46fzcvW2`_zT@>uQ7ATn+zi}e4)=VHtou?OD zcAq|HY?o=MW$77I805E@QF{4e4(x^4upWrLG@r%hWli^2>O;=~yPYX6K9~APy!H&b z;9ljPdebp^UmyCrEzFBAq^X+v&{k>ezXQQpHDg|mYU$qfY>+{Fgph*k{Tc~%1M<;s zFP6yv$!4I{-VIOociZ+KRp)mrVEnfwD3mUF%3fb2+jndlzl^E-IS`r`9c) z@TLTgJkEhvC;LiB{gr{nJ9oOU4Ezaj{c@?eUYsuaDU6>Br;%u|M2|OlIHlr0bO73$ z{_zQVa&v#0xe58C?~BAosTw?g?v@`|R>w@|vTs~ZGC~u=s^4TUY~j)T{u1rbk`tcW zIpK!ir}ADMi0COHwFs+ux89x=KT~O7g8Qy)Dw|fiaCNMtEPK=jeNKQ{Uh6g=C(pLa!Lo zk`amU0J!sA_`smc$IwYe%OY2mQ~8}>9kVQQF^;_+VPe6PJ)eIoP|o0F$m?j$TM?2z zi9`Z3Ane;|XlM~Q6#it5g$_XJUEM&cQRcc<3Ip|U2J+&xVR>F@DTcov`K_>~Mh;Fi zR1-?+jD~6iIE;a%XO%L(Mf2XhegZw}0OwDqP$-?{NP-W91{O380=GWfU6?~wiR3#r zE=yRJNpBTjQ0Zt*tH7mPVsNoFIhl*tKhyvGM)<{LQk`tgKVBZ@X`3 z8Xtma#uNfKQo^fC3biYT_90B70?w@%%)^SRi(=UQ^}lwsfHq0Gn^GDbb4tNK&hd~# I@qR+ozYAe3L;wH) diff --git a/CodeTrans/assets/img/example_dashboards.png b/CodeTrans/assets/img/example_dashboards.png new file mode 100644 index 0000000000000000000000000000000000000000..24abb6533b9fc3e1e709b402e62a9c1476640efa GIT binary patch literal 102817 zcmd?RcT|&0*Efs>QB*hgANHmeMntlI~jz)SI(bPt~5%(3Hkpq`aY} z-d}KfVc<$b!_aj6Io0Y|U`|7Gl>74eQynjpCBg+iT`cP8$gw5%)BDIz@86$&c3%GX 
z-|`-vSNwSQ^y$+a`;JazT*;o*ApBKr)d-8;zy@aTUlH(jqit{liAEaVDO?`A<$`4?!Xpsvnf@b!cq zUgk9AxCV`!mxaY2Na|};;o=e!hB;3POF(CLPO6>U9e?#+LhV*w`uZjj{vlenWY5=A zW9B;AdeuPI_@oC_)u(~y=5=Pd|7_@`lhcQ?9`g~A<43K>OwFd^+E%mSHVms*X%*77 ztvd}uJUca0s*FO|REXQ&dT5Z-XX#sr%n>8ETk>81oY$TB1>7OIF)rNVE<> z`1?xR_U#Q<*97CpnFWS)7f(TzzX+XVgatkucNduTbMtzwKKpvmwcNQZ(9CpUZPUZG zZ~DCFWk=lurzdw{Hb6P2}ri6;QgN;m8(+G{cDHUq-(>BPz4{6%x3Jz{$kYh|W= z{i;8%NXh4{t;v9!-LCG>Wj#YROJ8q}^7^*XvC$Y*eMF!6DKJdhSoqGU@e!`O2?)Nv zP=Zq&NqggvJl>jl6iNCN#inpSvX)*Zl$F z^fu655&a{(Ky{Ua8&|ZX#T2j1)*aVAngs|<@!3V$>J&gH{Za+Ppi#2gVoW~=&mOlE z*l}O-F%V5&Dzjtmrb_UYEA}wFt5wg!k2W`UDGwbB3(Av=G&D}AX7{C>;4sGFzlEv7 zbJYQjbCZ^qjvsIg9IlMg(zmV}CCC(Sr=|)?dMtgYTpsi>i1w`wUvkMa1&$SUa(ja4 z>w8Ml#w*Mm7o0Q`jG=MP-$EExt!?n#JzYqPhD&I_mo1?v1I!j~&XF!+YaaIuCTU!B zZ`VT7Qo&V6a{rHt>k9%SqwE?^}!I-jH57bLFq`n@E19`3*{Re*4O z*KIL3Z{&J-W#~%r+6TQ2{x;muGxRc#s~daCdK+Qp=WwAJ%Htk&qL!Sc3**{Xtu#JE zZFx=p3dguHCugptXNkvedagIFUoY;A*ztFh@64z}-yh8$WN_Zt+Ui~3n!<$*Ye2f= z@|#pXf40XWRK#e{om-q}JkwGo5(5&Jy2U_R!2)20VeLKmOCGK^H^`U=-(J{H^$a2w zfz{r2IUq)~2YAy(jHqf;XAB*Pk=r<*~m@&GX>XgaLsDHS|V%oKHgW$%+x!F=cw1a{5fL`_X|m6H6a%&!Tgdj}761 zbzzrvd7+>XVvCj3Nt1N{j1CK9xb*^ov@Je=Idv_gW%N@4!{=Z07cT-vzMdt`W3}y- zqitJDZ=pd#80YsZ>=OF+#tC*RLNBv~oaWQQFEZ*y_)VquFSS^f7*FykN3LU&=n#+9AseI#Te4&WJfQzc)J<97?c2A36zntqy`fw7 zBb5qt40*OaV}#)fn_AJgM)^3{GbVg#d3CcV{_sp;*WFLumtNfRAf9jj@uSFfu4FNO zP>E2@`4f-Vc%YF7EhzZ8;%%Ye(e*h|qtqB$VjDP)>VKBY3@JUmQ^bFp-vMV@B+$?^ zy<(7uX{}iTHYL9e9}@=w{FBY|JcY}5lF)8-V6O_DBh=J)I_c4l=0Qk4LmDq84Vu!X zboA%ao^~-U3r|sK3ucsl$iV^PJ6ybIKU9Xoxz)@H{H_(spp7pfR4k!6ft7DR*3MES zqx@8xq`bJLNWT7?UG(ff;E@OUq+H^s5u11q$f z#6t1m+}7(zc&B%+er@fMw>RP&j)|Gs2D`|Fqa&)QsK6?D8-E#R@r&W)qIL76Nb1a^@`nW_-8ApaSB=YC?rrK%`pY{ZiDPuATFih z9?;pHnIU?G{=!ybeGHo3rJF@A4Dd#+l$~J`jBs(wb=3;XLP7`_7$Y*X!NgXTmYzke-+bR0!d%>IREyFrwJ9HTA#@B(-}> zb|#KbW>C1B5j#KXY>8>vovQodEqjCS|7Z^L^saQ8oE{>b6}am(VZ=LHYZOd6l%l$~O=~|4o09x27Ds><8*2>Vs;` zftSEkUNYb|p< zSjiYHR@Z*`IZu&&MOOB(4ZQSlG9a(^(bLa!1CMhKLDPC&6P^WJ+T&@kgHy;xK1rVT 
zdp~)^;u?E&zOLq#=gTYu50W1?^azU%D=c?%eBpr-EpOB^EIqpgSQQ(rtLeB0aLAYu* zyU`WCWCp$?a1sGnA52H)EgXLmAD1BPQZ)9;efe5`;FoK|C}((Ty+^6VGHzCc*Y+$~ zSAA}Tx=)xb6pkJ2aSfBcRkD|)w&VNUb5bXWBiR&L zA0#tu{qtMqVjm*K^x$!Y2Xfb@Ik{CZ|I$h}17BIiegn5vZwmVa!05MvF);@1)twEn z4AU310a`;ZDhMjS^IEOOs>WenbLN(>Tb$=M1N+a1^_gl_# znGrm&-gNsKy}@FnFGq#C!!|AV2E?MA(hhi9vs_2cE3}fdOnn*IdDI4Eu4!E#=YM9O zB?VP76{DSYz>aMYdB;V^4QrL6wB+n=RdZ+NJA+1JY*sUus)MhHyf6z8R$Z=%$_lF= zPB#0E2R7=`03_Cg-5bD zx^Iy>QeEn0ZVW4q@c|2pE1nzVmsbe5!%6#YLJe?t;w!toF&SSEkq!B+ z#QVatZ)-7|$;X>q@c=>-vsFC|S2KOW*4C_fpIKw>@zD!X*zYwVTjV<>^dw$PWsO$t zH{=ifaMIAa786Z9$%Zg$ZD=mrYdXHiH2pmfm2*@SdsjStI`mi zou1YY(2ohXwL+%$cr1%2KiKue&%bQK@`~y+_^R16G{>-JgIjtgA0YiQ#Xx&$hugEv z!wJhJYdM0fL#Ub&nkWPciCQtw*WIaV4-bu)aU4dcu6AY&*BJvVp!|smROP4{` z+o@z>veahG=1i+IzHm9db;wQ27HbyQ@`tWm(?o%_1$_SnoH8Tev)REXsXLoJc*ksp z60an4>jJBgOW=1I4V#g~S&8Hk*17p%po{|>94vCK5o(+@oN53Qh;mJpS!I)upGZzd z-<=Ao2_PMFQr`yLd z!Idsaq4W)-q8m47TN*PFE6?1b#b{UW7@S{x& z`@t$bz5|NLVfRNSxqBdWlRAJ0F0SP^yzMmQu3uEJPhxv>w$8>wOnNzIdyyb)?S{l*ub*7B zido9mNbmMs3b}3|Z@^eGZpzMa!Odg*w0{kc zu2s}M3kOh!MWJ_#EQRT|g=azVy~4y{1Xo`v!jagqd?LJfl7I3BA=0&`u%%_laBC<` zrJ_r`RbD9#Kd?0#S34pOF>cxA)j-!7WjD=@PJ=Cxvj9|`eXH^x;}gKA9?cJmViGs& z#IIv5o{;tj&!_Eez2ZemFCQ$*z+@++A!nen{YFCbz3|vCUs4aK=)o<^AX?YGvE=p? 
z{y$CyXSbRYytS23?4Y8d)!NLVj+i#s)FXmK3A)q+aihzmD0nV1pof7UyI($MmxiLM z_M?U#Iz1nZ?2WA7ZZ3P2BTm)PGcanedDnB=LfP_F^|aP(>?vE9`Q&WHj+e(L*E%{S zE&N~D_8Z4yvlK2uyHZ&_I_J^XF?GT5?B68p*jViSt7^V|+jA=e`PaVaIU>a<%Q@Ff z&b0)6U#x#B1Ey4wfVcnfLVSJd7vO!!=(7xgTd(AoAKM*>w?kT<8Z{uQ-DX3XUnpz_ z@lh49l`upsD^(np14#Y6aNDu|%aTryLD71Gi=U-m%GG&RnRaA1RflrF;%w)Q=v8Y& z>hPbbF;}cX?IZOJ(NLDxmBhdppaN$BWN5DP&p}i#%7l#!Ri}~A4=KYhRnHj z@|&pV=IZve)`jINU|a6Md#%Z9Ut){}65WeiyqF$hG&eni>h6TQR|;!=%LV%H@uJZBixxAwqZ(DszNBv7Na^K1X9_p_pm9E&^ zCFhIS=`E&t9u*$!^X9j?7m_tj!QxT}H0$zl!~@1eTm0d{FoMGFU+sqQVDF5VM@iPn zsp!eR`|^-|G+@-4+f&sJmE(d;oVw*fXYDV;0wbHEpxzZ(ZviFHw9h zyjHlx+i(eY@xq0^B%yqSZ;v|PzQaco{{v2T37;2)Nnh*!A8jr|B^G^&5aM9zJ>0>{ zb$s*K-iDJqG*T1YDp!bmoTHFB)@5SB*Rl9D9dBNlNEY13BOoriG5Aj^3~R(&I(bN? z1nx@R!fDN-$3npnrH|ppP{@<7@ab7D?C2RS{|mci7jy8Q5=b$!y9VI&{)zZ?DS2$$ z5JiNAJALdUd%roQNe}Hr1~Ad9+pLBPklK6&h`Dpi+@`^3JEyV(6=pdBREJt4`&DeV z5{Yy4Ql2aMi%=f_vsy)&tbqLmvt;RQ(aTHe89SA-JP(Dz+YXnA;>|oiD~XBkVw$=l zAL=a24?cBQ6Fq9cy5G%zvat;lOk3T3-`~7;VtU$FL8!2Ym zM~A`1a)DnB?mJZ7F$ks`*RlU-J(a32b|*7Go{(XbL=sRmHf8G9lJvKZDhgfVFxf@j zC?SCERn>=n_p=Zle7Y!On=I1YAIx}3^XvMm!D}OL|FY0Zq?ExZ)8p4!p^7R}b5}-> z!H5R#2Xm4Aod(hj08sR8_jAQFa}XG!+o+#`{TmL_Q$mmuwpvkY>Cu1cvy<66AXpij z;7^XKJDgQPD{aO>Ada<_*1zm)annv-2AJX78gh97wd6d~ozkM3D2RX>E$v(}9?DrM zpSn^{z9S=e!0hw7JqDX8uVi6m#WqJtsI(t1*1MCr2oZjfo13d=`Al1@{|S-g!p&FM z`I|oAwF#D{es%G}n$QRv+XR5*bs3cd`b_8Z(lMGl7UF25Fe?k#7m?2JaT-IJTJ0x& z4-MYdNnReB)RXF{`!Kq2`od3#D!r7KGE4N_RW zdZY8lXW4fMp(6?DV$f1tg2;x+X_^unDkdf3)NpxYb5lEvp_KKpxIwUERq1fQOlCfo zJe%TBTBe=~2^)k=cxJsh?!VllS-KLry&aIx>H)Hd&sA=J5Ew8$|Cu`JEf&f+aeFO;NXHo2oi&h) z5Lg^YAu$6{i@v^Un^faFV@b%s*0_hRZ?k)L0X;Krnr6Gtjfy0c&NrT?%A(Lny1h8kcP7Fh#!;Jxqoha}112G>MfM5ymgP%W886A+!vmC!ks`d_?luwIoX{n_DgjTod`OYNy zYCUZ0HR8hCX;3$8>czV&!BCFYJ`J-KNRgnJ(Vj5FNc1g0skiz86cT%{6#6~VPS)Kt zg5dH|S-DvMsfN<~atG&vL#|mifhu$ZfWh1!jy^?m6&K zgv~?<%o-4mD2d~S{zj2Us=2u&|F4Ii%A!^oDTbVS`<{(g@G3G*cK(wSLCeMms2gA# z{*idyhcoG1@BmoUgJtyuzh&c%FD5Cq&suAxZ0En4P^6AwnTc>gp~HjanK{IpCP{Se 
zu2B9Q$;Hc1n}nNj^GI-USxisJb+@w$99@Urny|7Y3gyonyd8V6H^&x$Z;NyzDK9}~ zYxfHa3SMl^2AERBmx{(}_Y4j^S3SU=Wb^!CG0()3C`OEEH*gyJn?0&S0CA(dwv?4B z1oUeoZ)TT}<;zJb-rKUS>svgSG(yo)|HM(9r>1h^Rha?g^AeKz%}DyLB$=@q!nc@cb9W3J!)%^F^4Zn@$+@lI)a;a6rX3k&Z0>8LN2&wMc| z)3=j~>F!?!z@s&7*_iT8#5DTPn4vpBL&NAdh;qlq$M({n9>9M#R$C3wG%F_gUlaEF z?sR!UR=;*KjpZCUFHNg12-LL2dI5l#RuX*0n&=-QC1+*?n) zsaA8RQNqM5AP4+wdoS<$fP6M@i(TN#EsO?ibi?g6h8`_XNX6`g7O7;Frm^REzs8ED zJzUpZg30x(5jyv8sZrsn^EjSjpC5Ayu^upjvA=zT(|AoT1mu;^Y+6j)ZK}%cFIW+Y>o~XqVPeR4L zjwEMz@!bf>I;L)YFSb!(W4%I(t!`wsNJ-WiZ(OUqHB)iA*4Huf(9^usk3IffAkwX5 zbgT#Yyl32|TmD!%qHYwWUhB{N_m}au*KQZuVDgCX=x*a+$V>P%PxS$fe7W2a$9nl( z&SpukwLr;ccih^q?CD9Zo(F^?iCha;*@4Au?+yCpk~>I^{;EBY%_~8rwM-a751@5LK4DNV@1lvq<9CoX-bl z)3GTae_ADSRRUPChB$1{bUhZO*$Rc(EiFsE_j{>(dk|nn?TagI+FR3b%lS56 z2R;498+<;Fg{enpS9=%PTXp+AzNS-#uCWpbwlz&*m$g`0c~|p3A0Ka#GZ49hlR08IZ?8641No8g9%>3P_S2nJj^H zZ@)5#%Kqbd+=6+D+e7pQ<1ORw%A~fYkPSEJ=$K#aGHsYgnsq0~vy(nlJ?i`1H(WdS z{k|HXl{z-%>mb1X-Uv^PT8@irS-kUNoO-${K3N2+^Xd~^^Ij{LtZZXD#Y#QJN_}%g zT-KI}>EUMU{$#2fdxtGoJFLdBPCYm6lgvj)_c|C>^yi>qBR9KpPdS^wr+fOpGG$!^ zx-lQK^OxlI#Q3P*HsC%=WHOoU&Ci&v=UMd(4=E9{g)I7WY{eN_+U;)_Bj%+|Frm#{ zR!ys0-CAKm!&LpW_T9b~#HSmsU>z64t_8MCCi%t#I}1H8(ie@Vv{as=N#^tP^hDU0 zSw5`#HXfUw&w;Gtu(__pt5CVFyIy!832t#fpZoDoAEw;d*@}u7*~f(T`R(;o#wDDc z{M)yw)NRfl&mTo)bS7QET`@6$fBl9O%%nDJY{;A%n;=Z7l)BE+re*b~tICwwap~ksDT1JM{J`Va;#;ESQvmWj%<<#){GOPG=jxI1@pK%F4 zKzCa+;s)Oq@bjm}eY1Gf~T<5nA<}M26N8Y`brZ0;;s05o=Mjxjg zDp+UK?Uk{j-*kA%=)#PujYP%;3I2Azg5s3McdIg~0s$absqDtFy1`u4@f8oup>|^2 z+ux2QBv6vJ;!cgWtg0IqM~$A&O{}J2uSU3(eiO}HgDf%&73aoPL+j?+CAKp5*L3yWVP7y z>O8eq6MNc5M#T<Q=cnhr;Q_$)W^@F`P?{xFKZDp)aYQV%yOrn}+S;{k~|n4evk?LEWsO^;DDJ`pYG- zu6I?!LYES~@b8t+^@e3#>F-W)?x$|`u`3P~qEfz`G5#kdJhEDX?z&tl@77G7!)1EP9DYxEaEt*6_K$N#v&ak&ivr^*1Do06` zOVVPI#aj6Y)k?FFKX+oy;J@T`P5`=7UC2~i6rWs#2BkE;)@hdk^!%xsbZ4kI@0 zHfZFuRaPI{AJ&iZoC3$gLpb?|y&80LE%^1cpXq$8>EiZ>ZjCtq&^5E%E0|cMjW~TQ zwBX-MbOJd_a$YwxE^$IIM1;-$OpSzN~Q!|L27*8JQ-Qe!|~;`deU9Hk6h 
z(fk>aw65)e6{e?ps8W8FKoESJGkLS4roBPd?&>(tJMiTJv0T)`nB4BXYTXdSaeo?S z6pq7<8W0hG8j$!e9V`=Jc{6MLnyTZf62LfNgFsNDfmm+I1+7L@i&~dq$;wBtfTqg0 zR$G(4^fEw59&~RpKcd|Y&*69B1b8_n`)4v@vivQbCDO`r)Q)}@%zwYC$IamJj(hmS zzQlSFjHIN{X08=#epjOwJ6x|r7>+McQY{TKOeUrhJNO8bCB6Z3R`?T2Rg{weDyQYU z!N>9xXm^2yQldr;;VQQEJE43C}yBWP#oQ^)}3`j;GrGL9-IiiGVl>o(z1I$Da`g6ToMI zs2 zzv$3ZjiH9&f3o$z-x(}-8crg4)`I62+hba(<#^V*uoL}`^4l8bBwl5L4)(3!$spndB+J^%7nPI4Ab?Se`qj<5vUysw0p-g znwS({81((wNyHjWAd&IZv8VMPNmyRB@5CweSJ@Mx=YK}A{Z&HK_1ph`jQ_&NgZ8Dz ze^)xrS>HGMemCn+FWKL}WF)-_H~IVaIG23qKcQ`Z6;;P*)={06vR+h0q z>blpF({T`AD`T0hn5~BpWz5mM1Jw@c4lwuL{it{ zf24+mFJ~xuqeq|aN1{hcL~6&S$&h381daRg1%lZ>O3&HS2(#UTdKl`?86KU9^E(n+ z9BD7n(M`zT>H^trPuf>Yzj0q@B@fbIArte^D%j>c`x`pc3xEY|KdtOgmJ0)y!)-j2DjBSvbTNphE3X(-TE z@d~)tnWU9#(r#Dh{U?y}zw`n*5~wIDI?wo7AVk9)&>SirsrBwL`7J(^)rNT5>>5*U z@Nfn4mHj>=9@dB2vs@1fF9e@43nM0^1iS4KX@M1Dm?3>_r(&N9k`TQ+$CRzpxt(Q9 z4f7a=0o~Q6^l>Vmn-i$KZ<{wZZY?&#lfNyKuT6&`wr$L%mf>#cGR^TEyikmP(pdD) zcU&U>mwTeJt(v2uXMJe?l@gcHbNfS>qvU<4JNR7vD+SuN>bl8{7a@Bn={jN(S z-RLRlRW>R(2z5z4*ZQyN)P|E_6-SGEWU(I*(H9jnPUqINYFiNs&=pm zV1?2qYd_X8Gm}Y+;!N5DsRl#fc3gK!Tbq%2 z4$O*zfXxc7y+K+DDVXfyT0`L#(C5rY+N_U2tjn1ctmsx|Rxk`?YxfJ^vc?~J`_2)u zs2PJZ9cL(_wrVzFq|2}+fRh_acGv4;f6?<9tpBJ#aC_Gm=-TP zv5MmM-Tz9&$+hZ`y0BVb2NCu<1BfLh>5>&Yj6PfGnaMkY9Kw$E*M%l+y1wdj&i7oG z&^gmn3$yA3AFU^g&YUe=t?wFm0f!3{OZ!-NCon#)DtUgAQMlTsW&2CUdEEoq@t*VBvKT1yq+hGR1QRlF<3Ky{Zg}a18vg8P6$EGIj zss7?l{%XoWo|0p0Jt(&n7|%DVXHdQ^p(yAfCUMTDW(dCZ4cF;!P~N$@?*;28$YaTs zZ|KX=5+OX9DJjmi<)lNURqJ1p*=?g>bB%0JsHlK25>g0Yc8FJcrjhNbYr^WM{YB73 zv3_@U_a=TP)A?4+3liM@*1v`T5S*W%KRJc&z>+q?#*JOh)ejDe6MTDOre|jCk-}+0 zR%IkS^O1S2$qkQCSq?N1GO%$8EQ=8)5-!-HiR^@Zs`#4sv%##!(@JimKnqD>5X}AT z=JpBZ8s+j8UtmOP+Vtic(I?>fC!DMtQ0T@;l%~c4b}mdM;&$%YAJd^)D#JFSn~Fh@~^STqueTuJwWPy-pa)*o=#9v%8`Ctt+8J|`#t zz3PETv2i7?lM3(ZhjI~b|(j#Eos*7M>nIoQGB=ZPVZVBjK zCSGIZ$xYPIu(O)w9WES#cj^^kE$1Jp%dub#np+>FMhbHKn6F2)O ze8EMBzw#?);VmeZW#bmv!q0-7!v`OoZ|jEMb>wb-pvBFKlQB_P{l 
z$rGoBD8W)WQCyK5(6?{LQ+`d7JO{1d6qrA7*By>Jlv`vknhd`Lo-v@u+v3?0lc+|mj**rr+}NjQ%mARP9$**@>;$b${4_(J?2NUW(E+WU^hYE^748L+yD0O0KiQ zWIa?+&-Sr#08@N;nm|H@ksYY+6nh$};5ovcdnSEz;6qrx9kBU}`BKB6>>t20sWJjT zQLb}aE5450Q#zq7QMD>*lc^Hh$;nMH7iA-rZdv=W6llhW?nNo{uUesM{XYh2%tBH| zdGnxzg6yQ0%H`072c9vSF00-2Bq4s}-70uEWvP8a7E||@<(_~*4C|ZZB&DLWUV}*+YG!$;m>3@3Z zbYdv|m(xy&q(=YP)g)6|a#B{dnJ}0_aPfp+5yHObmuPTYD0AFesUNF)+p*1VX<6RE zYj^apc1xMPW6=zIdTXFvzi%wSnh`lUnKe*U*r2&C9?J&KWf+BSc`*{ z&(U^JkuLQnGf^n3BBf9}nuSN?EZ0pJHYjI6o^i*ji}Y zWL1uM>lqq;MvCiA?|H&zL+sveMOfTbL%^dyVetQODUi2uT&{tB#cpBF5sP79uP_-= zRJ5CX?>qhLmkl;$n3HZNNxQage=H@@9`zQpX%ZPL!Se+kl(p=`l&;3~J}KG8B1Z+Z z6{r|}8`!$>p>rrmAe5z7%y#-FRYkUK3Pq?vSj(C&hckSlf6VYR*;{N+l=DGoyNq4- zjXGKF`lG3%g&2XF0x>e)S;Mu`X1(5Ipjb{8$~>M9<21zNW-ut%*?$dM;%ENRtj2)W z@4m9=STV!9nq!~>C`oz+52>H*wAE!0OKPKyP+4AIkkbh^I~^Zj{n4_1qj*!&#K*wy zF)Q;#Vxr^L=7w&8G5oj5SFmrjL1%!wLmfWrJw3KKqNDY%#Z~X7ix2V_k|FQ>~iy2o`SPXkA(=US)i!`!`P{Y-flG20hA`bLtBV%Kc zmF|z>kstc2Nl+3NpXir|-6i3w3@2yc4|5&#=iWvz2rlkQmc6MHwbE19NVz!~X$qTQ zh95R5T07BMi3s@T;~rANL{r{*@twA#YY|a=ZFu?HV^aLZzQ7lAIw+;$oKGynu6KK? 
zJngZ~vJR*Zci69+r;-bUrC>fr))}LbhiK-90NZG+`FYLfPo~Ni?PcGnU%plR;VE`s z=?_J=DeaK#w5wr}n#zt=^+BCY!!=v%d{WktX-#M!(0_S#a!pKQ85P@I;V_^^%r*L7 ztoxd8KOzb(E>^wB+yy_saq*&EHu?eKfq;M`bWG>`8u~YHxXod;s<&HmhCz=it|9&7 zqM&t|C1v3$X{~KwSOUKAi9ni!^&5__=xXjbb$yjEMkcq;OX!crVhx{KQ7U73BOT$=cKila{h?3ZHd7am`?qTMxOMIHgaNZQbP zK6#$=`Fk#|inObtGFgq9BB!TreGXs6Y5O~-=Nvj`($k0QeE-C~IIPUmCYhsy9tq*% z-UfWvcMVv+&O|UcXdQ>v!A9ee0WiH`e=WdUU7wFJlc91sac9dR&R}jv8>43#%IXgN zo@{9c9vsqzr$cTYoa5#nsER&6cX*X83Ha@GGcb1+1yviT6DbL>nsbgMvJsj!ur6FT z&hql#G5L6AMWp*d|IM&h&7|Ae@A_K2(%K4S;vfB!<$)cx13#s(Ss_B4BZ(mXZ$bO* z@pQZPW0RMJsoqnaHZcZ7yIM2n!@Jd;{Gho1Xa&jA8@=Z0J^NIZz->2gK@F_FdgS}1 zUyi+K!M)4ZaBlDSC`$8YvLsh$atqdFr3wdLE4K8jAvk=>JKW`sx@?jwH%hKZ7_?8` zE1M-;$wLIN!dRH1Fv4~#XG2QB>enC#?GBr|G5cU>ev5~0Mhe0F=kQ_l;aUp5V&U#x z%9pT9x(+7MxDrEqDi$!k578Xg^T;(aDBVzB#_l=J+F1ZC@?^DtWN@=4Ugr*rAbbkY z=Oz0T1c(LXf(5n9VQ+mD>+52LO`p*10xlKle5%s}`Bl9_U+e_dw8)4m3Q9n?_QjYD ziD_DHN3k#IUmQFypZzN3x*Z{2tq?s9)uIc`qc}uf*cbh{ zr6*U?rgGsPhmP_Cp;(A^+k*YDUv==+5evJn&H(XM=MDKi@6|=T?j31^*wtc})H3O1 zlM4C6rac`A&nc?LXp_}<132TStiFmV>lw!+n|YkT3I83lKwi^9*u8;T!=O4NN{Fca z*lVQT95sqNF0*N%T*;AdZ)4ME-#C)GoTRuM$tOQrp&sAa0&k86*v=HKE(3)oujTTy z2hSLIDf=Sxjdk>DH#c=hO4ymE4Kg}qM%?E&ZcT5vzUR8z32oEK8D;oa2J)mt&|Rj*Iq=xQ?)ysZZ+P!$B6Cr}?~dF@IsN?tE0*Fh+Qp+&Vetk> zPjpf>UQ%t@!pF6O|Lwx@HITQX2cGNV(mjaSL%dxWpKe>C?yGSBeFoM~!au3Dtv95n z){o^+B<8ONt@EgrP5_s`ifDvo0Q*dT-yY|L@BQyHb$^0Ef7koJ#7F+$BsBfy(;&Q) z|6hVb^~QduPUB=M^itIhL^5DMUR&_E{5L~rXkPy>c!%@<=kU(|N8}3p=@fXy!GZqr z<@kt*h+`1%AL+^F!zotOUA~+dYJu+G zAS5LvMR80P=DT?$>>S=-V8o zZ?3r{c2R?CtvOsRC3v?w5)HrFF8*N5AB~{8v+UX=(<~72cp|XUVZ@Y#qIabF2L4Vv zr>aWMP$(4OVqF_!fxUU0$Ld%nqnrxxyUe9xT$)I*0E>>~WAWOv?o=&xn z`|Cd5H5M13RjAO`sVIH^{^uK{o=Zv0`Hjczvh->{W1d*|L+xsKZPjTARt~9PpUSX z^B^Jw+uPeK%7@- zYnreNZ?su$ZAvE8b?Z0NN1yV)KBZlpZ8z5Y9mc}*s_rq8e;{R?{nb0eoU0BtR%ADq zj^QFCvx9cJX7`8$$c|x#4et*`&+$_gV4Y7W&5L3jdLK_;zqLHt;vMOi3*KbpD-mz; ztF3LBpSuQqKi2(k=$?8wv-^qo$P(1j6%7vEZMt>?CpjD81J5(5dLdI2*apD2$#MF zk_c;}u1$5iAt4IXGgl#XpiEkV9R^m+sx(f5z 
z@;!g{j$_a|{a=IwJAP5Q3Z1?*;Og#dVr%pABMDgYtw5g|jp4C8{B8HAb3B*+mEfAC z8xNFLy%cV$CAEqT^D*~GhAMqE^#RC*O8&u}8=w&~tiAj1)tf(t<4NE+DgX5@aJj=3 zkW$BJYjo5d9`2A|br>sb;Up!ov2D{;b_qS#?0UONMKhQ;=1TmhtTtp0m-DMH)ly5* z|KmM_XzK0yMZ5gxE-rVkYjVfi=@?F=h4c-8R9doN6hotjeLdn0DN4#?&D6xB; zRl+pV82uX6U$mevEp)*QxnSn@x%R>xgy4Rs;|ol_WQrI#ertt zf>f4#@14hg-td2;Xny+hK>s-!BTnie@Wme=-OxBAPMf8sm-Glbl2q;hmaW39 z$Ai7SJf%dT_P#X`;so^)C0(mUiFfgUQn?5|<2ReVQfAD6`|V*sJH(Xf7Rs!8@4ZCF zf}NF3w~rOe-`yxtVqXQCCpvL1?@s8U-Q17gud4xZWkO7?d{+Hcq_JIO&DQeXZ16Y{ z)@)NfYkH7ZSM2Ti1K3=H?Xj6G2N}#n2@v$H$K>|LASk}v8FlIaM2H2_Rm%DP@El zy#+5>mg4!T&GU0K+us+tFf!9oG38W3fL;kQbGK_>R-R{tbAih5DA}MztvB%(E36&! zu(|5mAY6@?iaZY%B2wq&KG3^vpC8H@LsnDSj(k}f?EhQl!LxRXQm>6o?VyFSt zP$@TZUkVzSi$=$dO7(d=gydIvPCf22ta(zHU(CS#`wKxN10F`WhDAg{G?6{_U30*S z$7QNr+QyjU5t8Cdf9n1ZwCpmUs*+qR3aWJEUI-*}oMEVR*Ds6?)T0)Zju%Z?f zid$Uu3{zlxlibyMV8-h5l=`1T&%;B)WG0XG278r~|ACz*e~k9zD^scZvhC*tHLk9v zPUJw#2_MF_*e_dKc0D$)zm+G{ZE>rmp&AVeGdt3Rg#DLZBM?Eq788)3C5PLi5BY4k z&UagV%RA_Ci;BEl#%9YaS*Z$_6_Vdv9M*u*)SFcwA?vZm#xSl@AnPcA{SZoP zSkSqQt9liVqh$PE zj)(Mi4H#?}CtXZZcc-0oM{P=|aX@k^-PNaz3gjcwG1>wTF*z+%pDkv|>a1A=23ePb5w(x0Ft7LO^$SFRV<%&2x%1JgHUN0^jD zVs3A=JQl|f*ucW-_g>XQ`kkDT+8%gRr0-3+RwKHfJPPh{>hf|{3~n^OAG6k&jOzvS zoV8i(Od_2lC$ZKG$PWk~Y~x8(R9c#8c=c)54hAzf1OgC_(4JdcAI?!RZduJmA&Wc{ zf0wGoapP;z0Zg*mRyg_OlT11MTZ?NTeFu=onM#hT z9~jjqT`l_PVW;aE{TLs%aOj0T1zy%HE)h-icF1;s3#RF?E^0Slq?5C zn51PcFfw3&_Q{K^m$r)7HJZ%C4-(IO5hekyEuPok#2VM9m&l&-K+IQ(`cvp+d}3VX z?kQ>XZZY4O@0IYaErlor$e`Xeh{VT&n!hK_BN z&Id(S@{%rSL8xc8B3afgK~p^(Y_QK{=?uux3Mb#Np~L}9X}mO7FcWNR5D$&;kjHfFebuN$)j;NC`b)BS;M`0YVAVA%vcUB!v9qJkRr+ znQ!m$?l1YkaRj*cz4u!8TGw^18q4QSjb{UFg-{mn2Qt8$Kbl=h-pS5w|2Bbk zPO6iWSYk+bxq?dL;sLLl=egOR-954`Jq7vuRnsIutsk5=V-Qbgiv2o1TcJ>@N{#~N zC0C=*)xJ$D$jmh5p0%ilROe=pU%N%L=vR>UDner$0YiF8D_o#$?(q;VH;iN zJz{F$1|XRdGZ4aaNDmdNR-s52i#iBbV#ivy+BaTBB<#snZ)-9R)RyHo?qvf`Ac)q7 z1#YT}Ht0S)bm!&|ft-!uk#9s@i*6%;SI16dn?}h1MEATJm95{VeL56|qDC-R@5~?E zB9xF&R9h7enS-)S9#<=aC@G&!kAbk(w;%RKKp!r9 
zj0J|Jx=qYKOL!aF(dS3S{HDq{44NN*avD(|*^^2W5WnoxXscoC@p?jF^Am4&-zwOQwqI%Wdit^Grd@zw`K7-$=}j{&4l@d< zr|!B#)~Z;46S}X8T!8gq>iVk5=w*o>tF7#%2Ug4HG>Dxxn`{rNvB%$z#%bkDlnT8S zlbN%q1Ee`d1XnM$DQPCn$k;$wz=*NcruGaHTZGfHqW2gSVxO-WWH2+dW-1<1b508+ zx0xjA8S4O(u*z}_H4O+#Wn-sR5)95`f81S$(tVApJj8IM1~5t2b=Coxkx9Wr20!fV zy%DI<%}se~)g$+Ff9zf3B7(JLpLhL6g8e?OT&;yxg=vujXSO%sdoyWa8}7}${{P{Q z{Iy84VP;jS&@<+@&$Dy$S04reBEEtUSSY$-EU)Zd8HE8Cc^}4{2%^DXEJ6+_BT4;a zkS6Kfv@^nH($+JXw=ZZ6cA=l#R3_c^dS6;?fr zOLAYu7^PcuZ&)U|@XETUJ5=l8q>0dvka4RQ?}KUtLMPS6pq@b`hRtESS>7BE`~-47 z$aYKEg>P8o#oYmB*zOJ~lh_7rELVN?8myk4IJ>T9+a1(wdMN=S*Nm%K_;wQQ{j6iZ zr|*TG+E7&tYGNFpZKO~>w*_qG`)4Go);tj)Q$L*jhs1ZQBstj#yN9n20UD7?Vd-nL zY5I)S=}RodK!HC}HC4JaK|rQ%*LA_eC}w+Y%e;{n1}na&u0>aT`|~H5V}d5&wR`}g zehrRVE@6nF8Nr=PPSF8ftXSwfI-gG0Yq?ltX zX}-p`TAwXbwAJXfr6qr2enbX8*-7+<_6=Hm)zwI+lLQ45Ll!B`v zs_7YZP(tA`NtCGUl1}jF-uTB^%N}CSF(quothsHJ;cpTMx1_mdBP(MZ8>!vdp_K7b zmzGV^*?t?4Ei;4>={Rn3r&(EcL0(1~Z_}^rT$1l*()El21D*0*2uL1^)vMd1eKiY03&sdO zjdX%q!sHGb-tUmf4J%AIisYDB83LCDXM@VV0XL$tYN)C9dpe6S!ns7AX~ zCGXR0Tvf1AYN=3A&4W--ip>Qs_jO5>1a|QQimCBk^izq0d3Ei%$t|Q& zI$*|b8lQKbMV!x=r1kK)PrP%KeX5G(4k-%@aj)&^SSrLf5{8V4fb z9Sn-B5ZfdH{?XpmyZ$YEkA(?l7hdSgVwc@q%$agYA6SNpzVK^59oO*ILKd^H;P~l4 zl$=$uw-~g2QErp|kFtAzFI_Nan5NP#bNxOOXvfd(RAuAuaGBM55{an1<0l}!{A_nG z$gl{CXvXEZw8ez14(#YhOq`&FL=+@;TM(TYmj#wZb|q)J5}yK)2opv-KDSKRNzK^+ zO)pC_S|P&ww!R4F3ff0HvDEsujb7fVms%((c}`mO-4@Hg$C+y|SyhbCk2K*wMSnbj zH^pR(UFP7|ExmbUc<^@ggjSAX>1dp%c}|OdgbiF(@qy}fK%cLjd3j+#hEjbnTwa`E z3_m0v?Vt38L#?Xik))`mT63%1f;;dIGs_2HpKar(~4YL0|M{roc6+KBv(C)>{MzZRs|&QT6Zj0p%}rHOX0ydAL>u)mlON! 
z4Rz#_4ZN_Gl>+-ZB+8w&*8vfC;VP#xiytX&3d^HoS^K7xc&YkSl-3l4p zV~h1#*#F6+?DqrOLcJW>n z4^D%0zyX@@IMp-i+o=46gnaqW`<-5yi~r26mzKvBR%!c>GZvPM4zC3j5qn*hoZdKa za0*&`pT=)p@ZMV%fyqfVNE7gM&>0JlJ@(DTyRE?9vOv#`x$cFKeCMmzM};yOJ#{oj zpF3%;OD`pjP*i>`*yAZ%Ytc2)cGhw$iH%kt|14L451bzF*6#!KV935RHNA%3IzkmN z2W*B#rX2AC<0s>UF6^UegUG_#Ao&DsmRcsq_&L(E51yD0;fc8nA71sTS)7W6*8=T zH1f5mr}LE@OztremG4EiSMl_UXwkrs+fzC{2I|nx0XNyNEZN5N2mh>=z*V=g%=5v) z=c8j{D)g4Cc?Ov$6Pg(32=)f}2nr%jPq+Sn?#hxiV;tr;hd$Zqlsn;%u5nF1`tk*~ zu!J4oyPsZJH`gaj*|Ut0b#FH94=ra8rKb}Z@CK2(A4Ua!Zm;r^G>Rs>pX8)v9VGah zdnCN^<-ZnpcQQff`j1|ps)b9o?Fs2GulIo$&6br21K!zPZ{mJ8L2$ohXuufSS_&$_ zPb?o;CH*8zfGuM$Bfx9R{V}^;H{i;+t6gkl&*$p494AkcgjEofx3U4Tb9AiJN{zkk zf~$Clt)Q<+JwSUi66N-_HPBmm8aP9Fmfy+eX6nQqjvVqydhnClx8%zN?{wbL@Xde3U0FSDVbWJy znd46E+%{oz$U87}%1a#0bx% zWJ)QsWw08<`FoeTI9Q0nR~xFp`FB}zZhIlWJWRjJcBWZdzeDkhi?@Q6(>__YeQh_| zTdQJ@K+ll5Q;+hIsSuuSW{~rgES9I#80EtwV>9p5Vp1ds)}Un3xsnUH06ECbSixGdvfsgF z14g6M+RyhaMd%Sx(}*thCk8U>>y&rkovnLX2eY?coHRDr@=#@zt*)6#$vrE@g750F zyWxP6=shciyf-eRMK8WXyGL@z@T}udXn^`?{7^#o@4<_qfjrfD^7r_O#*9Ggu{e={ z-5~K1zSebFD}ZTlN6C&w(vVTbTL)+|`Zoh!(~N?RL0TiCTVl-r%{-(aExfrvI@xi- z*ucPZB^QX<8+yZjQpfSkzjDdkJF44#KJ3(q8J-uRh6MF{`6+pr8t~*J)$J@*o}IEA zw7lS?RfwZ%`LzxlTv!d7^I$1DV~N}fq`$Q|PBrveGb0Idl3EwGpzh4z7#5dugoWH` zT$_gGO@{#&IWL#@?V|(tA7(D~=M^>LLmea!h(@%K0XEClisIrwecMTuj^V44Varaw zE4z|v7TTy&00$Gl_lUe+->e2fd4aviT%O>~-$@Cu_loeZ&Pc^w8X=~3sjau%3!_a)B)tuz~y6~8q)*zQayhcS!L=B_c? 
zia3(Vp97-e2S2PS-Od?L-d*UX@X*O-KGeN0@U}A7*wYJm&yTmxRZIo<{ZLNfBNnwq zSs$Uc=9(7@9nrWHfjwWdryJjKKh-hgxX;s$T zh0y)S@(5Fe@kGOQ*3QA-E~dgeGF0CP$gQS%gD`2;6VOrzT5K*MGI}JOPCek+|12om zkn8{}g>Eo&qKLj3atrIfNxKtO%S zWa?|^%kKX&OifOoKD|dA_8n~u{;dx-7ZLzJu>?lUlN)$=Q@3TmU7m&#Iea@^Sh>n+E?IUFBbO$3q6#-O?cprU)i zJc@w55OMh(LVL5F2}{h}=8U#cY9kig0*#CV^IL;_#)|^hV0OMl2}l4vxZTuNtAj6^ zB26abmKBL%36a1%M}XVg+ZK+FQM-lm$0KA9HbPF=E?(_SY_>~A;q5@p%MTJdyz^WB z-0!Ws9^YmhdquSj!tAxG+!Z~)FZ#1V?R4E-?Nv@^y2<>7tBFO}v)#ds8S#fw9x_esEM8Pn`m`UDGlDk`|veh>})?-7X(t&)F_sUe& zq5_ls5|>KCV#!i7b-t$dPn(V&pfQ@3Y|oW`oKvP3axa~91K1MO%mZRD5T(vH~Mri+Thc+!Fv zGrokf@N}0s$82(xfkWH#I7KwAVjR(e1; zSN!1eXiLI%my4sq)g^|+sUkD-b1cWKFb84kO>)o984L5}+%;ajg#(CU9Qe9%3eC^j zbCtjChh`6BIV>9GCXRxa&#j$lg$otuour&H^CbI)HY8f;(|3d(=BmLs)}1QbQnLc@ zDLsn1XbYX4wX&_H*CT6Ii&Y9~BJ%l>N7QFks*y+3=LP>b-`J9#+5E(46Vt{CE@U2h zSZSp-*~T>hEP~uiZfYr&iBYyJp(}q1O$S0_czS0Yx4p)CCQm=@wN=&AlQZ@sofvV2 zV}|>CH%o0%C_^Qx_;s2HK1xTZ(Q!ib!)`7Ns~F1jsUNK~RF zwXJuAnEKRf#I6Gi=~s7FFds^phkQ8-jxm$l$nd+l$zn_mi#4N~o_Z1ACIM{ro8qqi z=neYU&As}0bIn8IC|K^>U5U8Z*fQ|pD5M92G1ygOe(!lb#MV$rMpl`ziS(Afk_C*c1J7# z>_hycz0kDv$ZTv;v9VU!NWBZm9!bMrlJ)Po5$5SximU~PzRknd;|DN_W~2jN(+?G+ zR2f;~#0e7HY23~gUIv(v*=_`wZ9*NM6Nea}5BB|#&AmRYraMgir6JW76Zn6WV~_B3|5vq^+DsIIWl+zH*GwK6#r`GpDQ6t+X)a!xu!uTYavO!O z;t-9PbQZU1w<0Y+@s4CwPWJSNKkq-c>0Upb=((F|fvZ=AZLGe8fASg&d$a7e0o8$D z)BRKmm+SJb!1_#8YjQT#-Qs7yy}$>3f!fX)^V6-si;wKQMLPBWs|`R6Yy-S^_?lY3 z9A!rw#sIsIf`5U$TBxOVq%X0XEB%d5b#__l73cNM9vi{|J`y}hGaZ!H=AN%urholy8If%dX8#6}WI$&7dvIl!apojrFN zyVg0zX?arY)!C6zk%-HIJ1ht2=z}7`+~TGW31JirwK98jNbgjXZ_50*63blhB)bk7 zF=W^ec5M*7MFrmUoJ%shy!cS~^Z9${|D{^H@4(UtD`o0M;)RTcK(swiF@RG5`-Y6n zS(d`MtWFv>+x}_rxk2{xq};F_&jbNE!v`Z98Mmgmt`4 z!GmjYuX&5#1d~nP1tjZzGU^Q~h2G3BLrp8|pBf@JCuA3&<=l8%vMN&O7EgR=q}@&AcQdW{G~~*rS#V zV|m+C!(HN77WPHw15Q4^!tdW_SW=3V^u|UO-GGJPgbg+(WiyKBf`8i2*HhMp8)$=2$JfiDdT$~&Yc*<<3Qrxz2GyCP7l8AhDR@hCl z3B~aJ@C%aC+NAc2Xq`RAEq6m?kZR2BukTBy0S%S3SFJD%!fDwnqBlpm$w6nTC_WtUc&~<*3ZZI9cmoe 
zvK@|&e(CD63Julg`e&=Q^31=ZU})UTQZ=aYHfZh_1*`95x{Xy!SkXx>sd*4Xy9OVxyG`h)S`oExe2dkdK1-k4;%v z+PdATb?Uv$`FS+`NT#~XkAJOgll7Q(v3-VKs#WXR|NXkZK0gDisBHVG6K`4cAuwX? zUq3ry-&I|37A)Z}f}XX?9s1W7vRvS!a__&uS>6a&%aiJKj3WzXd-^~lbrAB=)JLH7 zmxgfR)1=?g@XI{Yh8G?<2JxqF_$OAdswp@T(b2Bzv9e#rUjUOkFp#&PJ2=S<|Gf35 z*WH=(MTIxS@NC~$qQIGM-8YMXR;h4}1bF)EKeo}o@EZTQj5`3rQF+`Lt8Fg?3n^Fr zki<{F#903sEBnk?Q0Nxf1i#>yNgGdYVWtgOiQF%WqRpu2N3}+elK`DZ*m>1iC4%+G zV}7d=%O8UV4P0&k<~|(_LaHSY$M|HYLrwtoEPMJN#nlk3dLnaGF{K9Xz*P?gdv>Ib z=iL6A77?sX0%6Qdie;qc#1BkFo+RGlU%d3?QRz|ZeXL4LqN^_&HTg3clc zN2==w6OpMO#~n<97`iqrJ(jr5&KoyL1aIbxX{R<=v|jsQn`979yos+GEAzKHz@B~V z^JSzPR?X%8TYJ$SR@1zD2vQz~@V9bY3r8N1HN4eWG`&TU2#S9{n8fe><93?SI=ilj zQSma%LPM=j#a>QBlpO>9re#Ys87ioRXqbU|%<7!_J^5e^rG+PH)5ogR3c%P*U2+Z7 zpkU%QJKvZ&iB`2kP5haC4l{+1)_>z=;N)pQ@ya%O_TdAaQWcb-R-lSuE37WR0x6T#5o#ydu>oXFy4yItZ*7f8*Gz)hkmhj~qh^Snx%%bfA3E9j(O?QD@UIeV!2nc$VzKGMghT4Svs+kqf1t6 zFQ;lKYSM-><-7ry_tO#%8eEN|yZp}55VI`iYcBcW^W60D5L6EuE|4HF(vmn-4uDdt zz5}LQTrMZK8Yxc|VTAJir*_E$W6%J=UT=jT`s@jvbV71c`fXUxH>)vKQAMOtuNuS% zP#OQAXrqKgy6Wxdo*lgneB!7_9s4GfU=>CA-ag~Spm@%l+4PLHsSbdw;y!J8i_h3m zf6>~Xd=XELhcVpZK<_H}HRDw@9RXFvHJ|kS@@sotPx1;Jj zyhhvCLY4qNRsg0*3$yzsCc~wE&DvoBWZ73PEGuD%V|3@q1?NqBoaQdAFr)oo4mvZo zqb#&FRp~|!h138`3`Dg-!XI6AJT;TRiijm-$_us27s8B zdr}LQCi)5C@6}`dcMTd@I?3MCGoXd8q*Wa=u)TXnN;S8;=&*2&d2NLLk8=u1>I=&W+zj(jHa!NtMyc^{Cp9`p6r|Ps6fg=6Q z(!CC2H$4tpibTDqANWb-j1IV@%k!%c)~(H45&`78YRQKW9)Ojamt2s$KQ+7uJPU$| zMVGlT+RK){L)$wHgAF*=l^fT#FOF2F*m?JFTZA)ny%uk!J|&J;<}95UgE=H!i~Ifb z`_0u&Sqdz8kMzYfy78@9V42TpNnE8Q9;6e1E#@A>I!ax*u%q6nG1+%qVHzNIH>=GRO@Mv;d{DfZ&U|yXZL22tfGQTdB1O!9AB?Z& zg5M!J(h_I#koU!cax9=t)o}?G{z*#l*d|_F<^JHsekoMQms*n2SZQnvD#$p58d8a9 zArq=Tr#pN`eS~l4l^6K4%AF<5K^|>pj;!0_2O?mK?@_EBFFo&Qicw;ZeF`Dt@f7Oojj*6%;yFYss=2Q%=#Y@t?Tq{o-i(RhUfy^4z)I` zC)V167b)XU_$a+Q^I@We$Zf=oqHYPV{Oa=@e2?p@eMm46Frw?|Zt0=ZnXdEt(5;pfNZ0Z3MbU^mnty>IQqDV(9yJ|qff$iVH zs9LHY9Oo$ZA{UWTQ8j9#=46UY6VKshUV$Kt1;KmpI9Dn~4@QoRJVHRY>OlykI%=Ld zp%MJe7CMc0gfyS{ezV+;!lS33k0kr=m^>{*@%;kAMBPWopd_meh}~;P?C+>6suaI$ 
zJ7WY%K@x8u>jI00&j}2^?+fqtiwZ;SmtQ5FK{kgJNuP;lxcQ8J4YpeKc$1{`U06}! zj&*g?Og21p7eDv`xfJ->bmlOcMaUo5F}4%B2g2NNWXx-pBop?AvKMvxS=11+=OSC4 zXK-SxhVm_u9}_hVXOya0#6fT=#cIPWcPF(O*xo*o?rvYyalJ4{_2tQ6Gv3DV5^;#; znZ`_d06D8pahiJp!&2!d1UUJ)g2f)%RT~Al|LBKVvyJtQxdrI=1L$E%wTnVbOEO=# zK{)>jG5nZ1tHKw$DS`Fb{iux|?F0Y^ryQ(}+=Vrro!O{jdRfcuMXB`uNQ)wTPw-x6 z?%EgVUw(~b_^9t;L8k?3kti2yG55=65!9t53)W|a574IP&$jcj!455cw@|>R)gvk{ z2MZo0<@ekr5$v*usl8UTpk=PC<+dkeJZWiCa+Q%_nWU?zOzPAbaPJB8N#BqQeRdE6G$z~+79m8Ytq0D)U)D#5G5aAam|#xP5id`bWmO66 zkeo$QtFbRQ`2Go>_qt6bsE-!!-BjLYpqjJ;D|W8Fx+#L%|JtwIWXii^T`8?3wy^Ll*-0`=)>i#Zf{rJHhR|%~1p*u2H z_%nQJ4$isuZDNxJ2R;gxkSkrtQSkLafC*T54GutVwbxcXV6L(xqBSaX*4uVKoz%+a zn|a=Ap=dbEp+z)IMV^S1pMDGbY8!SaGqs#I9V!ysB;ZA*)+j z<?-K|KPsX$5!aRVPVeoV+k=b zI*20eJ05F?^FddW%~%GW`9gmSK3me^*^$S-yAL;Z&coIB2`^EjP1XIN0FKK;HW9I0 zU0m{yF1hqWET$J6)uNZNnwDF}ef>rrJ3lUy63(P1ai^p@s@@*@kmumtV@>Q$Q#5rH zkbBu9lQjCVN6D!-F<07=!7(*d^^2gv2R)86j#x=;UXS!sLvg*bJSii0=y(OX|BU44 zc@V-)QJ7x2G^~@PpHcjT0TTUsuj^%Bpx9M{h7G0Zm)8gmw)dVYy`h~9iQFbx=f(RX zCeOY;OR&FCe+8qC<3g(NPdK`Ku55V+3Uy2fU6M2}{@}X_GcE!2Pfd_h>SOf6&^?2- zj8*BAdVY(3vHU*fZ1WB7f)2~!jzosls2JCzK;PYVzGED?!7S(FUdXO7A^EOtQJv(on)a@9kmm-;!nzR3;=c zFQW!NYF*hbFa3El^Fy7@rYa?mCJ4%)VwZ7YUv-E+)C@4iJnn}M+w5Amv19v!2e*ha zb-Z(TS6&l(`SPUBn>QFvvF*~bbOxBSqTK~jn5Rm8wIQgk5d|WfYZSwG`X8t7o+2f4 z5BwY{B}>g|ObIER2SiLu{A>bl`70z^nHwu_Rpl4k4myqia0KI!4=4jh3HWeJGc!c5 z_F^l^JjJPC2kP)Di7ywW_n>+>n?2R*pp`i98vj=3^_^*aIKj+c(B-UiRJ=DJ{sBq@sQ`f`<_4H$89ve#AiV=S`^d;Yx)79p%TLg4!R^}AX{$;l~$u)3GPf$G+Y^?peomF|emjMx6nt9bcGA9os(y4aE=wJT^lw{A08!q<2% zl!^*+Y_IHtx}Wir<@yxRow=_>!>3ezj6BAQu||@aF82t`y*YwdU?pxZlb+&TTDN<~ zgwtJ9NK&S%?t`2J%NQm2nK#F-0Xw`2Kj7C1|RZ3bHhS6K556aV^~W}_lveThO@rV- zx?R@58azZun;U;VXR6GVn;ZsWwjUzOnt;|XiczgQT5J;XFG=m2LV1r{9j|@QJVcr* zPy3q*Mu&YIvF)g`K_QraSbgLUSuLjlBwPLlPx)Vw=t$(!9E+>hC-Oa)k%t$N+7372 z=j`rVe{y(Xyl=|A1KamoAfsoP6GF9beH%XNx%FFX|KPSGV=1*cY;j^oB!XFNHA5}S z)vp$qT}nQXVUS3Wwcy2(!-0?o@Pk$NmS#7;Y(Q}-b2X1Pf7ZPN-xqELKd7p%dePpp 
zIne`NlKl!tH(5=41y%v=i}1=2no&Y+sZiCzI>l%4Fn$-r5QGdzSkMzfrmQJVx2E0g z=lk$B46yXq=AN?6d74Y={DE7kNjuzGty>oxVxgF*=m$m$rV|>a3-QR)j1>d7j_84&EkdK>9 z#cI`}Z(q;9@WhW>xj*CzRR`0DTP%6?<=d!&eDRF_tMtGrrDrVEOE1qa)JtR1L)kK3 zWOyc7NlC1}q%~F5d!{D>74Z1X8*|e1B7jnH$K_)Y1`5G&yw_d5;+bt!(yx;U{eDAV z3JJ(g+O{v=+w{D}2em5dl2!U%zD)QLLuE|_<2Fv^GL2Y>P&z;S=}C87jaR5H9;tf< zKT{kSkmE$$s@FdC40;qZ*R3IYby(42&EJcrGt_+zv!Q5SM#sVw;AzI zd153wsyUy=amu(EBw9&0a>BpMboBv zK$P0f;+0{;ZU>D;Gk&;k=mZTXL$lgo>*1F_Jf?kEfFJ z`@&Y98`%k%vI;qeuOQ22c2E-Zv$+0HW1s@6(OQQRVHL^;VyYEvlgCrun)`$1ABf+p zsGkl><_KAppj@$#lpR(q+3Un*`lx(a+7hzrNZh)qWkvt4RrS)Arz{`@cEzW^T!ooi zkU)E^ZPo$#JJ|Jivb1}u^bVO{CQ2|Puft-~k+F^~U2;A_Y95DMA}<_LCe+uWAgnZ; zHIxi84yXiCec@IF%6vr<4v_hxO;J}>BJ>py%WkPDXaEU+IoD?Bfw5y&$8HM1*mA>d zcn5wHU7cZ;O;)Lus%6Um)J6>8t0>@rqz!4B>H*GnF?zb34i0Ob?!q=Cwd5S!?}+G| z8r!AOJPMA{u^d}E=e9!pC0JG)-&DtAwuiEapRXOd@sSt(-rn3mFIMt z2jfsh5qAsFGyWj%txs9`3ahgYO8Ee~3H1&3g-&in30vg<(aEkrdlhs0c3~YYsl)0G zMmi)ju{%@kUVST#hwgtehB#82vnqOu)lsILGkXb)A6Nrq19Usocm`!!f+7=O+g;w- zFF63InakDkD9~EdVNSxXN7<`uX0k1B^CYA%SQp6>f$QeBF;YJsILHn+#SG^Eq4k}8 z_`5Q_d}(92{a_{(83nG)n^M&l`F3mdFjiRXOKU!kbmfi{G$Pb7j>le6!sL`lX1HG5 zMK+-d(D5OCRzZ&#n<65qbu-aeY2t-Rcu{c)sm(! zS32h?VF$xMk;Hy+jm96X2`-0|`)W2PdmKXPV_qfG!oW_;lJtEp%F4Bpn;#kRyLzDy z=Pw?!@b^z{uban9{Pi@Re#w#k2mJd{3eWPTiv0Y4vx=T`RM@y#et)c0!bMj=;o2ew zK=zC#KZdYp&uozY0&yfWZqJ`ISwoH7QTduOIKY)$q>NZs*_2FZNpC&Bl2Rkc^qH3n zeP}e_7OyrhI2tKGfH6#fETMa!w4F~dG%dGlmzJI$U0iyCjdjUpWnxwL2j4=_m7n}d z&Mh^G)tkNII}wYB2*o^p2x77ddbQ3>)l~n@KZSnd+MKrQz?^I?+a;hJOrlCXp8C$W zb8qf`iwAFm)8Czd&nkQC%N;U)H>wQBD|F201Sk6SYcEOLZ);d|UUfr*Oqm9+idW>< zUCR}tjxa6WfegAX24}_}*!!6)G-t9JZIu=3yAG~ym8Dd;lWg4H4{gVj;dIMfRqYC$ z<*UsRPqMRy!oNjz41W5d0;3U(NVt=`x2a0)E*0`@=dhnPayV?s1?}?7_&Ycsd!3l! 
z29{}=fabFD+|zd|`(TJd1npTC0AVo#jh-4hg5Hc3DTWWYk_Ao~^PE??u?f^ zAqHc8l1AK#(l+mL8z1^x4?l+#{W5X?`Bs))uE?(r;Wd9PB;%bJ!U1=Cyg2Jn{0Ri1vkR;)@x1 z)$C5~rHX)l{U;6B>6FX7#Ro3E^{bK7ZoT6Ot!0h5(E?4Y&Mx6{yRmy(`)Q&9XlEp) zQ;k3{@1m^OvwGv5fhOs%)YdRC=yilnR$`-h$o&_I)z*^BK1OTt?(uUt-es2!D6qit znQ~zOTO;8$D8+eRiNJK{6*3R5NHbF_fdGr3&SNwlozVI6dM<;(U3}@?& z_hJOsks|E8o$S1!a2T!<*mdH>iQlyTd>?_ocK@Nqu?fwG8bSN$zMT6P8;%^U-eRoW z4ailj%W0y@&b$?&2%5*rhw_2}NdCxY;0h9!)C+Mxff1>a8oFWf%winf6>x=nUO@Kr)tFez3-Qn0BN7nC9vFcAi2yQ4Sh zVto-qdHGS3PM9U!9(OFtPcbi21wrB46hPi6mk6jNh^i&>NzLiZ*i*$u8F;W>hLVo9 zsEVxa`)gpFQ@ws(b&9Dif0?yi!@8M=Y4AMo>5QO@Zk$Q_aiN}rC2v-@OC1;E?!`h?`>93L3YOqK|SKBu2;)WYvwCk+A8HUBnBVHzflgBf&|CVGIlKZQH{M*bavzR?>-@K}$8jBy@yKCuVG5Wi}0rSK^if zDNgNDJvvbLCI*}~8}}R*HgRvGjZ1ZZj8m>>o|l2Xfv&(j%xOQr(hPCB*5L{nW}wAu zqe|}!?Q!QH9{+H8P)M6WFBR)kAPgepH<`0QMF)zy;M&qF{><>MdD?iQ&HDUr0xH3t zQ}7v#YDBmNGN3t)%lq!wN9o{Z|LYtBLagbuH^fS#KYe;G_JRZ|(rcP~sZ>#YXOf4u zy|6E;qGo;$Q!ZP!HF)=<_(Ye9ca8s!L3`PBl@3bC6CKCGE~qGD*Dnv$IC!u9ZW8z^ zPlX3+{t-_m4QT`*U62As?{3`I{^_8SH0en68avmNPK;oe&oeS}eX(PA);HW7FD@TEoyki3pX%`sEJvlF4LCc&<)$ddfKCwd#@F zP^q|chOpTNUCup-_UZ7im*D?H7}G!`-Zp}(lCs^rCvLzuI0==L zkOFtd?9#Ek2n{P>BQ)^Uy0iuF@?|c-6_yn`5z`891N!Gxv^cuuy>;9`A*@Y-7#HTS z@+LzEeX#aqS_RdV==cEF2+u+0a8nEssiL*LY!h9co>)Ayxclwy=pRzXMLr?ckPxy% z#1^_^E%yN%+m(~g3q{YQW(8E9lEjG0lHY@>ZeF6_BR(1~SG&`UHiy=R?{W!$G!H)3 zO$v2kY!uPjAl}$w?J0E<@ooQheICrd|$ZqOo4t^F*T8NXx~= zEV{ZS>u+v$WD}xfto7_9PB``_j#a1q@5Z7}3D0wTkIOfsN2kuY-(-EvfNVn77u3vJ zHwgYV=g;MH z{9oq9#>Rpv-vvjNVM(PCN?Py(f)>btP4O}Ll*cVx1_XC1xa-kY>*CTR3Eu&-HB#Fi zeKI0=NQcPztfyKc1y%Q7=N=H>6ByeS9Ud$1&QyiDV8lrueS)a@*6P3p&hwRy&?M&JyN!-{o@vI)eAsFoLUhMqw22K| z;1|SkQ`%73$eQIVHZw7Uwz(7-4GzV3 zo;G`L*`K=_PkJNQgTYA?PV{?A^{qFJKL>SR39P9G&9Zj&XPc>H#KSz~fm3Fd%jK3= zB44=Gq%Noi=3va6>bM-nZs>dc7`X!2pHdO;mPkJqwxU6A>NoY__ZY3VoKafdYSX;< zb-{Ji=^*JmVr}-*7b)F0&z#_40RcPl>Mki^ngp82{BQ!<^NWJRybmK|qxq{1JZ$Vr ze+1=}fg_+k2cD+^yL}{Ia}m6cJtKd+qq6&V*$PqLs3r7%)}fR)sRlH7{P{%{ z^N)>=yq1zf+2@^xQ%LUR>3{y;5*~5y6A|7gPSkbju7h@oW9tvu>ZA{{RrmR`6(|eA 
zd;v=X*jTe$g4vmu4shp?$`Q1BOOj=ky;~=Gd9@H?P#5 zrp0T2*JD4Ch3Kgcy3VuqO>S*@>2Mg+ioWYWfvT@mhWHX~R5b-BftC>m3AMb(e^`v6AF7`0menCDR-_FXy5?L|LURYNQ)+iKm`s7!{jaI%+4 z2laSFn^^Zaev>rm#L!TnoEpsq!U>Y-2$!+D>){Se?{)P6NFj>xxyn5o|MAamm5og& zt5OF8r^83S;&(py;=IH5MCO%Pq2N|bHKt0As8YA4$ilgx{Jg%6ZpJv>OA2uP*w7}W zLk;36%LpScX7n@+EOQs}A6Dx4c3A%`=_O&$p#L9Z?-|zA*7bX1@31Xcs9TY)ph&MO z3JTH*J)qJFy%S2TumuAuAkw8nLJ}#V1&D}%^cn&Q3B4tR&_XX~d7pc4-Ou}+>w4E0 za^+Y{dD)DM5;u9ww%pQfz7kQ)U;dmou6P8CAxU9 zjV)1rXh$vZL6#;s*JgXuv89FNu9@@vz~R}&rm*q30%q3QN>o$2{$AJt)>jUu$wu1C z@$W8eGhUl@HEMn0dr$+Psjw%S;I|%c4bQxs0Ndp9S}sMu|1ypI)OYnzpf+d&5+6Dv zrI+MkjHo)K6ph0_`|jJY&Np3g=M9aQAq-;nZ5Dbf1wKt_D(FosyxyXu5%zd_qc6eT z)FEmV{=CcN@78gi+K>r1!h*+yo{9 zLJRq6-z}Q|Jz$!F5`mUN9(pE9(Uu?W2?~P~s=J(55P`Q}7_I00+;z-J>QUY1&p5*U)+1Q3nCDKP6 z*_)9E8q=rjJ{Btcd-t>~ky+Q7QmFQJZ;k8&{ZB)zUw@G@Zlzk+`Dn(cBzWQ0p3*=p z|F3;NKTY2t>Mh*j8GwDI0iYcBS@8eBemnezqxiN9`L>d#j|4CNTBZN|?#K{JWwx$! zd%Wx4ohF|7q_zsLyJ1}3B~u_a%OMcI*qZZfRtYlM@;|@!_vZNlI5m&KG{qfzNTnEU z4Ou(;Omzs14DctAAp5?`02`wAd8gRlfB*AmigNj1Gl5l~_CG7RcI}!KkZI?C?Dzfa zd@Dfhv){L`8-E@AEnD~JE~`I|{f<%tzp421%>O7u?0M(v|F{z)FCP4ld;9fJ<^6k2 z{9h9Gu{l2c@00xi@d^wU7rF6(YR#_nd+|O`cZ5g2k$F%ejYenu7`*?2pRIz;RCgij zWQ@MKdFARn>dU`u!?X|#20L{4uuH&`=e!EIj@HZ2nwlEUOQ!{aN|(Tq<+)0xYg<}s z{&oE$Lm+eWS;H)zG&z;^`}Z}F9>cr+$|52Ik-YL>Y+eI-iJhIjJ@!$?AdqvzzL7^h zlYHA_wqpS35e5GJWaZ0qeDy%O*3n=^T*n6AcANF4hLyZwITe}*&C3zLA@YDg08-X` z_)hZ`?Atjqn}*}lx3J(B|4+BGfA0R*!KqQA%*eywjg)Z)18Dg=RKQoDg3YV8TCN3z zqRu4rfg|i9s?dY+{lgHk{zMd9R z(<7M78^wal$Ja$f6g4tZxs44dkDJ(1uGiLv9f^$r>)a-?#ea+^3I1~qfCcce^z{5o zY}I5Sr-Yuum$}v=js{l!p{2fqcSFlZEM;UAdM84Zw!{Rn5&dXz=#BnLA4fR1lJ&zk z6c%AfvecU(B_1a9wtAe%b6O6v9t;Rd{Vq!%-eX%$*!s^JxyqW8&LUd+7k z+kgHllVYU45`9TUS0#A0OdBnb6U-rA>9G?ovpGe^a0o3WBGy0<0)o?|jpv)3n3M%D z;x|`%MFEDrr{MzJ@bT?;;H{Gd?fu&^)b1^}yaJy%d(+>o4)8vx=_=HDtmGF#X|OZ{ zeAt1xzaST-0zbL9tj@M7lrSDZfQ6j2?~JEP;9h-_FCDB!CuqWTbdnY~q=NvD2)6g! 
zzx`IuLr#Uk7B{8Ig!~RGilCL(;;~5fr6L#p3BcQHtpG(Eyc_RQ^uaAibPyO%`RVy& zb9((G zbyi)b8kVWN)GtC*R%6qgpwk^gcPlcVq4$Yh7sqPCl&h?F0 z6J<$QUSw2Mp}mprJp1eLS4gA04*&0mfOYQ*vkXZRekjo7_+##~M@>c z$4}IG4&-UIQ!*0vST9zYYL6RC(N+%aN`AC#{=FG|pIi+d6dfI1H0Ah8?}G$vVsF;b zE%%|o94owv70Y6kxhR~G==v0QnJbo0f)mqKSQ+FGS5G?ramMXdCAwn(?TU54- zLe3@lXTCnFU_<3>L{H9nr1ZYF#-TZc)PMO{n8V%SPX6G#^YeU3sy+xVE*e9|QPa&0E+Z)_YB=A)0~zeY zq~EG{9b9VA(xstGt!qGFa1M%GX0{l<37wppa`MP232vyWvVeqXNQ01>g~ozzxsfl& z#}wU?R&%6~2M9jfP7OilT_zKJe6#Po*YU%MN zl>YLw8ePGpbfYUl2gQ^d*v+{J6&Sy*?1)3$aI>7og(5B$T9e}+ZfX8V z)>TQ|QOz$`#>dXl924~7Y$U)&VDfIse zFtiUE0ex}X<$#v&9@8+l>swn#L=>76GyDnbbYB+eEb7(w2JQ0zkiBD4T~zQeU3bq6 z7r-cPY2Q*1;5uHqz<|-)WqVf$tD7%noi?Jqn{`~ohO+OY!lzWqzO`61H|gE+uZ&-i zkUGLdpT$~7sIS&OX2xazxcJszJou_>F4KZs_`ar@n4BO#AhKSDcI zIPx`Pti{B|v8oWYBq?*7kc~=N59*(i9v{x2=*UaZuLQD}ocEK9+ zhG?C^&io5o!|>K(1PGn#v2P5r_IW~n!T|R^)As29zL+rcX?898Nz#ocuRa{ejx1IY z6YWe`CjLpv_+RhzupVF7qf4PjW?pg(cjK*hB6wABk=l`kK~_T;}Wl+~T7@m6@4|mZ+(!?eTSZpI{H*81EF4k(F=NGy;=FWb7c+61~|@%};52b}zwa zzgD)jwra~!Jxh3~)FjpMv*L@v^FxwxiIwK936kAS=^^>B?Qtfl!mB3y6fgR;fCkC3 zOj8ohulDFYm9xcq`@QwX)~!wQuuWfILnVo*pnCzVx;xOUHLUG*eEw-jh$9R|6yDSH zXt%O!dVcG(W$Au6W?v3{4^=LNN5o7|lEi-vMmEQ9{|+ns7&J91(MzB5%#v+B3t^55vOC3EeUiSH72)tH_27Paj<*-R-n@9J&YE@PhcHMoik>6SYNp;H z5L!B^tM7~bU5N`PBtGC_N=J6)(h&I_h7D6O(tZy=uA0}m*h?lBN=j)HX3nDdP-@># zc8sdo#>gGaxA_EWN@3?W?Bh985fmH@a|@UCW4DF4^P@i7!lhj}ETKct)tk2CiVAzh z{%hFP;Z?5a(4q14wf8)vX|M1}6^CRQrM}%JW&dFE&Yo22tnx^)}pDn zyc~n|+bUni5~#9HND8KTir7-rIX;3dqw~!9%#Y1pgTCe;Rc7a1dxb_)82wACe6!fV zYGOdtvJ$WQqhH?5*GnVkPxHb8ry3N#5{ag3lw|>siOGzb$L`k1jq0YcX=1~`)E$uV zTsOBz4GHb0`0a;M80yYG!4|7@l#7zTzIuj9hYd=~_NLc%jPy-xtoL+H{+el@Ma>2U z1_|Hx@9d3YO1vWsKPI(9?W{g1C%b^+OU#x{rjY%oQBMRk#n9SVDaRe1-oogWf2}f4 zip*qg`4T^S%AA)VR&Q9AYVOmzv7_cMXT#%x0Ebcv4+)+dj_?F7OckBB%`OMhq|=>Ik`Q&q@1(XxCX z$JR%lXR}!^4DF^73JksR*`A!JuuBH?qZ~f zRZjK0FfG~G6qn+a4RLGZ6i#0~~y8Qnn@>6|$~U4pkD*KU^QdMF#l z6m{4N`4&f&kqT#l@F~2}P$@dNZCUk$6ASPELLw4!GaE`UL5aRG8h&*gwY~+D9zI@{ 
z*G}r*Ww>3ySx+|R7_QYFS24rr;!Hy_3r_fO@Rfuzp5BOn*1CMUVp+yK9m9=!Ayavr z-V08paA5HQ8{U*abN)m8)n!7Q7^!(}yeiFmfe*VTGtmF0MyTXKFQI;T&W%{W2z@7T z-`2fC%egLsCVgIlaDJ^$Ygvgzf08Y<^W6!8>njW-+=5P9+KxV8m>Olv_zd?F$rfel zUc*V{H%rSY(f<4al3Nd$lI~$l{&805_vKb%Pbane8^!;(lHbu+a)J@wOWt!mgOh(d zSv_4{j~}tLw6#q`tX_BrI3Up-8($Tqu4pA#dnYa}d844Y0|S9N)$3N>IywZ`vBa_H&;s$zd!_U&0 zM<3LT?ziKf=IOUo|M1edEG$hdQ`6ZD@5|EFd4~_cdQZc$MUlxE@t^Xk{^QWv^XTT{ zD>4e)@yttke4;9GE~S=CYthIRH{(%S=gKb8yu&u>%qAo-E~cgYdQa|CB?r;|kJq=( zCG~-u3#L{ZsuF)`3XFh_oF^*wv+5w%o{D4v33 zOq)J2>Du))7CN`hI(Pqlk;@9^%|+IvGF3|%4eTqga$1kODq>(0mHTMtwoih)iOvIr z;hETT)+P6n3#(i8Sr^}BzUz9h5qFJHt+0%R`s6N&uOXX2<+MzBk9ha(yZkyEW+&uK zT9k^D&%{E-G{mu&hvo6_thqk%A~-s@ZyBwyl1P0qc-}U)%E@LvqPAv(;E^a#6Jh6;)mF`{NYF*!w zEV3kzhl^cp(-cHb(V_%fvCd>&5bwo8x(8SGmjgMZ8*3^$iK;fqVmon)6)|^}vm2Kl zZK-0jPDPL0vyR<&=gyM2wJQaKA#%H&A?kD+EMBK6NUv{_W&V7GB7MgYUu&K4m(w#L zo4EFkG8bG?mQSm9#jeCZDGl8~IIdtg&O4=U>=53gkI7kOnnif+*vQ!6)eVj{Ydv~H z*p;E%$&`c+(@rFnbN6@nLuzB@#jNEtjZ~ez@bEQ{2Mtp87Wf;UD{GVQ#4(^4^dxDa zfesC5R7SNh3tyF<%lYnWlDk;b6vF&%X6Qb6BqIN*$LpVwk->v%u?kl)F)=ZQMo^1m z!eHECGcTVRYVGWfv6Y3L;1<}k?kHG#>77aR!Olw*2z1i9?RXXSZFJsl5}Tt)&V95oPYy2oJ~591zYz==ym` zuaxbczjwM(mY6rusW~&AR13k$dPtXV8MX$*mFgaBwsz(zaq~_`g_V~TT4%qe--<7{ zjg>7-L3ZM;v#%clqVY}M=&S33-vf-av^cI?tfa1%ElVoisv|vevaSvU0!~0m>b$%D zJ>$a6@elDm0r}ypbX4|_KP#E7rlVB#bd!PvtE}36_7<#eoQ0jMfQ*ygE;RnIW4@`? 
zJLPhez6re4k?g=}Jl{`x4FOqr7();SPyC-%alC!MxBC?JLmETT%_WuV^5c~>%{e(q z5TRxTjhW(`lnD{D%yLz5uGsj34)%46wM~KUlP!>mes*d@5) zV{Rnw4;g$X;Tk*Y&2l3E_H!7)wlkSaamDdBip-krZ6I>_49a595-l zO4vrit77pcX@loklP$x>sK06(Rjb(r*Rh!PO_#3w-Ag}gvYlH~)W_t`x%cJHp)5*5 zaNUQ)SIeTndTwYrNJ-wXzW`My|4qKZGoOZ2QJfLI-<@)Qyce zd_MqWG$>`C1fC`}p{OevUSW+vml&oXa`xw_Cfc{79Fr1(I=A{kT#gK7X0f7&J0Ww)7_WYdx??z~p@P&A6NpRW62fezTDZwtFNnaa{?wWe8m6VrigRGW zY3dK1yO-P)EncLf+&-k$-Q4zuzgg~{x8Li}LndL=wCOb4`1co)pCwj{$gbph{}N`F zX8*Ms)!-Tz3kvhz{Q#G)9vz4Mi`~}41zb0(Ble4xJGBs#;l)Ke)NA(c6od|vSkU%c zNrSI9D-2(x<5bjydF4HC+qCc81=K~fi;GJfRal*Tf*yb?`|8$7J6+9TPrK@Fd zr%s|~Hy!58s3P=N`KY5>mP+E2jRM!Z@J+_3Zz-{Z=+`ZGZG&z3U!xPh92{B6_qb8= zzU^{ndyy2msgT??;FLgmvSz3gy3E|1PBvmOs;+Sofqb8$bcw`uTZ*dU{f>PqFF7x` zUL(l-DCxWimRjoSYp*eZFB$0#ta;6?bL+ua=A?(T`TaUzEi2~&c|y-B#kBW3n^t=H9kcb_J2ftoX%l%Dp3qSH z?Fyp=A`S7t#WCG|YW$Y5t&hGhpLp-5q?ZkTYwJE>lJhzL91GtS==QeKnkbxzKRP1X zsV*>RL8Du&X(aNv^g+Z7sEB?|QOggQej6rf_%JH+E9Lyx&FwdOk&W0rD{x zr;gc~wh!GLY}$0MBU<2}z>7=DA?>92q&2w03fZn^naV9TZ0i8T>UB--zrJNFqOANa zsa!MgUUp;@s_35H18`8TNyp9uX7y^o4uddD(w8`t2hP+UYr~E%v1A=mvk2uL_Xn3?x9`750oDmgMgwY_R&Svk}@|1hwYdkX* zx6GQ~v$on)H!UEz&Q+{=LG5l}RsS5~L-0MxQx#4xgeY4TeAVMnR!k|<_gQ9(VnJ;B zi?+Ksazn<%`+(WH2w0|)`=`%Qn>per%P)OhwfBqwR3VW?6iV#WCw}hN@$+``)WS5E z@mOm*$5zFLq1F|ny?5Fj1LHgr818a(Rj7!3L3=Rq)0OsYJw1M2Nn!u?Hoa0$NWton zt72|5@)X}Dy_H`6p&acG3^~-SY zCeSo>^sR}j)xc=83W%FExx_~X$*Y|eDnb6QBe^3%KCa>pFmG=+1?-J>vP4ejJ=G}H{D>v4{6j`Rh92f;S zdyJQGmqTOd#%6s2tLH@LOnfu&<~Cjty?P-<=ycbj}g9yw&A=Kc`@lT&|KDOfU#MnPl(tQkSOl`3UZ=rWC47@-fD{`>8Ys-qRQ%e>ITAk1Mj%f zDL34mAnpwsJ!&-EO+;Y8zcTm%nXx1JAbwe+*`+;Q$+Fddz@AP1Wm`t~1&Q^3F8`nU z!3W${_g>PmfshdHWzf*oLmDyq$Xnhg`(G)| z6JDJ_%^y^v-i%J>a=zEHjICqGS~^F!K(aeVoly~MGOLMyki6SS{$+_alBZoYad978XtOZ-w05&I-rsMxNly&1Wkpx-y)&K%n}+Iq@FvHH9;Qp-)Xl zG`cdlUD;m*hj2%>XC%Djlqn?`oEwzv;L=$89P9GZC`6|WTXcJ}aqf%k8=k^XtKj)3 zvJ)5b7mpH|Geqsvsc#buyA_psEO6xZsu;fG+B`#>De$scHJ494-$EHn2J&NlYu^?d zw}vYHQiF!^$%U^KG$4v0eT2wgqSM#41Ar>`j2-L3I({8o_CN)iJzGYe2Skw1HB)~8 
zS{IHPcAE+?{t}|j0g?!%1h=9yurY~1{n*BnCf-&lPjMXmf<)ENZvcpI@;neyDon1i zduc$laV7$<6_lIxwyJrjk=|%f-xu%z)?krnL@z~JExnn?+^ossJa;j7fSN#GJu&9H zy?f*oV4M|k7HOwn`q4r4>}j19$r>V;y9U$;-*Bi5NoXq1-_ZFaeb*}*h;;0k2&>L) zYg`|+%GSWoa|JA4%)wTC6zLSgMHi&HP5ou!f>EWZEkRGs^EvcInUG$`_TB1Be3#&6 z3c(A>_%o0Y3oEAI*~8*}^%Yc^s1y*1ERnnf5{t-`k}$O}{_^76c%1B%^{FW5R9$~* zplFU*aC*d*Bh;K@>V}2Hk3nvxIqL<48&$%D!+GmTu1NRyCXQI=SgL?u@4EGpTacyR zZORxZ0t{PvNB5(9j`OV9J5TPv^`7YoSf(x2&F^pJeLK8nxxDHptMWUKvhXMlXe!)_(Z#fsmtZ>PG(P z>=AlWX0T;w68*{j*zp)GeXUL^ji7Sp^sXvG(ke$%Eis2N)m43+F2vIJMi_MFC!gU* zbYM3u6qjwU4?`T}2qL&2;KgQ9tw#?{hifm<;{W&@8J}#-eO5v)mt@%Wkv)&qpMY=PpJyboh%`zW;^pVhQt zzsNsYeC|i8vBP?k$qq8+V(xE^L+)l)t^K$`%MlkvAd}v1tu*v`6sE0`C=w`sx?tu@iB3ZOU5H7QKgr`tg$4zsvUwH}g+Kemiq zR!%wDvGu9&)x9ih$?=q6A@P>AXIkZsjP^8e_Dsug9oYu;ZwoGWM#a$ z5w@TO{fkIr(>x7Ss*9}Q^q-yc1{hzp$C3xwIz9j#D<@}aE|scl5b~&6s@&qh=d>F> z`P49=6Sl07$$NvF2t=X&L+66Xs1sP%!83sLOhYW5jr;h~owY3o>Gy?v8unj2VdVO5 zFxE5{abg+d@03lbm*1bo+25cJkAF>~Jblow4yX^u6EM?@YktPjkMD=2$SFl(d(~@x%|JdTi@nC-*Hk`T8#iCj(hM0q z+cv2FQ~ZDCBU6V_85$arUM&{y!|(kWs^IwX<8@0{%&&KhjZJ>pO0=$%l6HX2)8v!N zYI~B3dOB=3vADQw(BRj>+1TCz*n{V>sXuCf?tA}20@(ila18JN2Q=~Q#Fe^#b2Mxh z?g1zLuTA2A2P>Y9dk0Koeiws1=Y6m|$E0N0*qV%o_47V|eq32uIe7ZFgFW+!NlwPa z3p|<9syGf19_t5IrPxNU9XoNt)W9GE?0ucx!{gAQ!_DN@Cz`)~jjcn+!Q1Bl%T)Yw z*MUlkgLQw@{QjN)o%Wcv2Rs@a4hVSv8_r>4Lr+&fTA-85)8{McVfSBYl72sN;J{X` zK?x(%v!fYb-g!b4q@=Wf8gl)`tutrP73&A@AK4gqF3WQ{Qno)Lpr|$Peb!ohl5gw1 zQ&%|t#Q?E&$aCP)`}9by)6#l+l5qUf7ZUhwvHKbtmPOyc+nt(GGqbj~ZlNUFHguHT zIU82ESTEHltnlEFxNN~tG=JZ?3(#exA+kFJ)m&zqckSUzjZ-EcJYIW8tq$IF{%$Xa z(6swYk-*jD4WHWAVGhs|KV|iQ$yx4nq`OAk3iw|X)%q~4YAIig;|qW(aW$d7Ae^+QcE2{AuZqi7pH50cvgIBx# zvSJC%`JoR^DbFDxcZyK5$OI>_KuQyjVtg;l`oh*O2d=2@rk$WwT2(HK%8H z#Z4jF23bLWMhR?aQT)gUPhn}u8#oE zw!JTSBiolj!>Fn3q-173Wo@Z#YlxdjV2Ksc>B`&xflT6Id-c(!Kam-gmHItH|5g7` zdHZ5smHNAlG^)~(7CFa{X?squdFq*c>gobdvXS->VDh!JVy(PVrRHVg$Fq$gFJChDW)OA9fUYrTdrh&CrDBs*jy=FPz zw$BipOXmZ9|Jq-iOWV*ZY|;Hq`FI+RvTbTDLF;a`s`aikYC^!07arnZNb zEbQHKnFK6kq)y}6G< 
zCNlM6!wI(OZ;<&)oX&DbZBsy*D@tf0MfS{o%onq=J zGHl-Jf8|)%LzlLcLRg4@eqQ}|WEkR&D2uexttPE9-G!a#2sU71DM%hY>@rv#u2 zA%UW^r9O^mUIjnpt%F^B=v8GRB`y5}pcblZsNv(1lULSl@$VB7%4-fY&Su_Oylm)J zD=Rx)L%43@Zjhz?ZA@m}?9F&hJsiais!`L((uAu>pOTYB#4B$La5|b|rTefC=asxb z@u-NK9f(4y-MOm~=_M&=&i^SBFOqj3z)0PLp0A6aS>L@03w{kdw~rTDT>N49(-_UCo&=3{4o5G6D z))o%>VRWbY00Cn|7i|ILe1?FJztBb?*EB^xAW+H%9LqI#pKVz32z!dRBZkdl01w00 z{4aqglkJjG8!cM2;rtv1rPFn9LrVg+e=2(X>Y3kzM$wDp(JJXUw9UfxVR zti}nWum#+FKO*m%MeFnDcd9lT$s01-_5ervwM47 z_t0w`Cx3eWclS4|d>yZ(Sy9XJVkI)uKiLVB-d2k^Q}IKBvo8X1P)qtUXkemXA^WmR zs$N3&ova9PmF-7s!@{U`NrTvL@lry~(Vs%%!n`;($iG8=iY=8XK)O#zg$CQ#(Vs|M zXPS!1)8S+fyo08drmu0F%%1C}u*H`J^0=-|XR<9JCOR5B<#GPc56Yxf8Fl-u9kQd_hdS>gs*bh0nRIPM~<@dt}$Hp67iRt`}1x(#ry)`tF8oa zIIeDPRJ2R}C;myL-E4+Y>1VX*4===4J6_TatH z_RebtS+MFJXr5T$oueEw9xX(Day{2w9z3 z+ZjhLtm`WnXZ57q1(CbYj&h&NY%KoJ+t*74Owr6AhwO9+%jvFPY}pbD77#&0#EpR*S!+2r`Kzg|nWD_xH9Wx8?9>;lQ#$(!L1UW-e6+(Ci=y}b04o?iK* zMzaTt#6z=JjqU8>%G`!?ffCdC==qhC4}!y%4Xp4{GWnW?Au4a58pZp&Sr={~i}iXJ z@L$)+mD2Yx^`lt;qT5{G+Y8LoeFOO*q-lPxLptxi4n1BB_huCURpS`oY#3leo!(tjtfyfs8|@(wR5!F$nf$y8U=ixO zF=;xx+^J^dY+k^ta`kdXvII|9kzb7memOl>cS##JdZ9{aut@@IPy*`RU5%D?DxBY# zym3GOw)XD&hgvBjJWLQss@znwE z)56@!ct!%3J8yU0(yf!^pqYxst!n)ACLCZl)v;JCz}X^PFeVuDoSxz{N)GdtR;!D%cGvlb_c5sPn8sWo+|Xi2`a!%IIArVUsTk+wZ68>?cWsaj z^BUFph+<=6uWIH8vieZWR9W8sjeOs2Gvf%s-1{K}ykt(ubA7YXuLzj7{pj zkuLlb&RtsdpQ#NF%cnZeGEz1XMFj>yLA=Q!{f4cKz{T)j$YFw@&Q5PZn?++u`bMw+t?8@6BV@WH{Tj_aZ1;oTt+f>wWIW#mq9V z+xt(ldrqar>>sv4uMEWjsScj|rVU@%7~7S`SCj*>R2gaMnfDR=y(4$(OYLUAy|j58 zFyCenyJ$bGwWLi$!?}sbqM~jKqIJhXJlAB1tR`}G&ftD)v(~)N(&}Q*ZukL&TQpW{ zVcFNs-`}&dj=9C1F zw#lm1BOJ8nbYa?(wuFAQUZSkq^{zL0^Jzf`Fk$yhIoJ(HNp2)a)88}QFbD9iNCJ5W zKXH+254_^FI!slSR+9ZIXcav}d7@2AhP5O*uslN# zJKIcY**ivQastS2MNGOV!d2E&83*ytp53N|5_kF`PXM4Df%H+e+FDfK%K^ezVk#=* zB1URRuy;UN-Wn#2>t+@HF2a-!m&5DV?rfuLl{M)tF9_m-7GdrxuPDc3#DyNJ;DJ zFNB2PW$nGXt7=)hZ-h1!0LLJHkK)bvVyh`N)|gIP3VSZ*zRQR;ITn^M)5srf>g^~n z&zb*<+L*w2Q2w%puCS@wcc9z``6LuxQG{kbJ0VjT@C8A1GrY5$^R_~V1J1Y{(Rmvj 
z%DUfjgLc0P7?;WN!M=TyoBpYTN>91_D}57!6w;nk3Pbr7FO*m^`JH=aGCFLzjPP{= z1j!JLi3cfE#JRKE$aH9DB*TZUWcCuL)K{<-2<|WD^=j#eKd3g`tw+llre#BBX$tF7Iv4xQA$T(hq+Z=9z6S~0jo0vnYTBT&vObp*_7v9H`cGvh7eI@S-g2!>7q#jZk*>i&9b&X0rOj7 zha&0LqY1J_WK4 z@jM~(jw-^8`QEWgySFRg3#Swtmkrn={;w>51ju`phHc12Ov%7rPNqxlfcMtMd=MD5 zYGlRhsM=n+;gFoJt{Xa4EM2f^Amig!r2JH(hjlih`AH$Q3}~VCLyxk_^6D=?1e7G@DL2_jzA*S+cB%| z#V9{=?!{I;8djkL)J(L>8@-uEDjBSpx&Wt3M=L2IwZcRbW;b^`6dgF8*L$Spztu1B zW#mCODur2d;^{6m#6Rs{kwMQ)BnX{Mo{X;~_~yu>wjZNCfli`(bZLvXPQ-`6=PR4h zQtjEC--0Guu?_T!yc9qj`tqY-V z&L{y5<~%%?o3&sS$8CL<>cs++gnli*1`CyO z(Dgxrh-Qcl*eT3%co9{WpcOr^%QZ6e?GMCd%G;MF^Iad}{bvrkCa)q?KPlX2!is+c zg*AGaKBD9s`V0)atE9%?)hq~z%cFVM_P`tpjSCh%qN{r1lQtUfPNq_AYP}!6Jba4q zl4IKp42KnBCuV00xII#`A?P$Gp!6<<(@s`z)Ef*7ER<784Ww&K-6q zU0MEKZDZ2+RyCrq#552!<2RQLRP$k29bY1e0lzy+?6g$ z02PA<@BGxFD1J>GzRU1(tWOpAz>!nfHQ)K>j*g!6z3sSFlX*f~q)LDBpihSApMT!= z@;cV^X$&AZ8bDGou-x*E8^(Vj;YxAYrm+rzSIRF+dBV z&Jm}@@;Z!ehGLAX!GxYK>ccfM_qAo2`W?~xd3Oq~FYNfJs&onhYT|7isR5LxnxbGZ~&pK9G1=q$0vxqQAOu|M&@LHdIFk?RDhKmzv{=_+twO~!&W|`wyb&`uN#0!FS)pFF@J}CN0j4Yle3)Dbq)Od;>E<* zTsGUBfo$L~oKQ(Z6$tr}9ITkYOTE(^y0)udu2V(7*h}G3w5tH)RQz3 zVLSwyrVWTIpVi$cNkB`gr*IwxQp+a~3=5bqmS^&vAXL>u7K;gLGl{aj$;)#D;p=z9 z9GHZ#BCZ>ro^(+`;}s$_B^4H%M%tS@fs+_Df)!Hm3fv62mSdM6FAT$V)@4^!;%GL zKeY_2>6Z`tC&;{7!2%gR+28H1Tb2#bta&S;v*n$TC)PFJZn<{}IK)65I)nCh{?d?* zv8`EBuPOB{r@zu+Gg@~30u-QyGCFcM0EKqmrV-P(=*YOp}kL!q{RoFexd5ozs1RpvT69eEnbvZ!*!X{XLor^NsS zQc10(Np;()nDD&0JJ$ohW1*dJy$q86tm>(oxuOw&^=<%AV-eO}ifu4-f`Osta?_jW z)aWH;Jr{Y7^y$%U|9@Um7YJmfpS)+_?EI$jEgumL)H7%uvUINmA7SU|`jB&iF*9iV zS*%38b}1gZ5u-<&6?lYc_v%*`o##X0dSvCV?0;*9LL6t@rdYZCK*c*gl00R~&x%=# zJznmdw#d8t-LJQz`z4^&77}jR^p!Mjs4XX`&GKVBj4HwPhOWYw!jlK#*I&1cx=E%v z0{KCoMfvfVuU{XltYoEIQ2IKP*WznaZndKK#q0L=a)7gv`JjfFux+6K3{+fpoC^TN zTC3y;XrHS&he!qT;EfrZmF*G?olrh+Ff1V0(Qo1$TQi`AT>qotHTNw{AZ2l`wQ;v? 
z*8J4To3^L5WMOyQZ&$a8y;f2+#@;M9hJ9w>03_hQTZcNJhDE;pt61C6$?4Nm+i|i& zLvwF^J#}gOLu^UC7M~)?Zq?n)X1HTQQ#1c*m7me~Cs}bI=lZwzThQ|M>2XINNR6{9S8Vwhla%P|ZA zHOHxUxa!N`KWdYi>p?2s3koc&jvxTPP3 zF%z5!Ekgu`4OaO@QIoen4yg>uY(*uSBrMBY`B6hcml}IFv59kQG%--HAY{T%U=HiH zF1CBezc7E&4L^^sefB!r|(13W% zrwL-p(1j8H$AW7pb6ktH4?yfw@e>fcmA*wBLW1Af=UFSuytG^8x7P|j{01msM`K>$& zS9}}38|pi*$KL}twa$RJhdof^yHIf0Hhucma`_kQz)=O1Sy5l~SE)_s8a0M^4J(ii zt2Vu0Y_7fp#qp!)O|5=j>3@m_B)L~DyQ?g<+RSdq`ExVXgIUMOpI@jn7D8X{ZqFQm zeoqVAAga+j@jLO>GJAr!nm$^kr1F@d&jIo&t+!t;{QZ!ep6rE9d;EpNrJGs2#^I7> zhVgq_H?3X#JJ@x$d?SIhSCDz7X|JPh$p>4~t;Cu>loj?HwYL(PQCs(^O_)DoD1gb~ zN6yAY-=v^2)Y~qHlb$q{lMK5=I@gOf7b&KYzjjV~{TNVUWm#^C-<~8D`@S!6r&yr| ztOYysPxvxVT@b}48zB*&LA?!RH(F%B=9lOOImtS#yj6+ zT87KP+{;6pL=_#Cp0|O_lPs;1rqiKH*vGVB3*+vYq4lF%N>%>}<46SA%;HGaRTD4j z8pDFcFLzmjlpN^e9H#kxM;z=)7Y0ptuHf?Br?2Rkrvs^<}k(v-|unZp3ZQ^Zt6_3gEZE;d%#it%nenedL-;bg3JkWO7>^ji~P#ebIv zOOj`T80^)b{gl!oxVRa?)H?tfU|du_4lPuUUvtqG_O&(-qXn@35WIS-yUU3~h7c$= zI~KeWi_;Km!D%I{23z=)Ut(oy&LoX4E@29~`!mHroWY1akN{v=G-MQ7;~6*giK=0F z8k+teR$cpWB_FxDPxVcfXNxX8t{i+nRw?#|57{c!BQ5e!tYF1he-pmB(aGP-FwKK7 zJf|RiU0s_)Ih$u*}p(GRwz4sO%Dhdh$(xvy7kOT-F0wU6TNkT8u zTS5yZw6k%>DgX2B-1|HC%gqN%lw|L<)_&{rJg-Z!r`2L{R>wxbuHLeOT@F@?>_qO3K)QIDDPNzT9A`*k@Cn^*Zi+xpvU?e!mDG*n!fF=41!GO zK9YMZ{Y~aYHAboJThcvsQ6jM2Ykqd@%E?r9lbXWOW!EG1Wq<-!bgF(P++$U0MIwB* z51KOY2?Y^ebtG8@FgN7$R|5m3+ly3kQFq1>IhUpRCS-ONTASJPEnhJ|5RS3#=g?Sg zE!Q)|&M9HtNSN-WVKa*t!L8{S!1Rz}Z6JaBM^1N6ZPm-z!lue8`!`)Rh^*9N9hH%jo|;a@lqQb|xJ!w}NcsewZKJ4q zkxb>V_u94TH_@Me=TmIevv0#ht*H;K@7(x2s^Q!uxs*5mQxs;`L~QK5KfA1{?33ni z!`ai^oV*Ue92~_vCzXGHAn4YYJBhqwHgDB;4^(2%sEY44dt$yN{)ddq<#A4T$J{yt zqN0~boEke^_KABi$}_cQGLzSR{!G>ZWWep2$~rx$@=ji>#q>|YQd zK(y!^fdHtXbiKa+m!t{vioq)`q?>Wd<|eo_SW)ez>#;o7?-JniIJw*IadATB@#^S}3zP zzk>s@N&IYkL%%Ty$Bav4`5C|EYG`T}wyn$?%x#u!zDCICNEw62r^9+80ANGXhy=x( zn4d=dqvota?@vXyxG=Hns2(CcZve#u=JE-|(c!tav1Ij&Kb`cSKZcr5>T(DPQR|WR zoInFTGV9}`)58&1pQ>fsJ)cW#JUkBSW!S1 z{BP`5^!v4^V_pq+f!NDtJ@Or8H)^NUzd>S_*wGspx?Ezh$^}StaEgeStgoLRw)(bB 
zns9XV+ZKD{lXskUYAGhVppX~D#aEygfF&MGW^NTl301{~w+U>5tQB9oCZo}*a0FsC z()RoUV_ILacJ-OG%%5zupD7O&2iLuO2IkwY1%XWp-rhc~W7DH6^DU~x5lAJ0ZG*8Z zD|V;i4ge~62(9efH2M>3w;?{;B}vpVu&S`=JW>Tl^^|5J-HD@yq7z<;&hn3hkE_wh z$6qi_z4Dok721bi{Z?MBKK#M++=-hVdhMzdc`j5rR->1u)711uv%+$iV$eOKci`=n zI^piHU*;t+`n?$$j{tBmpfyx#zmMM}vR?SIIP>Q-(FT~zJ3rTU@-f?=_Nn@(p40~dS|Cm zeE>6Ed%mkyPl`)GflPMi5AZUK$8L2E z+`z!#?^L;8J3Ec*>+3hx-1I^2G0bOw-VH|^KpqFOz`_*r^k_d&Tk`hywsEB1PHV(y zP&H2n0}ic{y6)JIf?gVz2GzkR=aJyjuVE?1fI@<8$3i=~F-{Pf?v=Q==icjn*+J9G zD+|QS!-JQZmLHjzFg32;g%lMRU&8JjL59mo9c=w0A4|_KW#qU#iUQH@19ykrhWOt8)`_pO2wR3o33s?YYs=C%m20Md%L^$ zgoHYlqJN!F0W?e)u3ZB<#rUu!iGTM0yuWcZpb@Sio12$saCk&t*Bcz80;r^Pw}kNG z(&XgHz0o@*cO=_`y?wkEx+MM6zI=ITKEI+_{`PLnP0{aTt&Tu2d+~&f3DS)Q^xb-v zLf3#1=Q=t%QKhSkF*zUT)ihr;Sg3iM2PavMfO~pAFs80#Gn;DQ4}2{* zKFgtj63gm2KyS^wdiCKC_I}BRzP-K^>8d-uF85|goRB^CFzy+2+3s@)2@tHTkp|heZZsH9wgH*hzg}XJ>fZ0i0d^38n)DJ!oysJ7bVq04>9`%Z1p zjWs9zM~@;C5*R<)4q4kn!S62O-@pFdW_T~Oc2N5TH3ZQ*k|k_Q;4bf@dmM((LhN%^ z@Ip3oj}=)vj0)bIP7hX6)(&^5=Q5}?#DjuZTl&7o{ zeltv2h$zR`$YPN0KEZ?g`h|M2SARz5rdC!w$wmv$^_&O<%;zfa5c*N_}CO@ z-r1Mvst0Kp$HyS?E1q#y`cde}zQ7VVp#=J2JjE|>JgMB$8ic<(tG6eL|MB z$={Xl6>xQ40!l`hNE z*4%+s0|x{r$z%Nd`E$K+e;{(ef2tXa(sA#^T}Ix5&ie{O;vvZ&+?VmlqQDt@p&Z(q z_3MgCyIMLYO8qe-Mgj8^Xjupfc2FkfHg(kW!e4w?{LQ zAUP`LlL_BxwgTkMf^w$l^7S^>!gY;8eBK#u4)~Oe&?aoQ>UN4=-W*{c_cSS(Vq;TJBL=g|M{7*B5M6}K1w#Kef!Oh~U&1MRPqdT%eFli6$Hx_uH| zO=au>NasGl)@s`X3!bu@n@{Bubj1h@k-jZE6kpoSxSa_yHAU}D_}Sz50YwmlP4#3+ z$Y#F{q%ubL>TgL?_gVET4mx;`m*COM7n!sPS?6BPEfS~hdzSDf&)m{D%SNwT=a0<2 zXZjLb9^Mg;G0C(^47wu2mOJgEA~c_w4=zP$r$ksY3b|wuD~<_1GsHyU76w;|{{${c zaR9=N#6d5c^Rf2yR(%j@!4!Se5SHQ>5_f-~No^UJlBG{>zi!nCfR)I5D~o`Fkd~2= z=$Oc&!-*%UahwAhT7G)WIt0`yd`(e$Z$he`J4Cla-{o;mek!vK_xc9)7DkZ;pxt70 z^SE4fhq{iG`9pZbrDn`K7deozXG$ay%udV;O(@p^sNdZwa?rugs1%gKHK=zKT8ra`w?iZ1Ps`|b-%ewVQD zKZ=jNZ)`fO`1+DP4?NcQB~Gvv?yR}>M2SWlsqC86K<8OO-s7^{ z(Dmy3Br+fWC(LkksjeHv@$TD4JL7Kt$Cm-N*=0VLGROU-z~$A*ob;FIfnT98+`Gh5 zyY0HtvcY?9bh;ISKO4qBZidPd2k81>w^QSg>E6b@uTN8NtsbDs8+9hS_DD=n?O63D 
zz+btomMRWH*B%vnn2`#mU2yP!1!)PtP3z zeB&n^{NMSU;J$u`T?X3xe;$%Qf30O5y`3>lKiEEBQxs>Mza?{H7?W!R^dPhWD0ocl zl@kTp_4N-`PP@N~GbID(s1ghm`mnh8`Co&Jd@u;E79GQ6W+|G9wKHe;pSqh5aTSyV zSiVJnotB@~3{`feaUUqTdlKCp%=G?%$}_LJ_y#;0F!0zScS>yg_+7ced`q?))9e4x z_9s~L-`DrWJ9ct@ZY~GNoEE=_@J0j$I=mkralooXMD%(nfoorU zs7(=V<~{YlajH-Ow|u%k0S)w%Ai%x%eT4FRjgrWHnh=^&r(WL^=mcqek>>n)Tt(5e zutx{O#@1S4`G3D`a%FvOnQ<)(>)W+XCgT9HSkywELF40QPNY67ckQ!!QN3=z*Qexy z!@@vy9|7V}$VobM`_AoBHrFURCNUM}n;z%&jm+{6q-;Qj{Z`Z?4f<2-y)pW9xvLmw~U=?nYm88od}GVquJv?qX>6mmjT`1j>G z`CD1#mB)3VT+66sF&wpt4xjHHqh5n6ZQ=pR*zBAfh1q%1KvU>)fka3g@4$E)4GvGl z_-wTg7pa{;uT=GY15rS>Y)ES-^{Ejw`j7&K6t?J!+L`a(mA~`xkZ#E$j)wzg|DOW`$Qxp$cfBQj z-2nE{Ku=E^y^N1Oq}<%5_xo9Jxqs;P2zC>oP1-aDXM?NkiaY=Wpq5qyE-@9re(E7j zp5w$Zfy}z`!s@6o4GRxlaQ!7*e9EwxVTbVs&6D^&XNcGj_;U ze#ge(!Zh0B;gl#cZ`;VcArL41O97d>yQq<$S&kn`F#Nb2>_HUdm6++NWGtZgv*3&4 z8VW9Frp?jVqgR0@{*qn!3FFUUJiK(&qV7NY)bsk}p-mneO3wOk*6%LjgOxOLa>_eL z&j_EHA#OOk^_|3NLmO<2V$TYBZ-_UcW=mkOD4E>_X#{oMy1q#!fHXxEyP$zd#Jw4f z6Zrt9+j!G$v$sW(1((Jnz^nF$0;T)hH^sQye0u;qaQ@YXsvmJM+w!>}e;|nZ>8zpF2v%~8YGj&eS zs+J21!(X)90lIIcO9hlEkI{UmMjap5l%gJA)oJq>ivj*}N0a#lX*tg@<*4I&kp}}e z<^>wPd8N^Qy3>~->@s*3soVxsr&HFU@ahn1PmsgbaI-Y~WI)XmZWzwv?2dyL!LFar zq_2cr>Om#u{L`hOvRy4@p+Bp^Cwnn@Q0QnHgLU&r-uMkWw^k+}L`MKkqx~*Ok@Pa2$XqDiXXBNiB*eDBW{ylPWg%W!RI=XvndoXFwF}9#P={( ziE@Z40;WkXLk-{~06Lc-BO@~sS?EHgZk)+)Y&!?p2}RDi9{X%I&mKkBEMK#dcYPDz z%5Nm9!3Zxr%?6g7=K)%iH<9Qqo#CE~?|h!B@dPfYEkGkO0v^qj&r7O7mzg^|htMr= zuQcXv>zOt9_=)c^`xW2U-*2em?YR{O@PYV2WPs}ZqxjPyxOIh zq^zf#ULpE6Q@4X6`QAt9<8=?orJO;5{iWLssI^J3{T|V^)3{a{QoNlv+-Vh%c#gY- zQ={FYCYMvt!uE-rWg}TxOSxdv$io4UpMquzKM;SK&7IOGJAvtL9F?+` z8JEpz9EZz%t~5AH{fHqdI*VoO4XDrsQxFl^;A8u~s4gvym&;_kXnfD946o;*2h6r= z{PrZ@Vo_qKlRD`^;KJXvHehR=nCClw!$NoT7S>G^fRjD%Gv&E1Y zlNg2ks(eSc{2?n=T)yA3Cf$3a!SdW8NeIPpPp|8B>EZWru{3PSool^ zna!Zx!VzYv@cvxm&3RBFI<%|TJ-e#0jmIWh(V*sBdCQv0`3s*Cl%p{jWT!&&5D)lQ z69+_nO5)P?i1jNTKIAvm%hA3EVEPzng#wOf?L7*P84yxtF|iT9?!dk53GXmS6kEzo 
z5b`fMm;JU~AIEjy&QZfW)Mk`G0js&2BTB7&*2OV!~Q)Rc`;^fD-;&r-xcHS~DKnwOh)n8j6O2X1Ue>SjPXb13JXV07g z7xl)4;gZWII7FxGR#uqx4nNi&x;NYJaIt0Q(lHG>uFP9;x-#Eie8G11x~pQ%pW@c| zt;@7Do6%A)q6NNdYG;JU@Am!4D%3$dmWk66fJ9l|C!iSVpJdbZwjhZ8;r`osM6AdA zj8!fUUWEYEVTAwof`)rDS>Fa=zV*F4(1j4J(6YZzZ3u4uvizfD@uECGcfIPIldpem zEucfMHHYWnkdBFsHQ4`mRQZtlM@MGCQ{zAm+fT6?&ehp!t1~GfBuT5p^7e5b`)l14 zK}7-7qqV|cxsMu?42N~JP@g_M0Ll{gWb1g_CQ5ZKTrYaRVS-?7@o zCHVs^7xDsaV0(QeI72%ox)gv`>yDX)Nk~4F^B7wiR=^;nnehaZ_tOj5g=BMFqUCZj zOd268yDy5}8)8E82%tEfOPWGujy`=+6xz}beR@%MwaYtUqfLjh>3WQqH}NrSS5{}Q zM{9yCs9z@^(z99&PTk^ri60zYzV+&*K#(H3VAM8FUESP#w*EQ(-*?^RnVU4Z6BWdI z!d#~Sreq7PfBamrTLyEj4rQF&uzB^`HOMB=BtP88!IqLek2A$xEg|#!M5rT+>Qg}0 zzjzMNF<2cb?*_{p_6_iL>BBqlN#(~#QHUC!t(FP$N_5tR@5!Zvt1*nQbKE{&%Wky` z{_7j%amF7U!>1S}J+dw#wM%IPMF%%+q5^aU1>5$(r8bq$#M!0fSX7F1{|WRLD00}2m`Lv%Yzp4K zBi}2d-H2jvVZoMyMrd?%A2w!70zL7oJorzwk** z2qr94<2;^FEm$3r@O&^b^l&@ha;t?(65n>_u*KlG9YQ_)7tC#P-z?1mwzWe+?g_Ej6~~NBiCpOu|m$koaip z3nhqKnjacCmT~~z(aM*Hv)#tXOSY#8+9H;LbLJzoF+5Tws(#@Z+PHm<+s~!OS_t2VMZCa)W zk3&Og+uBF+-@bj!{h9`l zg5*-#mR8abE^qHieAQrqYgwzO!&jXkwQ{#??sIEZ^XC2-m#9<<_#LTX7Bv|fnhj0E zp`s-*@ey4(ce~zH{S@qVKi`jxdTtVj$~_4n7Y6~e|6@vugg57y4#>!YPSKy``ubaw zJ1HQGgLAl)^ha_QVD{bR7brm&$mmvEB9-j47M0#|hhxFlXa z^=`iFtcN*qL+@)|FXL2HF1pSzOKLf@$3WKVBkv-wdDp(SiU;%FX0Iq0XzWcVQ3DZO z&fC7k@E3MK%)5ElJC5fMeq;VxK$F6s9U$(=6bZm2^h|AzFLvC6LfUy?_}K$-!&l{% z9o-ctek$oRfR|i3nScTa_Ti<)K>&rR6v!u7_SQ39TqOsbr&6TNDFno^?!$+#0LlYb zcOEsBCF6Pjprmks)Fa5MuXk)I#3iPI@jw1#GGc?4{Z22X*AS;4#;lw@ zB)Gb+oLLGXx9j`oL>I(^S)uO&h3KD~BxZM~q@)+7%J|;P@nPWcH9RK0o3mF_6l*%N zF;_oyJe+ILsr!qcC1%Qeryjj;Pig3O)KtD!Z8vO+VmtPE+TA6&6~#RvN58xy%ZR+r zDw*bE-hH4~^}=oWki{An1KC{wmWo27p!o)YbiGfPM)aiPZA*!^ZML z3X;9WsdE7-Jp*?@!$97J70{U(%7`03Qg~v*U$up*W_YO>o;A@BzF4Kp$H3!n94_k? 
z>QFvnv>HF`cG&sjq%ZD7oCIlQUNQ}arkDdekRL)w(~{K{q}2HnEe%>vnI-w#5f?6Q;My%WHe$_n=;TUB2)3Lq^D>mwu-GgGczQUG=;=7)^Av`+=HdQw6P8ej(>~TFDe%zajX7)@hlPVy04CTzt?G%F%2``7{;LbOHg?MVaEI?mMRtF+ zU0|E$8Zk z$cj(WFB3GRkUYq}1?!edr$*mIM&C09k=ly=(|W^o0a*f^qP2LTd3dS?^W|e*mhdq6nEAP4yBRj_&xY zMBn4=D?Ed$TOxao<6Je49+jJlgX8GlN7+Dj+UJ*7?Z0t>%;MOLMmh9k?ZeWV^~;HId@zmD0k)@O!*dIblza@z!9ut=9C)kL;npG36nMm2#PvPOl1_+$j}Rj~wM2m+GIxF2~%h zE{-`DO>IZ^k9J52-_w#O-mp?stz{Ip{#@jDp>~ncbGh5Co1rV!2}Byss3ggxx7vpK z?Ni)GyZcT)Z1UnCC*oTAI)~{d2{GA$?+fv=p9kWGYg9H?0Yns24E%H6V=@c5Fr}LF z@O{HxO7v2>u&sNgZ$JAYjI~-$o|1*|Ozo{YY-L^E<}bHCcpCWN&!_1jbz+^N_XcRG zB1o7^Sb_h;A|v@rNEIitvpYk^AB1sPV(_TH>~k2;u{CKd+wYiq7{~GvExXlTnDft+ z$FY~kO!h4dvi-BAu0+6!X{gM8KM%hs7Mm4t-~2LdUYwI5x>OraOnBNwa2UOuNM6U zKKo)lbu%;i+aKP9IP2!v8a#G`xqr-O4tt))cs+@D)Gq4L!TG~QYx^yFn3r8Y`Y{?T z%<{*`#FY#HHTN*c%Fad?0N4TaLjWx2?CM%xCb|X;-;B0Cl3KEA^&8_@h=U!5r&Mwq2c094K_uuoE-<;R+t8tzZYaw9$k&#~?^>EI z6;;GLrN?r*e||)5`2ukEjJ{O%1f<60<{tR^%`Q$^0}Bv8st+q10PAH z1fZ0+V&Ngt`UZTYwd?QplxYohfa$yb#_=9%Cudm1`#&4a z_kXoHb27z~dgVk2^U~F;s1vN)zqPF4|k{dXqetK;r*zlVw- zKPc10nMln?DhsC`Q7>Zke}61@>UYlD@6+5)6|E_1rM&+m*iAF|%7sDA1Z~|bF(%nh zgC0K8_~lmg-MikH?$H~OpWdE-e0_a?4{aDPzSF@oKR$JAx_mf34t%4N{F$G7{o@M$ zzkg?OV~_fqA?Xy=&eIer@7a|hL3}QZ!;z0~y>pSo_qXh-Q!)g5?mDgj)hw?U>Q3fg=Cv>;N@QeE7Au0;6XP>{8WCtv8PM!y{X zwszs(^*^ZTnLkxjj2xpGfa!{yS~Xc`&YcC{*(v|}?sS&_m;yfcrikN^8Km7$yIlP= z=OxVn#?p`vp)m~Haczwu<*Zrv9U#+H?u^Q@9r3ek5dj;`sdcS5ggz4KK~cThLgH)w zCA{U`g=+=9Fm=k-VzyqVk2cEA`$MVmG~PbZthvh`kC)6POp{DCA9V$_tY2;}=#4gS zi&j#Oy=R}&9*wWZd3!>p9zDr2!3IW3Lp;QNwhHY=YltbyBX@XMqN+-7wq%g%B3C8L z3^I@JWBu!l{P7&sEiLzk>R{+^-@X~#Ilkz}B89DS@Ghu6l&rptGP(TMt^=OD&g;XQ*{&Vt@$Bb(pnp?5xJ%k=_0nbwlU%l&tM z+wK}6smv;aW@qxB-8jM{M3d#U;kx>{1`W%Zes2ze7f?+S0d?-TQY;znx*saDk~l@z-^oul;Odv?eA{2+e8;rbEfX+8*p zA}Vj04R3 zC}P3qA$L6N!LppEDYRD| z`84(@BWE{8HJ^(*>#f!nXK`bTpml^QZqTiIe4t zb=jr28p*-#&bIN$q6}`|QN-SY?hpldpZs+W;n+pPAA@Kkpx$8q0{mImZq0Re4xwLQ(b&QxWNP2lJc;fi8OqSQOzjLF%y3UrGnj$r7DqsU z+1IQhm+#-41yS~&ulbYnjjBCBZ83;vP2W`(nWoW|4*lX3ah2HnbJ!Hi7mP{$rOu<* 
zu+8;lu6_GAOb?)+2TCQx!VsIJ3ve!T`9KiKZ>>;(sU=auKyd86u?T$L9srqT3?PsxpzvTJ} z$>EX5W4`KeW;<2YkM^vvHYlm?&e2(#_?OM}%8MA3y$oV7EEnl5>=mcG520lU_4a(T zdT^nAe|kKI2O=tgo*h_>ij;&CMA0YwSPH< z9_$j&t+9tpGz2-8D5wVqCYC%QMcfHebyDw!E)>e0-$nyCwy(^R#HmYrw>^7kS?bgA3oLBZ~>nP95B-!I{;F@P=4@uDw%;<&dFW7qxdINr(B zmywrw5B9RNc59UEV`OL=GvghU6vf%k-ZIB?v%}KfGEo{zC(i0(0$6|Z?fD8c@Zb<@ z_`=)`>!#@Q4Ku^cIzIP)RDArl{SBXNRHKSR$*Q|MHZmbM=Ax|W143op3^8BE*7?$M z-{#+}3#v(C;jt+I4f$f6TYm#~{77JKY`u$c+PrP)D#kd#UoBC{)8VG{lDwe9e#Rj> z(r>P^ z`o}e23qn#XHJv_5;o->h?&5Q_3;U5*IJH&%4_7S(Nx1aWjs5-oZwtRyr zgohb{PnPl4pY6!P`rW;90gm%s$uXYIA3xDIg~!$%4d0@sJK(8wosAe4UN*$6*oU-7 zb6SFbaU3ceVpMxdK#}BiVQw8vPH?V`vXmf=JF<2i_o*E|+C6m6RHZ6g7L>k}q|ucm z&K6*QWP5zm49rFvti-)NyrfS%FcBZPAA7upt(3ubA@tEoab08CS`E~lO!@?sfDq}_ zWu6;?<{}LI?&F1`fi2l8QyY&Ha~a|vGT6RK%fak6?rOp$QYBZhL-~IM|MlY? z4CaZsJi1Yr85wQfde-&?;8ZvYEHJpf%osf$8|7K>l1vIw$|A- z4ezE^T)at%^~E~ne}ux|CH^Z4h?lcV;u1;4W4Sxc*q3lon-ELdXAXlET}*Z0(gb|L zP5Y3Ym9b>IcUbmNmPn&v0iRicOJp#ixZJoaybBae0R!iu@8Qf!B_^j6O|f6)6PUrB5;#EW ztA6Ubb^G>L50qd>!jdaxHG{8Hn&;A|wHr7t??e&D2ZU^brDN_v{Hlrf(}r_u9Z56z z<^9HqVgH*leKkXOEK-h|CT<+&_&y*SpTrgxB}y2`~qcEdFi>|g}=|sm$An{T>T3Ac*L8IS!6edkYEZE7@3DjEa%ue z*6anJIm;|Ma%|1dU*vM@eh0-2op+7JUrxZw;x|4%4Z)mK8GuNVau-u=Uk*7cN7!~= zz59K6xhMP=w|w_bkI~)NjhK0Ll`ZmeB&Uvr?Kcx8l+jS|C5!67Rml||aOtemTI;iQ zzDYiYHhqDYsKLTTjmu^CK})A}*woN!r=6zacv~rCm;uJ2Cpdud*MYTIJrma&AjabB z?bXWRftT^{g0nt*4dIu*y!Pw5TBzJD=nQ(OPXNmsW-8RNjX^zxPGK^e!xAQ6cC+}Y zR===6UtlDy7?hWH4@255DYctif0IB(yuvkVBa32{ESK$Ns*7vEx3my) zOU@i06M4I(#>RBZB)tU|a!PwYWu4Py-ol1iGt}-1J*fGClcjbNdN%&jq(rQHSf!~&gT;~fk z{@z3?BKt2u;cWB{lGTx;{pztVpz}gVv8XPaRjaXY^+Y7CiWz4!YzRHpEu0MfO%rV0 zR}tTlxEZ6GC;&-;mc#xIZ6qCo`YC`}AK)}KdE!>AQ zi>OuAc91=GxYn2hGlp|j{g5(Gl$K1k4;MbjyY-Y-%LY_2V}?P(%w~iuqZg{C`_qh@ zKHli8To7-RRg|`*0XPM@Q&@+H%s61 z2lxJwuD8G0`u!#R9DBKmj{|s002?IzD)0Fhz{V%QLrJxQ>ci5#a&W!;se<=j-1*)t z|MZwgnNOnT3dNE}D=UNdzD3VcHW_>cT5fo8Ykrx~bqT2(Yq*1*>DK>Z=Lwoz&&L7M4|zm2r3$RzM94a*EYl5q1evgk@-*Xq^6 
z7tAnA`wW)gka}bVD4~nFx@~^*TI;4XCE+)dcN;OYK+@)NFJ2GLnXg*}H$+c2hqtyy zaU`MB(k`*X7DmIUHxTqI(n-@kXd45w^ihWg;{{fQ>s(i1~( zD^}_o#dLW1WTep-Z#d{eT-}kbGcC`%c>HIjvvG@8{dP*lJ6W-UTK-zQdhWp8}{&TPJVxYERtwAtbY>F&#%jtx6PO6yUr17|XskTIXr z{VMQai{);)3BcdU`Q_!uRnIIIyS|PRVLBSRNPQEi*05BsQI#9CuY}6uoy_H`x^K-& z2U7o(5GtU*0)R#*dVtW`a_z#rr0MWdzg&O_`=q5kWNkJ#jCQ+@R|2~I(Qdp(u6VwM zGg-n-t)~lQon4(EY}1b8);BZF&`am!{4`-&Y}w9BO-GC~Z@(r9@wv(k?z4d<6w}@$ zv=)7N3FOyj`!9j~_Q>-tGYfK;wHdd*bF-hR1k%1C%sd}v48$0DUx#u^97_E7k?}C+ zB1>QQ9^@7e&qoT6&;aLr&+ z3z%F0&aM(}P)E#KyGzgt<;0qp?pT_BDlo(h`(In`1kxPSLzLbQG^_FD943%t(UHVe zZC}^_F`t;;rtfK%K6 zf&~lJvlWAFyK4~*Ioi*LiWx3TU9LL%AfNXyGe>t~<_@TRF;=h7zp(y9nqJ(@QrsEu z+vsgJ|CUHA}sc`yC7rVWsx{z>H7_m`=sYKz>xt zSbkYKpp5ip2vrYvyv8GRs@$>~`v)mJ{$6;gjV)JMwnnO)4X(g;c-N+aztm6msUTWyE;2p+B?gy6Pzx%Pgvh z6!Nlmj+^)mu}O1RKc7)L4QYG$lcO@cKROc3cZgVKU}U__t;Zv}Nyj4bY}k2OTn_9M!h(Tb*V;L@e|s~xF+{Csi` zdU2(A@0YUIIjeUL*w}*Ogr<0<%k+{$J_g5zTdGIIlv#eMKe4aa@LTiUi8gK{o_3`Q zQMoaTGS@}h-r`eyGcrFi@#9M@=`LIV^3bNl2k_w<^MKXOGZ+!Z4cCtT*ap__q@|=d zCV&We*~Jc59O?~1f_1(&G?;G;(3=k|lqbbdfJaw)d~^?US42eJ#H%(neqM|eZlS6K zdWgRzz|~i>hkJ-HNey&#KX#q#d_eefZy6p=qvOg{j+`4W0d+vlBNCE-qkfQ6yXX-c zknL(<4)xrmb5w6~0Cl5FNUr?u$LM+LmWa$CySj;K{QZ9(Wlx9oJL%o9)cGX8(bsL#SFB;>o67 z@e7>5o>$Wq!6V0LP91hm!;BJdG*VAEX)ai4T2_Q%TA@a|V+bxRY`ZGvK)JZcHskx= zl~t*7%vJRQ&c(5Zyrc~4f;@7Oh=LIUwWg} z+sjGhW}{4~937oK?j`?}^8jl0wtp7B`A*dVCRiD$q6kO&oVc{5K^WWQJjwJaqp;Nr zzpCfcbL5=c`m12~Y=hoQ_&no}_I`24g^zFFj&z`f$|^DKRwsXotnU9q?^uRU2|z~7 z2CQc$GfXmy+Hql_w4W}^ahL(cmYvzsAAp@d7v3TC6)?!?_5Q8NI9W%T5AQ4P-?}x= z#>-1S#zpcb2kq|deF)dMD(-&Mp7%A3+n!8WxPxR9nX9snOX`)3nrVxH@ETX$?UzY- zHaziY^Id{6AxuaUwcBL+f(!=if%H9vd|E>?JT9LNQNw`TIN z#2Hn(C;_vdnIuX~Yas`vn2dilTPEv1{Jg{;0Jgsl%&U^w=ZF7>4g>EyyaoQ}e_jsn z|NnRXS&;v$t8vd*=J=J^vNY8s3lMsulcb?dlr7TC>MUEWxL;#?{|KWJHgDnSdD;sx zEqV8B|J`kytPA6+*Maq0J|sat?GjsBAM(01lX4=7s=phsFLZVhuhiFUS?8cTTVKc4&&49@i6 zFDfd2F{@qawZ0t?{jZOnoVbP6C}X-)umhyTcU7*kR{HK0lNTpA70Z||U2++xxIu-O z7Uttrc*o@!zQCd^Q~cGzz%&gnZ*QgXn!QehUZ+G^-JaPh$#y0=g7xz&Q+&Zt(nzAP 
z{Yf<%2zLYUCPq5NkFC%Ddhhn$=Cz8&+2ur)NAmK-drA3Q*Y`FUo<{j)-&G7_I&mW( zLPM~#B=&W2>Avs$i}>tZql(|yb2U;+EDkqmu5sON489guAT!iad6y&KTNqA=1u~Z5 zF4LL8qB|BV4*kEmV1ig8nnDwl>)*U%6S_Z>a+v7OM@*lNZ#SK6BC)5g?s)NRz=4{0 zX9Bs~iR_uZ^K^bZ)U`^7lC)f>2;|;vfTAT*>!0;ex&=68hsoOWyV<`Px!_=&llnk~ zq4m?PfSQA0s=dYem^l#2=*i_7@cuquAMr!38BRmCZTABy-d^^&SXshzg$t-`d};{N zxwdwb|G?OqP!=FHSTUrAR%Sr7l-Wb3J4wn`2_n6$0d%s<8{Q1YDizM1|RyOw=re=#(f+D7pzN3bQ1p=kPcjcq9nLi6BQ*4k@8`@6IHhp{6#(^v5FV5Mdh9 zclfqeIIeyDYIgj(4ym=1RTsoD;vVU@ztgtZFM53SpG6#<{9(#=#A$C~WgVh{&Wz0) z0`;__%3AAXe8K+6fBQEn%l!Q89gveO`))drU8@j{ya9)eXDmwmU+leSSd-iO?XAnU z)P)TZL5hlifPe@A=_(dFO7E!D5CTZ=QB)8WklqO>9YQCeM@2zukO&b1gdQLUgb+I9 z%;?%n_x|tm;eD^`oKGhovs{A7e4c0CV~pRJ%b#~AZ7*C;QY7=iCgx^_seAHa@+82= zd=e>XIFB-wfgmFH@x%#eCAZ_#YE2~P)BSx98!?vN(7b7LiAR~9r`mSF9?ex9Gsj;K zi7Tc*cV5>4V#uZFcOC6QN*)1{uNV=1Qu3Brz^sM{rWb|3Emt}3WQvf}FV;7xRl?&p zwZi7!fIX-#+G$TkN!E0=Gr?C*S$2TOATpOWhSbn?_8k25NmUQpNhA^j`T~uLZuf6` za^138SCS2j%tY$dYKKneov(3S2-Yt$=5M9s>DDAO?HcgJu?Zi{AgAhQE5JPq$kAV9 zTd>lK>0S7>h3ut3u-^96ILMPZBy&HJHkK15w&W=>GsoPvQ^E7S>*YuC*IXWXy&TQH zytMIj3fEAM`;vS71(S}Atn^)q;)=>GHG++yhZc~zv2&s0)oQ2~)b;DvFC%0qeGD;c zWuIFDT0DYGlTe$xF4=2fLAFaV+xnyOd{*BDrIw0>FLEEr)g+8xJon11A&sj=t5Y!? 
zyB#q;IJ^Ga7k+_xbF3p8vFvbVWwHMI-gx}>#Vd!n23-Pc0|%0&E}rjR_dxv1zpZrg zJb&!=_2q9vqI|bl5`5hoqbZ+1yUlHyzzS8@+}LavV;_{e(GT}Mg9(H)i~|D7+n!-U zTYuQlDKiR@OFYLoIBe&h?ga(dbpl||d5iOl%NQ5Fv_HL}&ZgrlBjzCcH@FJDj?-1{ zsK?;$5CsK=4h6;ZbXwz<%s_}^d;fOezA&VfGf?dAxwcjAFZXTV3%jnVF(x?R_39;u zWHQcAEJeB`L9U9@UOq{ZFC1VK);G4~QP6zOCNip^9LUqq&~VN@=Ry}{?sEgC(y{;M zR9A}7i-aK9z^K>Upq4b;)I~lw84W4lSGjZ<8MGaC!EzC`2E@6Wx*=F4-L{01b9LVH zQdb+nhBz*+Fy73J`mWW;Do8~;_0QPB8k+j`5B$ClT*WK2a0~s;IKgU57VC!fMOT`_ zv|M3co+`d=Z9GX&^Dh1Y+zjI!oeY^xgbY?g(|pn9YmGhr$#{a%P3#JKe{jyZnm{p& z<+P466s9L50ewcvb3KbHM^k;YxjjW{Y{fULfr+Ctb=0Ln*uE=OdOi+40blOsD+j$S zYl{?d=h{%=RkQ2P(t_WPHe5HG)4bfmeKYMY~{`!59^%AbLjiu2hJLgNZ^=fc8@Am zfm>@oZN-^_S8l>yxw`)__OznEXH?Xi;a3_7?t5vM=GM!3WGu~)hP-&pfFZCi zYv!?NkxT#D+GKR__ESyUie+vyrh6|Xs=058cdY1$VZD#q%+~uI3#v%V9Bt&?DQLTa z%lyk8%F}Iui&;F*%X#}(pnGa0tVz-|rZKW#-1gI}cO7>)b2z(K`1haJ?RZjC>jAUx z(!&wTN@5*$6#7*qgFU?))7+9xpb*|nhDqxrxf?`SA1~t`Cy;`)=@MMaR3eP%-Ju2d zqr2y5Cc#(wEcwOx{lvC54oMjF1etBTkY$Ib_)kn9g$~!-2BT{%(KP(4cqNSiL$Be@ z{`~cAAg!ySi~4Ox9wuaeY(hn&8mGRJffv^{i!{2t!f~AI98)t0wqJmjYN}WcOc|P8 zXV3|UWL-yG@UN@AI{o@CNIbfOI+llf8xK!GeSX4)i&0Ycn_rUsOm1u9OWWT7?Y*2W z>VceFDuP?Fr-A>9Jk-Xm|&IWNbYgk@&p_hb@w#n&_ln7ArJloF|l6 zCH%2=(3n(?zdM*b(1+^ED75=R7FhD(akZ7?Z0PDw$RUahXLx5{K7S}%h!;d+ND^gz|XeCRheuRb@rnzaE&fKSllBrka0WyPtFkn&WFX77D((L3H4Y z{JxWjz)H^hR8yI2{_2F0tq1do(wK-7pPBI1^t401Au0#bZrOj)YIrlJz}AgzdC3=u z!x-P7rQsR(*SKEreYArBlcud)@6x*|H2LO4OpT2Or*!|}nJ4^ATk2#&EIy`42=yD)`k(8w~e}?6hl!Uu^17m#yVZW5J z_^#S}lH$UixCI??AD{A!7EPfhiEcgYM*{z<;NI5*lfp^Q(|NT|g4GRbqO}rUy`uGD ze%-|e5A6J5clJeMVr*X^8fSV@FrA9Pj=FJ|0rW(jLDzGe{Oq0QSVmJ7e9yiU`F1T2 zh?y#$1KMBCOw~46SKvTb-H3(XHp_2*^WbjQ3BXl`^c?}I)NM3btS8ld(;Z|Oi298h zT=5z#@X$iSoUzVUA@vmp#%2E~Kk+l1lyaF!ua-b7a&zm34j}sBIadXCcG-7#lagFYW8Dp#E|@R$inN z(^bv!EFABxzNg2k&|_G|(I6RSzgH)$-b<@1UD3Lcd|cciZ%O_^Ss9vAcV|H}>ASA^ z>sE}AXk?~AZ>6p&-LXpZdy!rEKn{E!Jo!)@Lij4ZFx*E2p+Kw0X)_86P9>+Q?e z;NPaA9jITj!vpA@f8eDJ9|x(g8`LaTh#!IWr76 z&!#HbMs%bv)Z_#SudFy%*t&CRbYp? 
zHqx?t>$obGb4`ZJo6Ls?erx^^+$dpESf* zVhm9!!%Lj9j@9|Xd2DUvHVaXI>^YD;FYE9T!g5CT`t>I%SG@yLgH3%@Z)ola7B;0IlaeL$RKI?R{wR7)6?qyW+=6pcU7N7*|1o@v$Axk z#@#H~t14`U7AxyMb)3+&punxnb3U!MQfraR9=G->IZcl9Nugm?PX1l*@7+5k`)bA| zqh(wyO_qid7C}%dE3n1zlH<~!HC$1pww362f%98YcG~~pcy$Sfu4;pEH^wdpRW?liPoFC!p(-4+7s9Gx?tA2)d*YQv=A-zb)EiM%5rd-pf#vvD z#!gNzB}Y8JcL%k`Lkjha%{##vv%n$pnEl#Y>DGINkdJ?tqiznGL6iw3?#@?U5y z=(M7^-;Q8Iah|MavrN0K)~zuil{oaJD$gJy>tSHTz~!?NYv723I)QG zlfU;^tCsy5v9V$n zh7sKIPS=t-Rq6CjenFi*?I0#8*?ZVv^!?7U@tEFvqgrX4B2j|}x{$b5gpCAaW!v14 zJe&}tR&HBiG@!wPI-6Mi6tm4k_nukZs_|nSTxY+;whA_p8o|>21Exf_M~_-MYGDT_ zSFs$55$Mrh((ef85BKVp!wqyFil{cvzW1CDG*4T_4tiC@t175Itum-6O+Q~(D+Kr? z`h|uX5qo%IfUB4!3+vmo5nhGn6);_W%l2Em31B``Blc0Xd^Ev+U6}Rz?BY^|9fEU5 zfbFLr)(q=OkFSl^FTM?ICB$AZF%4naQkK+NAyjD`W~wz1O4ql3T0+s*ID2bC$-{t) zUggZB0Ulkcve6}mxe%^H7z1HH-P)yH0!~%dxiG44Z$ui=1~+W6zZj!79-4)~z7x3a zHTr%J%&sSTJ*Xno^iu}ARaB9Z657)W8f@YRjxQay{R1}NApfo?e^{i!Ud75^!_5YF zuP#El!@NaFL_cqAFn2FyZ^LRqC8E6IOChmaZXE@EdKsQ;mKk6x>$X(d@2$S%D~H%F zQds{q-bGeJLIWF@YLwiUD+7m13i;*a)Iox6JV43QvV^?RI*v-eNcDHQQ8+6bB(wfk zfI?t;rB8L7Y;#LRPV%c9!_hB>Ac4D~k$cEOYmXvJuo98#>uzy~a~TFa_?f}{LfOVH zJWa?59c=#RK~KYaO_r^#nR#olHC-v;*-} zd5rqXT!YQTvoZlLkEQ7|Nf&1nSvhq%H`neCo|gq~b|sAXu7d({kv@KYt-#t#mPR=a zt&=+Kaer}4tfdD|0@o@lphjKb;c-@0*wlH87GNi}4mZ>}QH{ZIC#re4o)bHqv(z|?YFjR3Q^7iVX7%V zh)*`v3A=9hC;+>93TC)?41pMVXmk#Q{AB{I$Xg3!Q`Vjy|Ev{tk#S#t-!>4cwX0L6 z6(ud#Q2MrGS8* zH`~tOY2<|a=tCZG9xi{k>>FHz_~C^e@Mn5qJ&N74J`p6X@tcR$K=@>F{JHHVSJ#r= zB;?A>1-*AVyS(%SI%BuDLK2^Q0aFCUvF!NNos28rN*|wiO5RBFji7BiG{+MCiqF0| ztCd1Bg;0_y6rgSNGg5seJLvM&ZVbBj>iBbh-EM5gCdteic)WT}1S_>ZmsPxz_tR}V z&{zZ8PQ#N&v#+XXiCDuGe?aFyZrg7mzgUWA=yScLc&y2lcRTRSQqs#ezYA9ZeJ1Pr zLiDkd#~s24Nt;^-)!yvYD$&q|QJ2GYuojPQ zbh+u8sIZ(VK&>clDOUT8u=CaXhKDVMhDEaD&T0U{VVE=GNk+Vpy|bgb{g2 z$YLL|Na+|CSL_&eA;K5xsTASwI^?CSS6fm%$dC_gjN4>aFql%#F4#o zp%1OL>(|bm9V^ej&BmUv&Ts@tNR`da%?~YtFH&p6+YH_+tL;+X+NifFNDaIgR9R7L zIv%2x?On3d?d(t$ay;nd+N3nU*XE?LCL zxyEBAT64o87%KWJBDdp;>DeE;cM!N{8G8&0^kZ(BDTvj$O-3gsUY7N6zy@GeVfHt` 
z9P-br=NK4ETz4_|roUdV5<}^~CgvaSS080Q6MlbHpZS09{Q5lp4}Kb%9&4Ms#(p|I zI_dRp%ilOCcx((NAz^iDiA`L`1$K`9`ZXUgPV~>dd0q6+0n6Z*d&m&`x0Nd;rTM-| z+>|dr0mS25lO%If<=h82e=T$XR?NS?A-?0ZmcwKtkk3#$7;;axhPVFCU%R#)J4S-8 z?O`sO6_q{Mhrv7DMn-}tc&dm&c_kBWLe9B7J-APGt2Uf@-4BP}taS7Vbhw|w*Z#9H+*evQ}@;A;F2qG(PqaD7R)7WhgL&&yG2J-CFf8KQp%@UGgw z$>Jy9KRwtNk4U$+Q27_pl)?|0k2(>lHZ;Iu^LfRp70fgpUxy5acJFQT5@z|(zD&VMh?flnbP%l69V|`RxziU7t_Os zBo;>YLn}t(V-R`Haa;X#6?Q#K^~%xy3UACyE{OS5=bC!)ty}FNwTwcYooqLVZPjaT zZR=GAnVTf3ZEGRJq9*|T0Wr3Y>O*s>oy`u z&9}S^H1qm4Xg5q^iSShP6*dmR(hE}6UN=;xlM2PLXPA5mh)gH>7dA(x2mlu``UWXb z2pH17cujDh_1G%=o-p8frtEtayd09Q&%2hK|5DhTucB zYIZ@FL|grnPb~E>EVK&q4fDj`zx<`wcW#8_QT)%^ev92B>bDbw2f<1ZeD{uN*T3r` z01pEM{6TCmZL{7k&Yj4xM6|vnHaaj;)WKtE0))zKR-YMl!q!_@`2kjw?pGcI)b-|M z`uv-FrN97a%M#)p?sY{rflNYcnh54g3E?ZJTAt-l$k_NeAE*~+AcU1tFWzM43{L<> z2I97|M3FQGoBY!$7^m33xgxF7xqqjMS{mv@fygX@-P1CEA4EtdjYsa~n^Z*n73f*L zU*%U8;BCV-W}KMu43sAU=Ixmisj;^{9^@841sk*(MkuiFEXT*^bM7eA%}$2aB_)QW z3mfJ+Z9DE!Ps{CH9>dSiRCj?`ehkEsow2drbHEvpMa8NC9o^kWYFs5cQZYT$h;g|o z0Z*Poa>YdM4UYir04=o|H`!}v`AXEN$(~>!Ew}3wm~1nQW#(zQ#g$l$_U_vkhQVMU z_t_CtddYY)*O*mGI;sQZi*&b}=>UHKgdi>{%$3LuepV@9ub5uSQv5}fgR%K#BY$v&~ zFMwy!-d@sp9tfFt&2pm`J#y_)NivrsvP)KByjGhu?OCM{?X zHjWwNxf$W7qLj8$wOM^XiJ3<2&tYT42QgyuFXo)d`l4FwnX}jH80PK2(=de$tt9EI zfS0_hgaH$X$J}6Nr3C74Y^6U)I)5G(R|(vOxQ`l*5F|H=R0(Z>2Wb&&Y$eAzgFz*} zPAV;0QJFXZ-53LzdcKWiMj)O}pB^?#mO*?$Gdy#G(-RUDuX}@WAqkk4CYWt+nhC@| z@2SW9axgTru<|U)kGNc50Q&?+tu#^3n6b;wst(Uq*}o^0@z%#={4vzjY1Vd7aU*%e z`04XF)^9qvJ0gNzX)<~z#H*}aqG2vXOc&FAq4ht26VdQ^IZsa2kfoW)x_NATzq^v# z%tizx{Q2|P%#y>>u;^nfKK#z)LXt)8H+g&5y%r^%G$o-LcXHGpYg?UIXiRQ+j8eni zWZ5vYlPeU-Pw3^yPr*-V6YV8f)kO|?Z}|^mh_==({Ol?S*)$jBmj||-kj!~ke&#Vl z&=bC5Lbx`gXpV74y*#CCdf#b2=8dRdJMgoto41iQw-zRW%zks5WbCe~=-rT>)pu2uT@OYdR!wA`zoS+BNe`Fxhp zA*d2PDYW^Kwk@OlwQvFYDRKS0(s~7-M5qpmAtu`e{8o)i_HEZ*SpIYCIn8nhP~{-> zuOhL%{nQ4CrD-Mm+G2Y}?dtY!B}+r+2BrPJgxU=*-`F5Y9XWFQ!;{eW;!jg>A73QI z4vYVFBywrSc6MohPouR6{$?Lh{>_qKCL;KNrDb}$!1*Pbc%dc?4;+M)+xj@NpGGsV 
zT#)uj^M`?*U|vpTFddpLd54q#=G}DBDW7FJfz>}b+XGTxVc}s>U=k7hQ-(kOvsV|J zh<=pLRFKm$gZlbuw5+?p%$Ksl8oO+^wpvA6{eG>P1$uyeXS!pseiEE8QeaW%tu8V>$%1l{R;r zw=|^;$fd;jjEG9;&~V3R8YuVknyQE76;dPVn9R%;XWWvMI~R!0K!f&`Y`MIWod<#9 zK=QLW8B@h3aR{rqhP+Z}PG6efMD`jme7t@(my9yr@_kM-Ka776Fwc4>IR$zvfkT8zVjKqqsyBO z#PwLY8N9zL=sVTa&v6cJKD#cd{CQ*7BF+#W9; z&ub*cs!TH1&GoX7#EGTJ5@vXHqOy=dB8QMt~ zP%nMna0cH8z6K{!nHDgd`4hx%C5}5)P+MABZUH3JxwG~vmWZ9d%c_*R97Rnu+=H3O z(hyv1FDdK{@?_AIH$I?;nJOvrxR}%7vyeQ}*UBi1r7eP8`nVs&<0LyfY^wS`VUC)^ z;_~siL8_=3Yr6}GHG_wJ)QVyqC)Wfk$PSa+K^!M49=C<;49L+XMk$Z(j zVoMzA#LYCTE~A83KumhEH8oPR>f;AgnoimQX#+N}z`?w(d7}izw;Wa$gAj~r^?^QiWhX)} zNe8Ef7@tkKad09#VWWL|nu@DqSBPpJn?9m;eZ>#!4WCc{A}uFD~`olW>U7k(z76BsIqsqoVQWN{rPbr zfv70LxzibW`Fp2)phN?fdN$$Xcc)|>P6|n|2ty4dQ74x*6L=PTG6-4PsCBd`lfTi~ zIQIg*9j>Dc`>sO_XikvO6Cl=L@4ffo#U+k8v85hErF#Ehq+GH&|6!yyd>vk?o5jra z*R=TKKn!onJyn{{8qN*AXWA#Kp~e zd1#Fg?$x{iJui+>LV&AyQ0uBTOEz>wv&k;vKR*AC(6|PVv>M{n`Xf%t$Oe|#5#OZGn;n2QSD9Q z5TvG0-z~`dSfb=DU)hjvRXjMg3c@axH|JCL=1X<^2n`Ou{(5zn#RJ1a8zbm<$~yfd zKHi9s={Pxij3-E<|3qcj3>ehJfcv*mR#uk7?co=hLj3ONs#(~~;+u_B7^+Hx))b(O zh!~0uno^Om(qI=Zw?91;7(pp-^ML=R!vN0W9%ES+=_3>34#U zwX(5U4iCkwZxS17ygHvuM!(r4zHhf+yV9juR!{m39XYwR&8EEW!z{beT7G$rM%MgV>YTQ>FJ7MYnBTDeo|jfnwbyJ~g?VOl zSCnH?<7WSO-ln2u1~3dEOoo;wLCJlpBj?5HQW;N%@|ZP5X%a~kR`}(gEgH` zMx&2;y|Zi;10hoIn9hLsNp5YKgCZWoV$<}AHP7j#YTsd((7jMk?#kyQqkN$qexBdD zZ`3!`Pk?|>7Bxn+V;gdz@M_&^1efB4rs0!kbt)11mRqFLYXFzYNt<&Y@*gs6zuSQD z;~mE%1SY`rD-VKs`MlYHfq97DoMCE8DClxr(j%02{i^2dtR+GH;?e-f# zIM{ztv@%OS902aend>Z4TMhF!HSVx9422aM6(^p^s2~T}fWtJ0iT!hQZ7x|i)k}6A z=fY3PRNef>*2O8~r+Ud&z2=g|{n{MFR#j&a($EjTk!7%Do()4v=-K7#=NCZk=k_iM zyi5sdzisVCDgm=5=mpU`#4UGMiPFFYx<5FU#tOU8<%b3q`#{&n{ulYcZc56q0NDPq zj$Se|p_G!5AtE*5GyQ#MYm5vQiYqi&SPBxj^%w!5Xmx;OX9rWiEA6tSwif8zzQYvBfYt4BQuSk3XQf`9O9OkK#4yXQEwRj6O~oZhOv>seWuMAyUY zpI;KwUW@yqJsy>K7hY(X?`$glD}||Q#=MSJBL_fa&578kwMZ7fJFuq^0t>ptwpj_! 
zMIR^z19T;GLq+m_RS6%mRFAu0J;hIEhM~E+MZ-Cj&I4oO)un7+u-Re=vbE)EnOVR= zzF^Gd15P|aHg`3%m{{hq=FNoEpvKrR&Gd^Zdk^{C5YS0IW6OKqzL>A755C$|DJHsc z#RPBaMYbZML^0sl+U-pf-=(OQG+0TddIP&DH?^Z5-`A1FPY>KGBpt3A!m|~|r}#1x zDb|l#fVc4cl2^w$ATgg|U8npFHSN~;0X5YzGiDXu3Ho`rZ~Szq9>X{fIWcVP@eyAQ zOwVW&=M2wV_3KyT19Ptv_rR)4+wnMcO3OeJ<#S*8fRd@%Jiy>DfK*<*R!ZLsEaIN8 z^;h|+!6h!{bKk>(Q3(v7Y{_Q{Tuis)U2~_F>uoES#)SyYP1oAv;)Xq5U6e`M9$JS2 zYAm;R6MTa3AaT>75_rn;l=o5P=^VCwRqB=)d%wht+`V_`^#@N}u$j|g#&k6a(70=g zIo8}uGJvZQb?2oA2^-Bb(zb76=D(h;^J4Jx!zm*^0!yT4#TI^vUah}wkUb}um*94j zVUfA;=x73Sdm->FtM}~>v$~0G!VMhUt6fKfUvkLnZkuBa-9Vd*Cq?=dC>UrR^p$jR zteZUmdOP60afW#s#e3koT(FWeC_&`3LtmqNOEDvKmVeRq8gI^Ay?e zt-D#@hG_}0r9JD1B0?9Kg1rnk>dr<(-G5y=n0*Xj=N|G6{*r09@mgpQ*xNn|Vtp>#*dC zc4v}|O8kX2L-6#HAm@;cbG-stHB;pR>yu)R{Tt7OeLwt;4NP)aGT85vPy42JQ5))61aJfjeoL#+4_bx#z}$B2elZnTej8Mgn7xtFOxmwc=J=mowAndt|ChK}789y8@* zlQTl#9(Z}zLuhC3_~j{IfB$Ts;r!86t>IbA8aD=hOlEd=?bn*E4g_~2(AM=oprzOU z1zH*~{4dawdC6a)69ym;mrfbh0JIXnq`eO0j3A30eTM#8VF`eFLqK4zU#lqU|Mg>c z4E}pd*!2&#`}fO=KX{POD2ex&QTTwv>45C>r+mp=)SVGzwm}Do4y%I_7Utad2@L1_ zpd2Qe|3xI1g2xpgjcxH3?*1jI?Rbki0{S{-T$Oikg2(O$kd5*2^)utkXJ_LU%Dk6( zjDyWKdd0zF{AzPw-uwh^w&286&sT{`V{5 zpI;M@$llDa?Z*mQBzFpCX5Ir^{?7n)0(Tk>u)&!8l7#R3Rp|r1c?A0pUvNd`tvG-k znd@?F%$b2T{C9)*0Q;d=>z=1fkj`K0ZDZh~D{4b>EWjMO1N$N-AZ^Hebg+x(h^d+Y zl4zlI);h5K#mkq`V0cmo)lQH_oElQ8VTr1E8%#5{%c(PN%$!F1P&Q>`W-@NflU+>z z`lj2O+K22viGg2T!iO*Xq=%RhN+iOB9~NW*Z=RFhTDgu)15I1);4DcSkQtco$BIW* z`e@GfIiChKQ|do}ob3uXP!O;&bUem`XQt?ARQ1=(vL}2dcoB0R)PoZ%*vT^UH1eS( zSpuZjesdPI|D6Bl<9k@g|9_sflDxlK*HaAN=Ue(fl3duYB05nPkK%uvo4|aSLGTbi zK<@Obxskj00CQD-GsYY=Cd54JXcNP9u37HSxh%}H4vYM@D0R+x&$x4-*CTTCfiJ_ zoE^7$4l4+20nRE{-m}7PSP>+D`FJ}M&mF|BcW~DD8#3`F6NGoipKIed(m6!q9|D`X zY=?e5ibqHfAA$fP<0!MSW@-9pe`8>wuPEbQSVxjP{-xldStz8s+*U+2PPkXojJV20 zy@3agP47O>hzMD+QGV08d)8IWpv}GL7D%pXwc|ku*fMw9 z(_Ce&N_A$Vz`W*w8xXc7?X;t?D^v;Vb?-hwJpaU|tdL$#D#r`lgQAJ$d`gLUi=X`5 z)54V-yRvF|hKGmy{*rG|Q1lw}KWU|oYyQD1AwA)G3d@hZZF(ji2pAJV2Y?=jq*6nW 
zMQCgU8>2p^c6*fzMwzXjn0wTOn!`S}MgU65j{%Tv{X0a4*D&pXiVzN+nhRLhNRThwSq9uS*Q&Uo*xUb1#y|D+@K0^_CL-IFUo*S}lj(+o~7|d$+ z!rL>7x~5gGL$y_xoHwU;Z!D?KpNWdPGllU5eFhwo%Zw}&{4I-G@pT0U&rdZpySA}x zQnIX?l2zszyRKv_BQriskaTp+7sY2fgmS~qfyK`+Tg%sT+0=@k{sS7BioOvD5=@pf&XfZ}2ME&=Y zNR>_i%)Pl@et69eclqhRuC%xPvGUQXxwr!jV2DoFnvl|HN z00h{+_3rVX$P`e02QN3<8Y~+}* z8_jFnk05Ix1p|wh(kbpa)I7*ij=^}W2(FnrfVf*i^_l6cDDq&T*B<^x64_-n3;xND z^uxO4Hchn!{RHQT+MJ~kEqwD&1SP|^~&Rs_S#c@d$)Drj#w<%=)poY{8M%2c9X8X$0D#M0_C-h>=Zmd&qWlf&_?YE8j#Mux(1ytg4Q=|sSotmmA2wP8O^?kO!HSs0? zuAXgKDw|u6HLTD~aXA8nWQ^4d^ozwnr&;XdeJkxCRnW;?32HIQ7sAQ)>5_d7nKV$spoU+8yt|XZzeLSsmMi4Bb_&rGRc%c}(Hc7NUm-4sVcW6_{ z>_B=IpB++An@grd2n}I+HTfeal%IJ?Y`%7=s)Jy+ZLU4>Lz*{;gPeLC2(-}@X56zk zddN|FFC(IsmIa1&=LAHB(cr24SVGb zkv1=Ak4uA$DrY~cJ5zG#6@VbmCKNp^dj8@?T(2_2Vfg_-q&gViSc&tSr8VMD50V_V z3fW|3`h&dvEc`SZxm@;Z}RdLC>_EmMuUubXMkFMHI(97IwE z)-*d(Ar{xJb(#HEF5xQ9oRvm+KBLoBLRlJTvabq57ZB@QR&zstV%^pB4E*bg6@+a{m= zNzv<(`YT=;iKQAx34A!tFy@f-OXe${F4#*4XeDnO|HD~bb;CC6Wcbo3M z3c~-D9QcP{YUwb3`$JYx{JPo<(6v0~mO$UIe~PdJhEHI*)`?hlqkMH$mF@`XOf-zA zfsVU6FY3je1!h2}sm@4R%oO?nnKZ68eR(&FAYTF1hvRez*f4-wJM%tk!W&Kj}%YUOmE zLs)g}_xhi!fbE{YG}>v%Cj9G++`qc)+QEII$E%vN*u5xsyH`EdH0t~4lxXLJN>f|l zH}f}yZuNiuH;(5jXmxn(>~{z}94EnUliu3M`2PK1^gGGNR7*e8H)zlBh)kNWM&Apg@dFjXGbju+it z%P|6l(4AiJ_7opt^t_=gF)_zb^^+{#P@jnHN>%LK4US2VY$uS!TthMh-~fOs>bH z?^#-L+}^2>28ru(o=`rkz=|tO^F267T_ue(8ei~afnZ^^^XZG>Tq&ceLXf#2^DIoH(nJ!1RkF8|5(~P9)olgSh#>fStErK+txL#ilFvW2NY!7wnoI)p*jcfZ;=Vm7h(IX5k?r zPeBLTl&lqSkTh*CW{Y0;rVc0FtqXAlsqVYz-1v|ZknFoZfE6>_ZUBSCEx^)9zZZMbRs|zW{HyA%B^lHbNE#83@yZ2sr^2WM z^sMV)(E_PeU{?W(3C~l%X1?(8_iY7D)oMgRGu2S-$dn`2W3LPyq1lz7xLl*Lel7dz zR`V1WDCKqM*)U`}FIAV}{sHp3>y4qIq51KDvpnDN3*Rm~xYB6iQ>vgYcA_<{#ff0g z?5N5~bJ3*~!*X$-P=m9L=WZZNIzTqFnkU~^T317)-=NSGuU{(fGyRZ*GPuB@=j#fb zo;JNSo){*!ocEc}x!_v9!G4lIA}a}xK^!&#A(7TZp$Y|J6=<4zP{7||rw*#{1XTZp z=^%2kJ6j~9WL%U$>k!NSU$#rN_YaHmx9+(`{ME;bzjx1hUwLfBj6Q%~Q&qe`XX?B3 z$Dzv_=pvKsa)Y?e%$xLqAMIY})DI4f(LpgAejVe;=1(!+hq~T37j`Hb_*Jm$HlPH3 
zY;uyHPJvuv8?N*$elweaKHqDY4Yuwwgo%^lhLu@9q^sF7p;US46#vgzcl1~r2TB)o z$P&)Bj7Nb$Dz+6eL*EaU&Ogko#%)j{c3Pq&WQ78;#%0l!e|mHyE1YhR`H?5G>Lx9Q)EE92{2oz_7shW98nyEs-Cx?cP6jMJ9O? zPP8qk1o?3p4Y>W%+g8axo+!$W<2jz?@D1r!B=#Ycy{NBftyTX!{z%;auvUhjeAEXa zdc9FikqC%W_bNIV0VEPpaKG)TxWxtQCoy?G2O7}-uRJ*=QPO8jmBz?;~acTOa(YeCN$j!}mb=HmnP%Q`w z1jSg5{pW}FPo>ZJ%_?%O512*s`5z*BZr+=6#56B zN!5UIo3h`u_iA?Lwe_7B4f=dQOTjr$$6lF7=ULVRE;`48s*Rif#ZlQ@AiGTY%gwJS z_o*^fIxrl9GZ3t#$MefT&qdNDc(8e7qjRviUlBlYl7$_C$>BIKF6VF_PAKLUa9}&> z^F=??z55{^;M5wRfYi6~mKBrKR-k!!>-p-a*n6w5U~MTkX_iEKv$D8Rz~y@{oxXk# z1<4$}VNNA=Cf~KXw%%$)y6|eA9TbbJTaL{i=KB8J2A95-Gf$~k-ZqC_GxfuHC{|rc z$0X!3M;Y{n3@%xS8K^NSpD+U*?AJqBxbH2SG2Ib?6yS~kmqizRhYu92I2ZjF!y>x; zc}|?L^FPrnPmqS-Zn{;{}`&*L^c;`2vxA6*XgT3MiWF!jjz-i0}7xJY4K@9WoZYysSzZz&G27UO*`E+uBeb^bMW#ly5pA8GSDKNVHJ zckiwWZTo*qOfV$JfS$Mt90k3R)D!4Axt;rQ%Y%!&IN2_wB>&H3vty3gObJkygomWe zrhWX(f*v)+pGT8SAO!xi*X_RXf3Zm3qkcyWT(SQ!CzH4oowH}fQqb}L)A=Aec>gL* z1nqLzpAAibQ~^9yn2Wa6!r~Gd9IGNMr+cmG8viBXw*SBq{`o1t_G3GCoQC@vBYuAa zAP=XQfAjCJho_kT@9(ew_s;*VPeVNcr+m80?=YYT7AIOo>~ZO3N?Y>(U%ztZ!n&r? 
zbWUOOH7FtMudq)@1(y=jP5XT%{tmkUCQTOXmp*(P+D4#!e0)^H#`yWcRm)j}WfS8% zO{YkgD-D(UwhItV=a|obP!;?dbQfwm6%`rR=!;o;D5~b;5`fDICa<%6Z*0Joc#M_R zI($%3)@`EH!d^m+9M#3CcK-6^=?DaGgmtO{+h!!A50stVj{r<{Pz`W2A$}E_pk220 zST++`aohYLK}!S0XKf(C&6F-kRsK<{3c4bIby>8j)9!igtI?V$@#X@k_+bL+Rli7- z9j=1l(E)l3t^Nr5sckmu@@p{c{|I6Z-#1xO+D7^Ifm3h*U<1h4p-XOGT$5zTA~ihR zq5xj5LO+icH@YLB!y(8tz{3Yk!ObumLAW7K;Xw(sf@Jd)PUn&nPwD{eEo z)!!*VuTvBUDpQ~8HBum>sw15i1&;8!9Fa+O%iXAr8!d^9U7QqYO)O&w5-~1S zH(d;~)gc_~vu5S~E86Z@Go|%vW??Bfi>2-R(GpPge6_CwVK_!pMHO45$9U;@#2#e7 zHW{G$3{)yr6>V)9!9;$o%MooK=B54CB3{fwZvP>rGhnLrYTO>i7-15rJUlZKWJqBu zaeHN5<4644ie1!VJB@jI46O>J+yV3UToW88#70%)p;zDz_^TA-|*=7%l1;!6kIs+{@{<|5S4-9GVC-@(p@_CLxHJ$mZk&+Shr22=0+`8Cr7#njmVp_oEoXID(&_*g z=ql^Vnht28Uj}{bblK*H`uZZAuXD0{AuIyH)gsrdOo&atkxj@Y5wH<3t(l;YB;>ZI zf_&J}Z2hmB>WK5-L`UWSFQP*eoaIwwUN!h~C?eMM69^}mfvoyD5bXvF_SGx*962I5 zns%NcezHc#GUM1?nx2&`v{XeV`F1Bq;0HiQ?qimUZ{OXVr{@=dBA$V6H$ph z1s7rMG*EBZ7KfVt>*0+XHL1CXOy1DmsKl8ksXU{ac zs*(W#wg6=D(b3Ty#R{HVB_;?MICVvgUCjIEccMVh}Z3h6n-O&BV$h0XM=nSimw4n4yJ zGK9wkBOthNH!gc3x>Gk-dsoG9M0^Lv??l{q($}9f47qG+KzVDE)uevHjAp`LgY7<& zB=3?_wB}g_IV+YX^Xm#66xIK!XpOwWBF=Eo=6uMMN0)5D$y3Ewu^?|Jj|u0YXL4Wy zKX1!~Lw!pAUHVpMf?M?slIkVw>nS^u_oi0bI)gk#wtI`Sz+VKgo!6~5-fyz8!D)gC zLhbiv<_!1Kp{`Y+e!MjV989a(k*!1&2-;;!!X-65^&W z74f)30)TnKfLV;rl~;sy9`UX2e`VH^Z=0~D$6HuUMh4k?Sdm6a{=>6GO zf)IdrydAvp8g?5r46H2{;!;JEEPPLJFD2yxZ??#|YS5QaVig%LIG|v={Dx}-KQBM` zXwHau7xwA)e3W{p@2@HH?a-AUY6`H5=x(d3@1NfVftprc#J1fq12i)HO+nl%S47OC zm9kObERkbYR#t0MDNg}e@t_-x(8yO$cOA$Qw0ZsT%gZNUA%XL1ymT^ zA$ajdB>Mi;mGRB({S~=dRZ1+r(7Q#~YSaN`T@8kqO84Oo6FmR9iF!||y1n#M;NUEi zi4A^Ivb%c`g;}y)U2O~?e-8k2S9Gq;N;Fz5W#=y%oy*7rYsRwSrocrx?`=)m=Ct=z zM~cw@YVXYBq2B*J?zH|o9d$}6B1w_$O!n+YX|X%9WSdELVnRr^P6{nXku(OAV=qJ5 zcS*8k$ucswLQM?DHpEzd?=L!>`@8r49``=(ANM})Z~m%>$73F5%zT#D`}KOhkdBiG z9&C``NF>IH;MV5e#!&Pbcmd(t@YlD`JDYX_q3R2h_%7z*M;`?EW!lfe=6p36TKsqn zMILV6P94d^4`FbI>SFG+X$`S|!iH$|TfZWLt%Zfz#c`zbR9ykVVL z#Wnh_`l+(mwJ&4-f#q!{)Q<4}KgO1s2O-b_hP$+>%_pFO&Kl8+IWY{(}($A?c2*CsN*^G#VjBfkfXqIhudMr 
ztxgER{E!JIHG%w5Se5V|>Erf4(*g1Y^-RAwzR|9@48=GB)`IUnvf|?6$O2*rap}FF z(|8QXQh?tT<9+pQn&*R~JpC4p`V3N<1wXUf_%=6+?RGArawvdVGBO!_1mVj7>Ac&s^2sd0D!_wSgb zp>A**fR-Vj`113IA%NvYTEsgy z2-e~&+HVOj%)54yN$xkLS|69gsqKrGx|&1mX``=;VTD`A6D!xP39IN-#Xm%qbA~8? z3ao?+22XF44O2=a$~{;kjylT|3?WUY0SU+ADOY9tw!%TJjEC`orV^)t#HTyd=5!3w zzzFVP;+v(;1=B85@bi&8JvepWK~+rZp?$7OZ@NNh?HP5o26f?P0Xt$2%0$d?O=i2I zFX3vh?iOV~<_+V)S@gc&F_ch4^E*{(Faz`$7CgjhF6spfeirw*U0*Vm+xO1UX3}-^kKm2W3 z?jsItgBHo^1gD$Nq0Lg1$JY!wc)__Q63|6Yyc(~1X~UIF03i?}bbrJt1?fwX?jl9( zYwQlnoZ-gbK%RXBK26j38ew#C!jzxID=if3r^MSIKJ2f7FQy8r)ZykOg@WOSUpbx4~lJcQ7WcfV#`eZU4C z{&uHYuTsDp0S)#nRFG8J!VzjCG5jo#S64y0ulLSJU@z-wZJ6=!&HHx1?Y=CFo< zd5WRq^xtDcM==|Tumj~b7FA5H#dB_CpMi~%Wk070bB#%(0@FHS7}o94(Lv4nl$#-D z9vG_&Bs!1GWqLPi&w9K+Ia$(6UOeo$bb&7s_%G$hg5WkmcxGoZI~vk{wnzMt0hCX| z8d1LtkG>!=dVfnd;tqmo-^0AX&@h;(i+xJRmYN@{d+d*`-;I~R28tL@kT&i4<#0tH z?zkK6L%J6Ljl^NF;`q2at)s;n3>SKTs0vEZ0nl<(NT^od{62l`QvhP0a2*-idtOYt zol(wT??lZ!nSL6bWz)@}_%`J|wiewf>#(^TLCktIYCQJibFtrZSTm&FRJ!_+J=?(b zDfdauuTqX}Rp7v`t&&KNn$)@@+<3|1#B?%m8ThWhDiB}1INs@n^NekbS-)T=k=-#d4e33heS-&KAmCo>6#E-iO2-68dk1wo?>Qfy=qwzq;`+pqUl z#iH7r<{We1qhO6)NhO0ZV=!8N=_0o52ps5X779qzh?F8ZGANS85qqi>R^+lH9h2=s zw}F&i^SLe(TVQBXa~!sM!BLc`{oX>jyka-pNAevZJ?6K!@7)E}wYyR<$T6)Cb;V>vcZzX56Vz$*qoxmMr&aOh=feU>wgDiv&%Vz zjoMpuCk+;UTJ9Z`m;s%rKa zn322xfh!{ih>p!4@Ag${L>zmIaa69^qgckU ze?I%<^u+Go=u$9KJB1{m^{*z}3cRgurT<83!z%-mm-HF|xv0Cou%(`RlUO&=f)XCc z%PG4cvG$9yEJ?4}s&^uZfVD@0=uR4H?Z3#suJ9qN@A}98zBcIpKfmFc+5i7m?{)fC zLB{OF49d<|-Nef3(Qt2>5zO<`9(p6@SH0(F%mq!d^UX?KPa{#zpq)I~xjjULk`MdE z9td9G6>SU6U<5^(XsiW72cW4bT=ZoFA8m@+=;5mZN0*DSb&X0yzUTgP?O8|c6fx&M8kZR+0&ZBde}1@7KS$qLY%z{cek z91ciC0U{T*LeVQ>BtA#ql_#<>WSrG)3#cq}Sz3Ec(+xHo>o2_5Gr=e7TeoUuCcU1COJ zlguJU0c2KVDcV_^7_2eTVe4i!pD<%pk;DMo$c|LqxXjj+-++=zv#OA0X1l? 
zA{dh4ZY5^P`9L0AI=~3A^L9FJ>3M2#esh9f$y@%eg6SD4atcjOsjX7q$H%snFj4=< zZP&$lS;ut%twW?0(a~3ZqZvq=?xm1HKGHBvgfVZ7B{T)O=2F`nu3Og`9*3#*ZATvy zC%vlep!tS;uy-^ksV4WE{GM^?b}eEzal#zb%eI5^12$rVlWxYHZNViA4K)6%DNdG* zXas-aw=`)~6E2?twXq?LkON%Q_$sp7In7YoiQGl4p{PDLcYmBzvs);}rRn7kHPS=B zhFHF2FM5A}_{aTmalEddE9ExQO2n%#^UvB z2>=8>Y$Mu7WX7ah&eODuaq2VlZ@B&ocV>2B=56K13*LG@?_VC(YId$Vs zXLS3go~GaML#d5lI%?hYK#3e&{k`ou_b+YFqUMEHGuL}(hdq!=w?`<9+S~gu8Xmou zf%pPe-LfIUw{9)B5_(Yp)I=P#mUm?P z)rZO6%h~!6yaR~!)|j|^Xx5C!E6u>vcxu5&9J?suIJJfKrXm7LJjkPct?|D7uZ{q% zSL-fhJsPl=TzHU;k47d%xb&ZuuegiYdJhgN9#K%}%9bHdl7(*1rQx_3h1+&NYaK<2 zi`ObZG!!jH`__(;cb+S?NBOJ}@43#ln?IM9Qw7!&#M7+Do3ZtNzCM}!cHv#CLR{xH z=}F^8`D0&iy?Y|nX1tKWupCK1v`IJ*ca}UKjP?eZZx%Q-R^wsVdTN4Qu&B#Dv=Lc& z-2K?ud$uHYF9BLgqorOu_qF+y`@|Du;{v@-Z5ixh%1B|mnqucm$Vt@a$M$B2@>dr! z$_R}@HMSjzrw_#P z0XP9=x^&rbt!g0YWvU8#6}DuN@3)R*T!~{DpPI^!Y244j7$sz4d(oYxh4_N4)AWenQ4DFv zyj9aE)kRWO#*Hk=HjKEh6Cs_fnU4|#`q=SaDDjO#bh?h-_Ff;}Qa`_3?^evjmwMa! zJ!}cjS7bFaxvS1jqaA>FK?;oDDQb~SNs;KY;2&b^>IHZ0<(qdr3voDF>9z4cCJXoZ z&Lr-srX$;qxP@NO{akA1rQ1RiHC>_g?*3~9)A(KEWv9DAT$ zxiGNjGtG07{*;3ByKG^Ns(x=71Xto|^?=j}QAcTHL;|B36TWSx&*acJ@-t4!G~jDE zk#N%5w5H?O9dU$6^CX6K$w5XCMtJ2W=xIDjYBnKBIy{BuAU%~B-UA1$e_cHI55ZmYhcTQQQ# zadCO6m!f+^>ij@U`BQ^7&SVUmM+A~g*Bq0#dG4IOveEn2>?=6dhJe?%X1!f+A&$R9D_ zkER?c8Zx9sM9Z6>t#YR^Am!LS>+5U%?yD%r`K=VrM+>gi1BvoG3?X`cY%o?ag6hCXz_zjUdM6`H( zKICFBckbdcZSsEs2P`sPRYe!sn0$QesmO2AcX{c(yQj?Z7kU~}987K6Cdlu{ws0% z5ljBB=x2KL>W8P2xSi@bb4a_qrE*KWJ!_^%Og{s5FHBa0y|ecU+kA^uk>5fg%+nNV zCWo5_T_zQc5-S(wJbli)D2OWS-0Z5E5D)8~w<+(DE|$;o=hE?T&*K6l4V=Y9pQl7Y1qj(&jEwk5%xh zwOG)CVw@ z5HHV#vp8PY5HD_h6fOW{O=cH3wUDUcd4o@*+B3<#3YcCqS4eb-%nwN*qjwIJ*@;*B z{F`Y2!Rv&(T|^njpIGQ`6;pYGw8~;A^D-p-vdXDuaE*TjrTzA?l6-+_X1<| zRU+n3xI_D@&NdRwInP(M21dS-jQ|8$_OBwZpQ?3kG6E#F2{QDi%b$b(R#c+;W3kED z!HMXBns6OIvjR;I`m6@K?@|OfUyWhcQ&PD>gQcB+zd;%L6w>erdbZO{(Y3+6zQMtx z8@CpV@b&n#a~Z)46d?_J0{>ukh}Kv?4_zsBNm{hZ-m(|PV#C}(CjPl^oy4YTaJxvp zn;0mbGjR#JftpiK842#z)kF~J3XbyrP79pG21x!$mY0B*dGr96DFErt8>H8*A`86Z 
z949aK+qjC1y#UMJkB&*qbN85kv~#b>YcA-#Kn-ujUY&zj#x47J0_U72F;6ilRB0lA z5>H+jl!%pbl$J%v*Jw_jbqv|&<@$?@c1@!JF7X4y9TG^`5m^hNJGN;PT?upCXz|;m z3}RAGH7zO~?#Vh|YM$NXLkBIQngaf})Mjg~Uu!KYYdJiu;p2j8ZGQor(m4EI{f>JnV`aLA7AKkO9 zH`;Iafqnh1$3&rMEUAwe_}tn3GHL$>K&^uP>JiZBM^Ak7NM^Q7PS*^rSj&RtJQXEDtH%4KZi-!)J(X+v~vFTtDT2_HQi7aRK*l0 zHWIAe4Gbd2KfR}H@{50umQ^mYi}ma7=>|zzURFtTE3@BbVSIMMFC!hLVO}Qn{V3IB zxIy^_i__ocJ@P!&&C5{!YK0d7S}=I^nK;)Djj}T1SadAmyRqh?@3wiC4cWY7H1{Iw z7gEQ%L(o}i&eLL|;q7Yp2Alftx1txNoBUXQp!^K`+sb3C?Ae#KV#u3+TT{FaJwVok x)yD>S`tOf_{b2c>_}5#4ybb@89|mnfOtR{OmRjKtHpo+}oz_-OKY8)y-vL295`X{z literal 0 HcmV?d00001 diff --git a/CodeTrans/assets/img/tgi_dashboard.png b/CodeTrans/assets/img/tgi_dashboard.png new file mode 100644 index 0000000000000000000000000000000000000000..8fcd3ac568b350ab5f935e06527dd379219e0875 GIT binary patch literal 423754 zcmb5V1yG#J);5X+_W;2mKyY^p5F7@Ay9^dWaEG81+#$F_g1ZFQ!Aa2I?(Xix@Xy}a z=X~dU|GicBcGdLE+cod%)vIMaOFCRtSq=-G6deu@4og8^S{)7!4GIVMq5u^c)^fS5 ze+m17cTtyIc2u1aFEG+sM+y8=@u0KCC#ccX}UI*Fo7XW5F0d3~>prE^pl0Zr|<-4T&N@iEIJ%4=F+e-m3DW16x^FAZ4njrciHRuV7A<6AASMp z*e~Jl2R)?Ig1b?jWV8TGcz>7iC)3$JY`uBCLE^uj-RNUtlz)y4Y4!hkUzp$6Sp37~ z?Vj>`3q3THF|(#Sg02cnv85;UHF~VkWsGfSvH|DR5@t0$>wrOYbBoH>S$9QiI&U7@ zatt-<-~GfO9L^rbqQ?$}zxexKC0czkCC~eL@eFkJ>aEvq&rI_R<}v_r#wYx<2stkH z!9G_tl_{dj`rt=Oi^l3!5?iIZvFLnLCmfel(s0ZFMeMIfipU}mrB}$uUlP7-)CZCG zy%3MX4kV~vIjMJpQRE7?2dUh=YlVZ4tHQD52nKaZ`F8^0*~y&M7W|9_Ko zJ@s7F-E94px4vccCe}NB-n{jr|J%0xs~|(|9LCvcylW&bux^Yl(5BvI%~;2UR>^~> zuc{^OR}aVA_v@@l@16L%V*HsQU9*_hlOlol4-3TRsqJV`{QPbmYGx%zeu?5Nu6`jV zBgB_~CgeZsWm+2f(;RFza z0O}FRKV41Pw~4kpZ68=zlyBSG*{!dy(}}+#BHY}mz|1dLe&@!g@+lm)s1`De$AI!m z3Y!19!g~9R)cO}<($a(^QE-A&Qjjaz?K%1Q5{51>FTW-yM_>?*H4b@EX=Xz``SyLU z!>8I=ma?__(*?xsu+#@#5^MC+!nNg_=VVBDqQc#XTImKa`32^urIl|NQ)BUm&M)q{ zrB^E@trhqt6m2-ZWc#vG&j`wET$G zV%RY`S^Kkf_lGEu4+6v+CD*%=a&Nwt=;4~|i$5ZAUVantK8Y3JI9X%f`E<3^M_fOy>AI;E1PYWKfIJ8gW?Mfxe@5D2GIw+1 zs*w(QU(G$!c8p{hr=yh{HT~lUi=1CF`ZgUsK;d$M?@YzqDhk?wWr+`TuE%6aU@?_9 
zMp2-fo=XRYuiwxU+gb(;1y6@@m~o)}GxuWp_;;2Lr(r@aJ6)4KiqN>7qvlCLQeRZ2 zl3$D*98x-#L{|sfI2t@WZ{GB>xR4AY_7slw<;zL_?wQ1@th7J7K$CQQ_H(Ri^N+N; z2ee}s?k1#C;30G5NeRwM2rrZHTcbmk$W66q>@?!l=epc5NCI)GY_{kuzv(j3($M7g zbDhQhb!6?W=c((N-0+O`3{IfyR07ojBB2e~5VDuTm?|+js60G8N6U>EtA340MC_ST zl4fr!`;EHNc#3dc9^PcOwsclmI$WV0MING^Q_yrOzhI6bdwk>L<1^ms0rwD!DV-xG zQQgqcRbfB^8;6_V+E;KY)`i0sP1i=`rP2d&68jl@Pc}oTId~#qLv?j( z<*&$9v&VY-6e4nPM%>jZ_&zp9rx z2{Au2CoTr`SDNc2y_C9=-XrEn%!c9>IWWnvb!vkD9s&;4`Qzab2U}8goGjql-t0Q1?8JKvlsf>1QX~E@h-_cl z%*n`LVP!4fvLYnh4ij^i?pW_AEd{W*5@{wUoq_7kpNex1ZdKLDS4JQ}zsl41MGTxv zgF1dIx~0Zhk>YMLRyC&W(0ZF#keWf+j7s=oB+%RiMMO0Zc~# zJ@DHOY$B|H^r>t12+>J+%%(wgX2*T;0qASe_JqJ2o<69-bW=eJY3d*KSX@z8Hvm36 z8^6ek+dwIZBr;E$Xc-(Fbli7lefzc@cdC(UG)p{sVnW@*S_8s)HU3(Vz^Jo7`Z2C> z2}Q?G1c}Jyczkkz!a5H#vt9c(6d~@_-10nSLj$ws)~nq0#Zyg3$J;Ln?QFKwg^VK{ z$)i2jKTEtQOn~vRW{Q@dn}sFq3g*Kxh&l3)3{)6+ayAsd1Ad2&Pt7rqn~*<+Vt>grD3@6!*0v zqmigzR1^s@OViPJizz6wMa=!@TarlR*tM_wsUknbZEJI#6ndF>lVh5?yy_ow8&go) zo*roPTS?-}B+rJg#!-)U;5|-PP_g$BTLUh`Cfmi9+vt*sIZY9ehaSJ<1Ux7V^zmXj z{c6TSVKNh>e>Nj;(F3TA*sjyv_+vO;83w%7^2W^ReQn_h?~82y4NS(&Z5F4!8x4ai zI4>EBDv5=!@T8w0_%W1EBnk>>Y^2W4KX3^M0-~aLms+MHiiOk20;u*82Xco5UoKv8 zXChKbIB0ocAU^xxz1#`;!GZ~a=^a?<;XK1Z#oS}T3B{3{o=0hicx{HCt8{g(;8C7r zU%WpmQl^kgim#_<5|YTS%4o!ip%8bLtMn~D5N!QABILJP`JS{t&Gg`WZixzNJ@rzb@ zzE>$Eacrhy*@mX7fJ4H{4 zZ{!!7JRki&No_8qGrfHxcl5oLyBiP}?;f3F?X-X^3~8mkOkFt)Wb?uyciQzM=ZEXp zIiyIDNI3r>Z!UKQHv<^h$q%b#jTunhzZaijvLadT=6X-wbMc0JrEodBLT*#-({Ui) zCauX{Yh+lMFcl0>+}HfbHNv?$9Rc=Bpb$dcR3X zaeB{i{BHNg(bW9Jp`DtRHW(XyPX01H1^6iX3f%l6a#c4dg&rrk=9Fyq~!7{BgE&o`U#hvtC4 z>;_mlK@>3%xmOdnlXT(c=3X-~7GN!BN@;ZH>$+q8W@8-n*_jgPcRc&qZ-+Mub7l$) zK@&?xoQe;yacgoMYZ+&Rp170QmyI>>Rl7vCv9oKzt3a;W?HFh7zDHlW&q87eUoWv5 zr6{4m@`e(1YNj;K+lqxB;`8d4f`a@|+yUcZiM)77w72zaE$E3dGaMYX}2~G&=eT{+MzB zmlGkM#(g+;`NdQpsmJt%Y&7R8`9&AK1wVp~i|kE#CVqdb6~d$6-o8W=%M+`V7sg8TXLqCQPxeE+`SB`tk*hD*U6 z#@l(lAE7^C8p!(KA7aN`^c8%EF6OCvgKhtRBDr(c*>7hm2fVY=H0Ldwf|wLA+|XIxmQOMzdESZ_d$ 
z6rnO_Y1!QYElkn)=+fWPkyz`%S$vGuWwfekF^6)8!Dm^~GG1IJHoJKQjDrzzg>UUt zw6q=;^T>NjFN7bzS1U0K)PB(?nf;|u)(8gVIZ8paKOMg?whQ+s|L&9+qvc04IyLkv z%a;*NkJH~yN4y{?ER6Dlo?eOsD0F6HF>t_9s0NOi;PsOi_7)U{ z+2!kJQ6uJYg`V&vW0O@Yt8ANJfcfP!$vvJp;uB}G@k4DBszB)9Tbj}<_qOHve&qCQ zG1y_61n6hw|F%6;WlW?n-1LSA{Iy6xCX>haaao|czP@KNZmhez`=!eJ)PR#^U&o6n zE^=WX)sp%1xrK%9g$3D;{Wj{{*={1xJu@;6X~fkPnY@C6THz;)r7393!TNab^1|&G z)D0JBXG!3Sonf;VnwmIO=ir#m>uXu=hZo4^AGcUj<*O`ivrpralbNYbfqu}zrQ@Et ze16$ulEvR5A$Zz;x+!FAIBHn=_k_~Tl`+MuznxvLW@j_q(IAJ%s?i*@rR$6J=$vJe z^?T=I!SEI7j*8X)L%wgh~jsC5&xrv4+NcE9EM`q*()#d zs0|I59S@V&MPJM?jz}5~Q`#CmqiOpT=cl_JMOHg*4-k`Kq;|iE5tYOxH}r7CmvAq9 z^}Bz;^V@S!%2}B)I6^OhYM{D90`*EguUtv^mOrR18+Pt7Zyvv^;KsuXZE_(5R2#MK za5j0^@1!6k&sqGjk~ciTlfUJoE|A&nEPqq|eS}zT@9t77e0Y*C z5m{>bj`e|siK)droj5)oe`|lF*=L)k!S+#Kc)+vwo6k_s#N*>Y$l)r+F@6I2&b-QBv-`h#+p zes_vLUGo)ay2g({iH!%&k5e1nJ(nlyYG9S)a^i)D*nto_XF9b)`+om zFxRg&OraC=U*XNo&3%)@F^)pTkej28ogsNZzD_t4<9Ci)Qk1zIHlguN)70K#*|cL3 zphYWTGW#iAa^(y3GZ4rQDPO2Wq${k}Abj24N!l(qA_)!=q5m6c=7vT!;XNKYbaX#p z*&JXlc09^vXM53J$5@#O61%<2whyzJtu)&#tTLg=B0qjQM}%w`8u|PHbaT|4=s{%qJnfp40VQ z$Zi*Z%^^-C6^a>JTgx*s;jpF;(Gv7FU3VP4xm%szBq9Hegx&AE0P-~53id;VHRM$tW?*n|7t zIX-3xQPE`L)9#Lry?b1naamd6wY$nJ1tX3m5d#ck(pybkO`Beg87N0={t**2$DeJP z?X}W7=ZpM|sIEY^j0i?`zbt)Q7rF@anpzG~Us|(Qi*zrqbg~>=c)@wcR<|6gVfE(= zU2^WnwW^>vQS(s{nq`DcEz@hs!x$?$i7-}?$0f=z9?TBZv{d^3{qq3W&v?&t};w<#hM-+9++&jG`ZMC zE&BJ-OFvPaoOI4^ai9@xZErIL@uCY(*_+Mp)>m&Nd4_km^ z#KlT#w+K+6H!cXX%LCSNnPV zJo@Y1TY|qg|1wW*XlJbWJuVj)LsEQp`sP`0IMlwN;t%TkwQP4zX+dQ+U@>E~&=D)DzcwRcJKl z0{A5dW@(TUttg@Okxh!Jg%(yy>$9Ez->NIDc%(N|EZlq`_M6A)yW#97jJd^m77gq0 zY-lzJN|&0EwyyoLFf*d_wSpXOiW-{6nB!YBH(2gua651*s<$U>n03zTS^@jp}%Alc9GcyTedIzfJ&#B6obBJR5ez zVvX2vI{4~$(bek6C!wtHiHs91F7}1h?d+DZNp%KyJDbo8bF&dBX#`UOa$cDN^~+Im zHD+F=4D2~1DD{L7;OwL171C0vW{<}a2j`Di!BNr|VmBB{m~}FJ6+D8@+Lg$HFmzK*!CcvlfN(LS{l5Kkw_#f zr8~5_>;#{3keT-j;~Nf--~84wnmjZvfZPFeg{F1@7Qe=3qt7R_HHP<$!$mI@YwX=g zpd7gfGo=e~U0m%pyNxxKy`dV?Vq|}`er}BU>59?aei7CB$xitFCDGdbu5s@i*Cv+* 
z;Ei9q310T+4JJ)3?blpf@Gz%>In0>kYBum^|;Oh>VzAVV;jp48F*%^Ha%4sySDPJG5GGuK4)uT8e&Ft}i zh`$p4NMja;Glg_6*z{y$_$gBBr&+c6sn8O$R*zlh<^>eJ;l0e%JwH0@RD#Pss&l5> zBMT;JndBRVM;mV*y&R4l!QeVGzLw~(I~l_8Pco)6@ILL=s1v1`cy9sZyJ^Ievm`&m zx^OaZUcb*yfFV`sjU9hP^^BguLH&+1HSu}}mq2;Yn+eTuAOUH@^(LrBq=B4reav({ zL$UfSZD38`3%hleD(HsgjwZ%PNv>d4kQI`%YGP*GTpe6#wmNIP=DiIKF8sNOuY)iv z5toWr-X8bQo<0C5wXhfc#M&3Y^%M1GDbTmDD7ZU47^{Bn6f2Ncgve5yZ++0!*EgL# zIaQl~N4n-MlYd1R2*VX`#q0x?hH_9JM2PFD=;;gh7K?HWIapZmv9XmeqCfTZ^)a%t zeu<8*cv~U5WwEq)`8&CM?B{e3%*_Tljo52Ho$pF=Y$)mH2+`~WVbG^kc z9XIL0)vqw0?!{F=Mk@pQg-cvoQm{cIwnG^1#&lM^rrxhJtDcs8u=eZn3`g!Q?=aPFAS4xRdMrUm zzGU=`-)^Icw=nLw)_J^B4Jq*h!!K~zl0^O)8(W39?H@ffG&MzVa*`VwnEa%i9T&&m zD2JJQfUe?Uqxr!|&MRAe2JOGY-IT~Q?Z>ci!&mPwkAI>KXV=$KhD(GNM<-z`kHFMN zbS0%Ir@Or|LH8s1ym`x`qa#k}dC}VbdN52~o6nw53?_Py!7R%i=!VSJ_U=Ye&dvI` zoLtw^5_t}Em$tdF`<|mi+r>5%3#;eMl=kd;$E_*bk>WBNgr=>neRr0JlUPvSe)8n- zS1fyx4R(NQwv*kWSS2Pb+hFZ@0-#QtgG6CJcvK*7W%@I>qEM^*RuAw6S7@sk=2 z?J6o#LH#DpC&#Bqy6XB@mkLOO5JK7MyZs2XO3QlB(BOXz+v>jqnlsmTFh=Q%MnycJ zm@(wx-MY8TtAwP*cGf`uZ*CfzgX-R3vT++@ z`;+5WSV>8QgD){v^@7?K8`XaQ%I&70$oiTw`#Cn`?=fAil@u4hgN5Xpyso*`H4>60 zM!UMY@a8KZqAeX47tZ-bMO}?@DpzOwOsm9nUf1}dJK?LjcDA&BO=fXW;Y@Y8tpf{B-?z% zmTHTrMzci%0|I9QAO((Wz3p6@gnUYi@7g0rpf8xLf6lp^Kw-ZA`X3M3!o>2`-Rw*1 zC*;>mLTbwI-yO5TB>X` z#51!bd;S+zkZGDN3s})FN(1lanE*fI^ z;x_y>_(7WU8!9@1c9STE3Xk`Fv-~Lw!FKxQARfxSr{Yp=Cm6cSQnT$544WipZj*Mn zPWt|_H)$p}DArp^Vb1?3`YW@1V*+@_l zGQ={!jL<{_dcl`AAufc2sbhKC9>?GMw160NIhqG{0K>KZj9>$=Y8w zzuN8}vB37d67k)vXUXx<`cgY?!zdjr58GxC(i^uGo&8v z&oF~}dl1db{~G7Z1jYjEO z@lVJ9Sx<#?{>hQ}*NXY`U5sz~_gwrJ2^fbAW&{M>P5Pe7|4HMY-xQf~WuXFp1ohw4 z!i3QZAL#y5On-hKNdIeU{)^oIU!J=EDKprd}!eo^mEX5&s3&LVX5ii*4(n$A3 z5>W9Ci$59yC`|hJKH(C1$dJJ7>Cok#i}ud{&3Q}F!xfsOmlr8Fj{_4O4hJkG>FDOx zbAM@>6klj1`+Vq+{V<8w1yb+^6-4nx|24w=Q`38(NvZLFbJ$WOtpdV)n0)#8olcZW z3=_8VdoF}w=cxX_&YJ%einR3qXR(NWSTbB*-`-}!nVI2-B@EUF#l_(n8XB6yKyf0j znj9@udD`0M{%^pfq@{F_J~b}n2oIx z^M55E!j|25;J}h0ow2+Cw{;s%5}AOys-9j+l$)3@LLdyCp$!tN8X5)>=CI23iTb?% 
z9vDjVFo$OmrT;_RYjm#St@oMHvY@u-3@VXM0qJ8kZx+>1vnQo$F#k5|FLGIoxJZf=B?eFN)LCHV46EtQGxO8opr zoX>r;j$3myNJL5ah^7`uS19vN{UJaxQO=YOC{FQ&urc` z4JE?{?g?P|I^l18Y>SbKq2FE<{)j#go>EDkghU$Xi9fJ^_9KyoeuQ?gpnr`^4_CLi zf~xiORpCHHk4+*Fmf9RiEqTzBtZ*PT9S*0wG^Z88g^uZqaWX-7Eg=hy|&}#zuBl0X-># zWrFYemgEVDh*&iKXQNY+$6|sAV22+)RXwrCqT)wTj!_mQaY3M^ zqCnEo@eumjIX#WF+FAH3u1L4HKR#dD!oJZ-J1r3C;L z?aM2I;gbMJM3(yw)4%j8Jw(?@-H)L_?99V3*b?WoyulFk=0~2b95B5y-`q0kgdx5C zO9sTWv_=z$)7Kz=z2Z79@*1Ks)6)(kMmFCtF%W5IC%E-&DU*oP_h52Rckyv(LVWGh zm8nH}bTCI>$gEV{`7tA_Zb{<4js1hvrxo!VQ)dqgX=4li4UekM9PJ~i81<c9u(i&_f}BBKV1P%8*z||KhWK{v^uIoM+c% z*(rFZYq6#|Wkvk5yW~WGkcVm?3nfYYTXCB&mfC97z&UThHy)RR(CXJjiH57en|uPa zQT>_0-yJ28W5CZO*o+0oDy_iQy9(Wo?Ev+Uf;nM2ob@T900+x8cNy$TpBfT5*a{;% zU&-Rj6|k)r2(!{6+K9MHYdQ9@31B{rDJfwRd)!hifnU<1G=7{Wy0d<=z8CtpRv8cM zURj~!81_YykqH*`fbx3X-}j7;!VjoO!`yaCm78m8Usl=?ws#d2W@|i$=ruJHhc+qZ z&1nbB-aShUrfPdVV3q4VHk04o9+fu(N|Haf8{6s9q5$erw2`1z{pO>aqNc&TnRl-E z%8Y&QN$#c)FzebQC~Vwm^q9)Vz6u{#v?Dw(`5be(gZXEO(LUu!atZ=Na zsrI8#8<{f=Q^F|#dU6wS2+y!kfYclpc1S-o!YyLwkoA=o;b>@0kBQPI)k6&W}oSe`HJ?J04lcU8}9Aejh#}(g1DjoBqPlkHso?TcUMX3#5@d-RpPd%US zt~tC6oK{>N=tpcl&*POIAj*sMehrc8=K3+fb`%pKwU~2p_u2C0mne?w#j_#gSqiCv+Pl`x9Byys@$$Yz2{Iq80L1qbF2|xtQ2hS-sQG6vfmgxPDNi; z^M`-yx#0}tSBq6MMRcdVD8M`qQ{<2`g~SSGdUAkl&m_(`RdlxuC9evS1zrb)Mg@od z;yZf$z~`RR97a$8`M$|Zsv2Z1w-`w1&rm6~OU?AdpyrAWkrL(;n;Opb{&4UAE7!Ne?HcGj^I+L`v9;!rs0Y2k@ z6-9z74t_mHRM8jfwn<6K3*9bAotu4;(r_d-Qjq9{Su(i-(~abaIa=9#BX;==Vt)PD zPg(CxE$lHf*mw5kCz0_3m7KO6zx%L#?8UcFogW}*ZnfY1>fV4&_t;Iktv3lns&QG% zeoT*VMcdD|@DsLNJIMHbnLzY=SI78oeCaiRG1am#Gvj)nw~>?2O*Rt2a_6WK!pq7Q z+r$fFdaTGlDq()9P@}ddUkyBs&6N6>M7y+IDuNE-7~NQL_6FY*FUAx?F@?j}aU(6L??LiwDuG?MI?Ohhs=16XJ% zh|1kA<(d5u#zegL_~F8|-UIYDv4bqj@mzgKq|wS@W@ZKz`v3SSWhf7qMQF!Q?Z4HCSz_%D9ZrMvj5pkC|q)8Fh3SQ_R;xjn~`cmXu&YY2QQ(V#v^bDwpysy5ch$Z0|?bdBp zrX}~yFdTT&2+1fica$Z;2DOqqjN}9p7Gao(e-?Wct5dokcN+@P_c5(XTvo{M&^k~s z-iQW&h#S>07+bCNTQT*mzpfj|g>yitz0k#_*N`1k`)rXfdvZ|}i))pEhfjrcmdFZ@T@%&fmYaZ| z;eqwXRcS-aV-7_p{6DLl=9#qYH*?8?wqN102%@TLNKw`Zf 
zi;ePFN@Pmug6~Tt{ta@kOgOn`pgTYyP~hFWE7Ug=ND-#JwCm3EHJfsHAW`gWO48xf zo4O?o6}jp9GXBT})GuM-Lg6aUmkzAYv#W8bnu}<`P4nLKtE+v<6&a2jA9!Id?UY_e25)Loh zo)3is_^gvr-*{bwhZiS9?H=8E>{r@Kl;B}WGG}*9XrDjap~5`NA2_dx!0>roZ{$zC$_4u+dho=)HA)QB z)*j*$zd`X?ZiL`~-RcC2m*?l*V(>PtfOl%#Jh%Q4b?L4YcNSr zQp~#^MGTU5flHoWvl5m!M63=Hj8n{clSNzaM(sVOUCvjCYS+lX=n1R3Wo(CZNl$*S zAg!O|EX>3qkDjc{2HyzeYh!>>+Z<5=k^ETAbbn{uWePiwQs14dXgKBy8%Y+ zo;fCu8l5K&{&6bzg9Ge9&(q>i*6tsxQK32`5Q~XA-%gFq!@>u8Osm-QFATi(ghj{MA^Y`IJ`2I6>qS;NCc%dhqOkZ!+{Smmx9*8|%>(X?s7!0) z`=?BMG4A?a%Wd_S_YG2cvy|9e^G^FZttqj7ZbM0GE_O=+y}^HYo3eoiO5pt{)7j|3O6c7K3P?AbPJ>SqvSQ^nev+djVl5WP!~zaoEJ-F5%(Gedf0e>{(& zP$``n1)W^zp*9n%z#+%P!rI9D<|NU3JPnsi?_`>VmKN(!pfbsObN>@Lc_!!WJyBd{ z;uoxpnO_#xEPSwA60AkStZZx&@x1R}prgXPH>m=*7D7oim44ozoJv=_6svbCI1`Wk zC|ldayU23{ZGBGo%4fi;rPw_XagNTW6KqZ?_;{R(PLe8_u;1WhEBUO)Z^s%>EhZzY zcI#Pbu|Er}_)2`&KLq0~E175*#!wIeaPK2HfY%kBE2o2ag8Eo6Pj7iOCW=Sl##pFZ z8b~u05=G`WKSn-NIPpn_@MYD`M&9#Mw;GqwFd|uYwPaVY;kOhDuhKF{YXfq>OMK1` z0SrJYGbXm#z0rHj-*~Y7i4tsbl*+JWLl=ror3t8)wnXCq*S$MoyUbuCOHyaU;}pET zPuT)Df>=RSeWHN}mESEgW0P-e7!9o}jg!`7uaL_{HND$)3eX1X(5_)kyK5VW)F+L7Tz!-} z**3Z}d$ejFwLX{xF1j)`holnd1l`RbQ?GcwSBd_LHfEZi)9yo|UUbz@Z3{j4;uEb) z2t{~>=_p}w0fhMGxemWLRru_r|JpV#u+|tt*6P~&l(tGejMe0{SEakSL>1^E9;9h3 zsTA)@o^-gz+-|e&w!#3NI8@`WRxhnJ#o@GWfqD;owdu%OtgYSc9`C|q5r2MhfA0yf znuOgg;&O9-qns@qLUpSsQh|s!>mc*s$l`YVsPO`kQp6o~7C(KIMo8rWf+tGF{yh5w zdzr_{+f!HlsX;LoghON#<$Gv1jLIo@v10N0K20sm$Bm>x5o9c;!rmYM)2qG7Gp4(G zgX288`1VmtFyZuz1lWrT@6}^9WI8-7wUxMR%tC6}hUVkpb^NaQsc+lcZut2DZPQ;% zYJhfxrYPMn9yybY_4Iew{JcCT7I4l5Ju0=uJ|=<8`lmJq(TKQj?Tz5+zQ7;+^C z-oMPwV!WAk&is1QQ{AkH3k!8fp4(VaH7E9)T%Q&^(M)^0%kZKv$(pW^UE7O~2hj*_ zDtdy$e8D0Gm^V_SdluD?cMDID;Y-~P0kj?X+UmQJ{rd5f_MOe;`$4bOZ1xHd;pFm2 z*#-^99X7)#e*j;xrD=Gv4q+tQsaULhjjw})R9iC#J%=tFM?rcrM}G|W!1pA@vzeS% z>r@5>G+fWQ=j!cmj-aZ;;&>dOCY+Dv{XKFb3EiYFJMU9&QZ!F}YeYAEX)6Y};doAT zV@*$fInBnA+G{WXu#{-=2(HnR!OvpaC&JOTJSv!Jf`m}#f|r?3P`C{YwzbQ>H!s2s zC^BVfbHNy2JxRqOjHKn>H4jf7j)os%3E@pAE@cCuZ?I3WC-&v2)j0$Rt{B3;fXAx! 
zIrebu^YDa(acBBwaj!?j&sH9vJ9nhe7;7+k_xAI)TQ#i(uOwi8ow@7#$Iqve*I@wm zpm0S-N~+_0&98MIJSW0+RH^U{g{n!&I)ITz3A<;GCT|2Nf6>4TH}4 znkoP7VNkPVm_UqJts)~;&Jm(3dV0EPb=cAb!)^nmUQH2LjK)ahBaw^QzMxl5#iZTO ziH>eaL30XhM16rt@mA?%T|Qsnm1uJKJ2?rviuMjBd+Z!;Pvkbf+qLv~`0jzAPRjkJ z&x~jy$^+k*hKYO`4vZtGO;&PcUJDT#oMPDZT)Xz)VN_)4jLB;jOhx|uxVU*euf2fYKk=w&5tRL8yROa1 z_N;_g`3k`vzx_NVPP>|kHF?&Pvl-of!UvQITD{Du4@*gDgnWhsy&Sgq28)emih2Vc z40wafu1Lo9*?o~aH~OulDTKe&3glDRe1N}|$m5hw!`3Rkx~C0*GJnk=jy5uO==Z-Q zVdSd~eQs(dE-HW6xOTm7$*nK|r?2cHTN&2-+kUM4{bEpN!kZrVWMh*?9Iy{cAV$x3WLI+17fuEk>?k88rAzh7CnfD5BE3h7HLKID^UBBL%W&E(CHwxr( zi}-vRcH4_ht}-l95J&14t6b_4U3ZOqMHNM~Gg@`;H_nyR0V-5Z3lC#pV4}MXxm&>p z*@P>gA)`E@ou4}~GBL%A$nJlV?p$eH1b4ymA+1BO$iTMzdn$s2E|S>qIx@y`X7)av zMtYCH(hgwPfw#Yncz-vVp~KzcMCSKD1Zu7p1@~=$M^$#8RTgkm zy=jXJQ^s+dgaMNqx2Bsyb{t=XM2O70x>GoxRTj6-N%hq3c5~_;WsV-VjinWbl5ULO zwc{3C=-PTp&5s4stex=LHX9v zO~3<$Q=X#`XF_4S`C7!0-@w{r+qh@kuE;4*-TIBlt$Jajt0$p+1QcOW1sFg1CbF)V zQ8cJZ=d|;N!g%dnCcmBY`n8c@7Hh?17ly=zPe?-JKt8Tce!una+^u>* zznpsbl_|xE@wwFHX#Edp$3b+G@O!*9P*c~(lpCl7SPj5@^IuG*dh(B*C7l5;JD?u} z4cRARB6vG;2f9_DmmIcBb=aQw&CaJG8+>7_fikgjbaZsiCTrEE6^a@Q4fJM8-}t_| z5uzv~1}~c7>eypHue;b(CER30n|AClzHLOufYxEMh#5t!b~Io)+kTCK9raF+H+EMT zC$Zw-CjnjknZg(N5+FtB9ec~%9Kfx2dMqXv6$DHO&zt`u`(jna?EhSw7(k+d22uOE#gOngCDc#+*X^@oe?$|VJnzQi#Jn!?K`gYFx0@np> zR?Ic$m}A`cZ?JH+?u7hC)zMK9h>c7FNp+t*dy}lbRfl0vxm5BHpReMFA@g#@@_kx^ ze`;of0X>uBX)wOx9&2_N9a6S6UU|-2gXk}HNYYxJdidfDUm%~2D;HU%*B+t&sG`ED zyaoqoB)IdzL}_pXp4o-DV5A_bj-u12{scj zLa4Jpf{5DqJ9u|7_?twt?Y8#v=c`4%7{WhQ z(rBYLPbiD%q+R2I6W7Etewqo^8v5WbC>>O7wIY$(E6nyx=SO;0n%=LZC%6h*?b53Tr|=O|$ntH6Hku=~3)EmNry?c|E?0AOxvw;!;>9ksUhO5NW$9y!*d!L^fCRik_msqE( z2yUc-)zkG0Fme>Q-GCmhAo!RECw0N^Zd_Pz8WD!x;(Rzu&d!LXLOUkR0;>OZST1X= zo~oshh!eG}j6BImj`Ab(hhD#>?AbAfhIs(*uqEX!NYd&XhwU?*=EbqmuqOk`hY2$$ zg)}bB%IH?7v`-m>(RoA8rFkqH_d!V3rjlqN@Fw~DqBVFAn+hqmgn66TO_~Cz9PB>W9uDB?K!k+V`TSU zzeOnPzD^-*ex-9sn=CXQm$9$Tuc@1#-|SdazN9^gD8MWx!bhbB518L|!=l6$@Q7@Z zb0WwN8WO6yYI{dUY<+6C)?B$b_ql_$+@ryWMEBKvWa8 
zJu_9y+Nys#WE3&2y8LBkM8nCQ>HxW~56;MW)5Ta)idS#qOG-$JorsCYJE4x$RP?>n~m4v^nUW?n{e+?G$@ zi5-Ln^|-m?^QqqFjOCB$8*b82m+}86YNj;-L@5I7kqA#E#Yb-RmvZuTMI2)p{7evx zTx1zXyFc&}#F2MOqvvtSP9y*{;1WWue0fL2nk zVxu>^*6~|0Ml5sZ@)jtO@{N;vXoA_xL#QIQaEH^>!J^a&0!fZH?2Hzk5XS>3ynXy# znsl+mS}9#cKZ@fG_H3~`{_#9QplWb}@>7eo=~p8v<|Q}Trq`51@w~P6r83yXH4X-e z(Y9~B@hL>FF=)yoX$1v$joNIX**L{~!V*Wst+@OT$1Ik#G*{fE5X`9A;C)$$J~z)d z5=VZ_{;R_T396@jgiE^sPd~bWo?aB|%Dj44(5FTzA`SP$x8+ic*@uXqCUw75z*O5# z9TUZtH#|K(g1hkIOTUE_Ii{;TAR<#Yd32^aQ|d1CTpiACW!PFUhW*yCR%sKdE&T_`3xiYY#-=*e^i zx>IB!=EHf47)AfC*K{)ZQMO_5lz+QybGC<4DJoW3!T2pxYrSNvL zd?EjM8ET-|R3M1VzC0f=(`&3z7gc!fq{B<8{7|ppRgN%I9c)YnZX7++&0aJEV^DjH zmaIcto-J2gm$sxEojL~~${A)`6X6#!CGLic{Lt@we)oX7Bj@?X@bQjfV3Lq}0#Rj% zeo;m1Q|f|dL9MdSSPEf-Z?5~$@}rN{zGo%x`T3yiA=mZEAD5GYn;$J(<>L*$SLj}~ zhlDcL9#jwE*T_C&P3cjqkD%3oPG8F2z&(@M#&AeTL?mst5;* ziU7eyc(RSd-7V{(rTy_VPbRPah~RFe*k!%d195gQPlzo-u^oRPvU_bit`TF1g1^_1ns0v#W~7HKWK()YrGm z#jl73Famr|EAeK#Cop}rQV@9uMhAo?+9)_nd=xox5xylyjDMKWw|t^LTCQy9JhH*r zcINMID)Hl>-q;>BG0j%hD@D{IZYk9fDl;^N=d~(mRaMw7smRi4aw)Nroaja2Eu8!9 zEi6Cglfpf=W~CKf>q-2ldlA|vdSL}dMPW$r+W=CH^2r>CfZz%mgTHV{gV|`PzG7R? zRNre|?eY>FT$l+N66{~GTBuA zoCJ2C!RBg4f6b%>#rgDXWEXp{z_{)nLyU)30!Mx%&00Zbtsi|R7zBM4+83owsJsj7sW2ZP&nsRe zMcm3;YoOhX^b)zI$%$ug{ijaL%~_PYe{d&&5F-0^XnDU7CM+uik7clXn4rcoAAa(D99jmNf~VNaHPINH z%`mk3I+j?E0C^Ap{sQwh^w8}(5c|Q$815(Ny9N?3b7>1^BUWPgsDrr z&mNRGluN@iof^N>5~6dFJI-=}iQuwRw7FC1XVH6hsQ$VdDFvhZq)bESHp7cfSd(T0 zC=lD2p%aT|GPYiKISbcjsSmnWz?;gi7V4nD)RsN9+Hn&;5o(rt&C4hU*W59QTcolS znPc5gnRO`$NJYYVja+m(EkP*zP2p{L@J$gJj}G7M+hON5Y{>Dh?e;#vQr@$i+pHN` zFOkKem4l3*%by$=`83J0qu!i6N2Rt0gom?TKi$AYOAL)uK+>f8)8l@kEHb<7?O3~2 zF8MtLZucJ|mdV;I>%G_@PSP2gcE@M>Up2kMW45SwWSolWM|(U5b>R!0G;^W zD2&DxAS1ZCLgFS#Q&Stbp!3baZ*Cx;=J6tUibVEtcp7Xv5bhQn?m1o%FY?Of(bI8vQQGSqpF? 
zUagYX#(md3#w>hvZ8Ddxe2+@>t7!J83vn>P1mYwyc$d*(BfE0DJE>ATE!4@rjon}r z!^S`P^KJ+kswun_u&?-7N*wefsQYYwLf?eEE8*(k=9v;(Z^DH(Jvy#GRJzQUz3Ax| z`piaOMkT6N?JW zUnkGrDT!(c*(~3+DxMPNa10*v^C%l8{e>@>OQv(#Tznbsh9u%b9VT=R=fjhVIBAo^ zTJ+=`HKuaH`6AtOlZni;(xZdtl+D%ee~Wot+l0jS#+uRXy2YV&?69;@uzH3*KG zpB6c@>_OAV#$@`2JF@X!Jgu-?uRH5s?si~W@UYwScp%~+S8c!LWn&`^jS-th8Sh7X zAtb)YMRl8NpNzabhsMUbCCPlr3l_2PvfDV9w;V8YUA7OXlT%WEx81ondj4u`L}ThQ=>Q= zdi4_g$h(Rm-3|ebTyzNGTChqMO%p5uE?0&GYAdT|I_eNd`x&?=&hH;9qtfK@Fu&xV z?s2v%2;`6b$#Ae(o=DqTC^g6{uordGHk*YJLCdd$@?oPS{{tQU#5^H@GY}^9RtEJ8 z)QE8nCg#@L*N%u)4AQdJcad{nn(#hU4O85)wD;%i zQSjBh8xb$A-z=MHiSv7R=5f@@CikpW!a(eEOnG|3^?svPyaTQ$Vn7J_HLUx>Ec^|% zUcw|jzw-&4HWoQI_%c0Zfv+id3)bu5cPJo>DT-ertEyTcZ=mf{WGzhM9OpOqy2q-j zuD%xV-d2*L-S!O@*4oyVmqw>AL`BkFzZMVJ+#K%f519LiYg_K3FK%bh^j0m_c}mI? zbxu!{D2`3#3))zsJhW&BmsD}!aHPi%Qz@ldcQa*8;nVRi7#1aKhSX7-j_xo z`$Hrm+DEamze?`l7qPS_aO!PsYkMX+N|iG~Dx#v_u}5xxJ5hg!njAEr6H4`E@0A<( z6pwY+GV_Vd22*?&qR(b%_gF6?8mY3e^MX~-6e#K~vx}8YIiVc8ERrK5Jq7_afGe=n7Q zKd~FZadN4F!JD0Vt%;fv#H(^IO{*2$=#o!Ot5vppd`uxkLt}pOU?uf&!`TUZfTA*4 zf3T7GUy}8`r^%}uvy-exB|X?45)T+n z95=+k$XEql9Jz7p6{<2jQSfTJWqi=~@BxgH?jEKyOt%|#ZRu1tCM)qn#|6AFUWOq~ zI!mr*s~XzTy28A~7y=TPYj{y6Cl$TD0_G1X)NZ?Aas1Hqg=ldLeB}c7jezz6RaegA zYUK$`lg+VIn@x*fY^{&r(=K)!HwrmaTQ!_=Qic9Z)MKJ8uBSU=%xX0Txs3fN)QU!f zNc$?0BA*x^dzeh!Qop(w61}9E=YCr2HC?rkw$(htI^_a(9-N#uJ`z5+!iTSPbqR#t z_wZD-w6bWq)-3Z%<6$hYa!Xxj6Gzhck8?bcsnS!8xU!d2*b{Zg=q zr#zs1@(N<@wJ%}mCVb2&>fm%ocq5ae%qf^1_|_+sEicn=pm&C6i24{mFT97WTWJN? 
z{>s2V??MjyUhnACgY34iq+u>$osj=!%aR}0;eZ!;voETfys9WgXua`aLl84xd#CZU z+u84xPZ6*BYFZYuW?O&U+Wk#o9*@V!cctNlbnAMI2ipr7SLmI1v(1W2 zQeI=|Qvfq2-j>rK&BT*hgu>_YLrCJy8XbMpGjHoy#*b_F3FoHS6=_9z>-T>K0TiF2 zxLhoVDu!5as;WAMVKgHmw!wDqJ8NF9VK(@D1G!=iV8Rx&N0M?X6xlyJ1wq6;Q^?zV zok;eN=4(^{AI>V-6rZOI&iLEptpt84U!0>*U?d^hbdY%pX0l1XuFA z*F;3^cdR0p@@!i6{-d+D+9S&H-$xWW5zR1t8-k81dNWXk&nX~w=LZVLI(&+x=C`Ps z0qP*UH`;j{r-|N$xA-SQP-BY zTDbA#1_Vh?2^7JJ^g&E~ORU&ho?o-Zp62XV;dg zR;8_sj?ReuJ@$9R-Q7JtiS-o)FoV4w&!?D1PWQaWtmgu`#ve5KNH=q5+4zW|rbDYP zirF8QknGGKU94fUJ2x5WW4fWt1u42iDBKJhT?Bn-!fwBi6@T=rBY; z_58Sij)gI^`dXbk=WG?k(iy|qhDultNa)(jpSst~^uG~FDonRGm^9ekI$$yp6QIMb_kLRY6cE{U@dmANJt zBBLuI34pz^AA;EGanoJV;G*s6>KI~Tj}&dQ8nma=1Y_oeObUgr2Dk@=^zYmK*`eVf zA>JvM5GL!A;g0QJ+P)~R4Z;dbm^xD#ww8JN;#D~u6){2&f$-VBnXlY77)GV;dq)&( z^)*(6>r}@iCt;qT9{+T2P%8A{EjOh%gU@~>77-)sYgiVQ`9iPSE(OBs=Iif%K)7-i zme#@^-6I?BtFJ*LgGIjx5+9`&(^qETe6KHhk^&C_FCR;at^@{jR%n0LQ}n&LyDFL| zy=b-MY4jE}a+*g&Sw0R~Y7UztX8hXfA$6iH1g@Ik>2BtLVe@S77>y{q>97qx+ob8} z@7H+hET}9*P#jAO6l%Q!;S1i`Z|&|5xtiU`+}CNa_!9HI30`bKZDarbUlVtsv4fpm zknf4`TUuI5UTQ>tC~rfc#aPjfzt<`n zw-?|uiobRy777?1);_P(u{}AKQdAT@h{C>w=-U5qJ<;>0W1>&=L^@iZ#6p`8;GR&< zot^yvdiIdH2iW7{1eQxFf|gY_kK8;9l%awNlV7J}{XFiS9#XOv>>X}GUBb+|EW?aHUDf~ctr37GoJA)ekxq1h%ZZS;Sv(=&4kRDI0R`V|{Ev*}w zA#QeI6F*)v5@UC|czk0N{6pJ8fZG@>yPBa~j)nVcDTn2b=QDHzv%y7?+x{LwzV>tM z9+P!}frUA$<)H^mb-ZOEj#N_|9HT8+6C?E1Yu8%fQg)O8>)=x^F6s z>ub9(X4~eR^+2*B|Xe z_iHXT(U;uGp~VbfvzWZR1mbvp_l?@0S!EN|%Dl!<2VbP{$*jTX#vMl>tS&_^9BtM7iX!aK+&(;Jm1k2)GKbCf>RdaxwqmkwWc_)0P@+9O5pI!Xh4gA8T#*Pyt6Q zpp)mS0`*>*J+;kHYBT&o@-Zcf>%RrwB?1Vw0SkE5>`Hm|N}k z9QfS*1Or|Sr29{%*z;n>sXuamjC|+P9-y@oz3l%BT3DXiHpuASmEO0U1LO@>tFQP# zd0#Lt?oH*IM#QI2-$S2EA2fm2{%dRV?~1(QVr4X~^UFU2+u4whvCW#72scA`dLjU% zv?&_F>+q{xR0ZY%2)Ql9{mh{j|s1y>@)Z*H%K*&oK6rrri7=M`?yu zSs${azCVyCK_h!4=+8CIL#8ZN%=ICDzEN>tEkJ0*6#THvK3JDm-#j{RbG$>CoIG2c z<$vgH(dgzHT-;S@*;SUK8C>-y8Hv8)MblFh<~;2ux_lz<}oQZ!X3Q++j6_ z+j|9!eed4ga*0X#C(kH=vC9p~8xmf|3^l}jf^R8e6k=J&x5MI#Xtr&`?Gno>L7 z|Fg29M5-t&+naRw#3&ZSNtA 
zd|&WV0VM@G^f<E!57?Dmb&j7+qrunI0OddN99yF>khXzb{YE!Wxw%ZGpW19_R3 z`Diic*a2&-fz`V=>h()`LwKhEifd4_e`Eca{`GT_Dz21*bdaNlhN`NnL2tnj;7yAm z{zM)CV`UBAT8Sy|)$eUs^`@FNXF+)=0W8X>hjjkF0SIW(ZVuTG+;J)HSZ1Rc5`7~a zz=m4octjd@$<6&Mt3e0(El}Yj0pC1n$0xe1y%NZNorcf@#uLCOFE4i}26`C0&o#^a zou2jrc7%hwbr4>zEs6!`Hxfoh0`2qFRpTAa_r24-sEUG|1ogT{v8kz41vKqm+i{m3 zGIedgn;N^8mt~)rF*AjL|NqhMlP~uMX9cMWh9c~<&t z{^Zj|i#XozO)q7q$`l9ixO2V+EGy4zx+V<&8O*!Xd_0&b3NNW2$W$U9~@5z z_@ljjmGZ(F4-?B!wqa}+wBo}P-$QLPUqBVf}ddft?jx4R}q6WnOv!8{Yrj6**=2?%oTEld-* z)VtU@?R}f5Hu_>_Mz!k_oyNTQy*5T{sw%9bc|50ob7#aU?Hli}cozY{RKB-Ji!bQq zWq7jYS3Jn@mYP;7Q&8H0#gFnvF`2{u=cU&Bt=cDas{hTeUkCH=n=rOhTsms`8oNEX-KPqX z?X`k<0XMNft0O98*>CTGP2WFOb+%7Hm{a?5=Kq&7{C`4m`2R{w_+Rhg-+Or=78&pn zUW0M$AC${~dltY;6vz(TMpFPdq4av|=qO;TM+{&yxx0G;;{xSx3zS0hkb3bc3(j|W z6hcCZ^b~(T(t;>4F|n{5+QD`gxUr7*_TnN+Pz($V@o)bAIJ+JIzJv{Hb1UiTmB%lo zfA{epteG+XUqJ~kzx)4+74i4V58eXU16HR2DggIQ(?v#tnjn)W_j1PB^;>&$I4>sp z_}}ap7=JYA{~HqHfG1K%u(c#%7l3;B^YFKj^Rg)sw0P>RtTei_ixOCd{Il|FNH`C^ zuh=k>@qaG?S-9z^%~CYL021(qCV3JsbW@#s-r|y5H2}|HJ@%(QOsgy;j{}IN>4AX< zLG|xz#-7(Oj@2o|RxZdRZ9^EMdA+fbI9use@s*aZqac&pYOopuFEwb?EvyVMqIE4L z8I~*ms?Ej6#Z{lHais+$)Ol0nFPGxkJ2CNQT21&EOq`c%4nS(@=oo|n54Z9}kc1D+ zwE14cr#dtIzGx)>h+-51Gf@9=&ecL|nWib~aw}Xo4^~pyKcqe-$^YX+`a2Yy^nFw>=cVZ0J`-D`tESgXuUN5v=%WP7o~a#a)?odXF_&E5If$OG$&h@wvYe}S zrs|7!hxWdQH3pcyFS~AKfXZG7uGLsuZS_6s-a3ZkIqMXA$D4&+uhswbYVBnaG-rnf@}YDEG}VT=jG{pZ$`>py_p%@}i^F4@Mt!O|QpCeX&C-sYZ= z+a}lG_=2TtU;sbU=h`+t9-Fc5WpwZ9eFa)F<@>-SV3TTp8*sg*H-<`nxL!=29TOp# zWl)Uh8f-{3@Jv+xlWj7SlM1<`5qmHVR-W^K$<*@mFNX_zHpFqErT+)iHDy5pw6ZDk z2@fl@|I9YmZk+o5s3$xZx`X)@`_m<}DaZApS^-W#tfGQg&m8OJCmFstHscIoHMP8@ zML9J!9JGg8bn3ib1kbm`J3Df>00#!WB>M63QTPn8`-hRf5NH!Ng< z7OUpY$0=K{8gS-FG+wbcunubrkm<>^5yHQ#X@e7@sawtCdSE5X^ZWb$^QC*=U;h?0 z{tJPL8j9%trkKRo{r;Uh7!dX9!6(D5#iq>2P?N;YFu`^jotAm-T9Qzj1naQ8(xY_y zL;!(IF)4ml@XKFGuaa>O9OANCwitV5O=5m(uZjdGsb4fqXnG#Tn?|ImkaMvD^Yr9z zXYbN^hBZDgG?XwTX3H)_c83Vi?eV$3H0nqIZ1lCN5DmX`lGQQ1Ynh^9zY|;Q^ga;C zdG%z$ybc`R2Rgi}FVNE#&t*KnQY}z6KkX9 
zU{3XU?bp&L1Gah{b)>gXV_sp@lh9c+kKa^?Lz#$bSg)HtWtqB1<%OyY9$Nn}JVfhA zb6xF?eDz3{Klt6=ju*ETgcW@8q4T=HE5Sj0#N%g*2E92;J`IiASrRBNgJ6Af>_gmS zssR6?J0SH%1^}F=OEnj3YinT%nH3%2#fG{UmQtPnefoihpa)wbh_!gsRp8sC{aQ^@ zrIoFJZB~9ehy>%TFEp8ZNevMNJNM+oxTNwZUB0m}es6*E%#$0cJI^J=MmGVj@#ZS^ z2HdQfC%ll+?pU=I_4)K98ymr6ag+1e(Mv!t@Z6D+^%gR5ZXwstf4ECE_a1Rk-;Mk zzpfivd}Lx(UdZIUXJo-8dOjyqmj%-=`%DyABF`Lk+e#Yxw~C+9iWrXNCsf-L81-P_*3ehD_}8a(3;8vIv<4<`(c zYJTKi{2OYStv z98TBWxRo*@DY8gzW!BT@{T)sulWlTWi<()hg-SW`W?DyQ0F+FtgYY_WVMPzghS8kf zgQ=_f*3gbkzCXn_alK54sXK9U-%lHGZeinP;jGJ6@+iYT;*k5CAiaBBM6v=M`-PeN zf4Yk?=Y^RidAQR)M_1gY=dXHDq{QiGNNci2OTxUUa%)vJJoKDs<&S;Gu-j`;S9nno zWmJ0WzBs?u*`1}Xy;)G`U06sLT6d`vxwyzXK=12%@(9(}M}y$&RQqxt$6_;5k=%#jHco0l|2@|CcRkiK7^Rx%)L ziZE|0!X;W&r-!+*f7Cge3*vlxC7bo&fIY)8(b%eVU(ykMg8iBFPm(J$3q17 z+00LTaIj#J{M+dMIF{^f^AOfcx1}+cr0{S=QUIv=Eh571W7&R8|Wi^h)ynYonygK>zgW9S) z1DP`G4^AgmPh&Lvk9(P2#DTsPl7FdCCd?V?Y#+^Eg0Sp9`l6-UZnt62d#vGo#;(ga zy9a)7!eN+M*BYgdu-4<7l{+Ka(X^HD1YZwCZRR8>=3mLBI=Sg*Vufi6l*W)$&C_)n z;t#cmj4L`}VD6DC4gId84p4b6y#MCQH;A>ic+oDo(}pp+H9L|;EwzPrv7ZpFhJ zi&-W?7hX;>NIwBwzFX^E(gJPEWbVghkGFbbAN&pK%2$wzTv9eK(2*M0w7jAr8)*aRyrTtx<7?StNn5h*rHx zpIiO-RSXICItpx!_6N7!POf_A+@xv^xeVuVtxq!|Tp!mYiv;TwEEK&P)fZk5oi0-I zQ{07fU7!Tb!3EP-@%*4~5R;%?DE0w6S@fZH^Mg7LVFAOUsxfHB)qSDkd3q@>|+#T5yv+xzN|AO~TKZ!-w(Bhb0XnCn!h_vgg%b?em0vz;fQ@ z;`}=d(WG~kO`-5_t=d-c0HN6Lt~^cpbjGsyQ6=?EENc_s2L5cqU(~QDIjn=A`?jDU zKUJasCfFYcP1ON+WZ=6$GI!QszDtt2lOF5cio`$S$!B`mOHg{uapGJS8;eMLE>oM_Hh&8n0)z|5-ayaCs+Zw}+;i^S&XKF?b` zF7NR>o1Hdos;(}eKTn9Rrwbi5>f-&T4N#Of7%uGp%NPXM*v_VKQqjxdO#Oox0$zMO z(yM0?D^D2Rb+eH_xddPdF_|==8;2+A&~ptF8}X?VN|v)YFfG35tmfjyCk4rxJA*d+ zud&(VDUOqU1lM|Aj~TC6MpT~1<1=;~WU>qh8r-1$xsHE*rGs)FyC58lcRC4gtt1NX zVGgr$!16478O*oI6o(Vqy?QKK?TXMRAye^Om*-}yWkBU0v>*1Ye!@wsD_m^|G;x~^ z*t7>N=mJ<;01!^hZM)plbm6K~U8Y25-95gSX=r8^yfY%!vJp!j_ z+RNlB6k#79Ju%@I5~RUJY;J{Aye5a8>I0)6&~v;Zw)z0B6B({qP42_#L7@vtQ(wucUd=74J1l5w7+4u!Ps*QphpfC?#` z5a^ywZ#AMw1V@TSH^F$p$MmOZBpKcod(WLdL8Dma6Q#GLK!BOMk&=}h!E7wOZzPpF 
zL3#iC7qj*AZOz==sbeXy_+GaIUvs;+#c>L->pbXzX5C$upKCwBYQ5<5l1pq~4TV&P zG*OJevlm5?lah!Y)T8Hit}4h?h6KjAS?PQZJ{Rj3d(a3u-w9Z6kNRl!s}}KtD66Ce zZ6R$)hsNcWt08SYqZ;9C$N8bHByyo7OJDzC{raYBl3^$N#3300bd(ftshq8r<)Rtt z*Uqy>eQ>#Ch!7G0X1_kV)uSJ8CUiw)V{`sdPr^?cwbV*{3y+G7Y##c8b647RWk*8B zZCAC`j7%s<*uR_FtlA5N!>6k=ag1i}Pi4Hab(ooqGpP^b>|=f)CYtezhOFoX@hZ1? zSm{Si?Qm71vScnMcU!tG*Sq&%M5QJ44lgLFew`u#ZU3|&8~j!`uR@p<#8l9Gl2VR9 z${2tw%2wZSgUXFYJ2Q3P`A=6zl)jgol7)^QvAS2Y4rwCuDs6#R4VeJmg@wm&deP4b zZUEIIJO^XE1LHP&&s1GwjPSri@TgFJOSW@lQtE<1u?|JI;d>Meqh42yN^Z4=e)qZa zJ%&DA*`h1u&Ir2F0mqKbjS240b%3pE}wq^e!-A3k@&kM zVY%{y8CnhVxh7V<;gESO|`9|77EF+k5?$kRGIo2@+Q zUj~?L00=Vee0F#k-~pnm$h_ox_I_odbq9c$gc~Dx9io}KA6#AQ+e43Mkn|(nsW?9E z_8VNaj8Q4q7MUsZ3?jqVUv4|bK&Rbg`LyT|_|$wXH)Y*6rFN7BN6pDg>tO{)Z2PsH z(7}c(idE(~HQVtL9-iDP{AICv*!_LgP#oRi>N6R?vwVs1wr_qDgcn?cNSP1}52KIr zUDgSHR69U?Ouk?EmS&tzM-Er$LzB@Txko2VsN`9Y-dplFu}mH<`x{YNdJAtleFM0z zM43cq%R1-G@HW$3@yK-u8Rf!zk5Of-3|g*jbnqa}`-}(8sqWep0Gq3^$$jlKHjs{i zL0P5_K(JvFV?!qlexx5SE-dYNdh|3jHxK?L8uB+-PM;*#h`m$K_0kGiwr@6od}`z% za--$N4t#6BKERHBakcQq=*ivoU0-X#M}P{2Z_yAJz_k|p+1UobruJ!B>4OkY^3^!wxpcB@0lfR6^qEJku0Ol3ph<(Tcwhy6NNNvZ)cK;^@KQWIVQHJ3q?TCzyOi>Q8Wo zOxy~~2$~P)v>hvcly9)38tqN`z3)Sd4oRHR_nUhxIUe)*=Wz=sYr0lxXA95*{Yk{i z>Lv7S%HO*{tiVT~G{3Hm(Jf0D-hEO`UXAW;CUe>!8MLDxMHB6V?dl!D(%l4guB)!e z{8Ysy4YFyIBrgr6ojP7-vzT}`*fAD4y{Ho%eFBHY>h4DuYQo7+gXs*rR;X(Hy?9{a$-;6~>ICBv_Rf#zDvXzb3OrD*m>7LOI zhpNFdO!Fm&M}>$un=lK&kM>sdikn`%?;*Q+#XlDIc;qjc`(A^HztMvy$IR7IEHi@L zZl#{nx=wcO5u zAc$EFjt=4jjqJmZ7ns)DTl#*9ruL{f2iB z^Z3&Z++HnTj8~7bFjljdB?O`jNp}XCS_%1$d(LZXw+lZjuJpdYi6Vq&8N(1@8^BUn z{>XY%1eeS{heIRP8>@D>eI5Odla$%;$J(lj1xw1S-=R<6oN0fq#JE=_BDgcaGLMRI z4x#&+V&>oc%+ax+O_{3o9sIQ7Ni5-96KuJPh;VTw7J~`uvCI+lM=DBJP59QWeN6$C zfZ%2Iu9=swyM+IfoF>xFIU zVraI5@RiCs7qP4e^amXkyA1~$nEQY9HTTNPtx2c?q#^b53(*bv{2>75e=HPj!=sBc zuQn0nOBNJ_(lcMPT^;A( z;Ls;E>H;M5pU2#v93@c*xXO>4!ttTyD=+p;in8G^o=(V;=S2C)irxvERuM@|l@NWc zCJEitsvd^tV6SM6W)#8aoOpdby0T}g8Y0kwA@;OmEwEkCh9&j9)Mvb>isDokVy&@H 
zoTH(rNfy!hd3z~e8{&^oa)kPtHp(gbn$ib{4MotyBjmrzWMBz-Taps?^y8WJR=LtKDsR>Nf0d%G`%GBkawOwwT1!BV&(MWM@U6=#3c_Y2?^=NfZ-C+fOwo zKuC5nqgGgV#xi72o+o^_Pcd4MrUS7fxg5;MOz10ovCEr z59mvcvDxuc&Noun0Auz~cfZC<&EX(y*s$U(IL7u-BnSLx-QBOrq`b2LM{cz8ruE{$ z2{vAG{ggz%4b|Hh22X>z$CH2KyYa8ij&a94t}Zu}DNQO`8={r_O{6BtZ@BbJ;ncDk zYe7>Vl zaV71=ro4o)d~9`=FuODpYv6pjEfSGd>5ot|Z4)U29Znf3YYsyKlB`EadOU7es=&mk#FKl00KJ$o39$!6S3(W?Z(JlnZ%cQ0wIWVb_`P-t|&*db|S#Kc@i2U_JyDafhz9cLspG+Yw6O77{TD*u&2 z1XmB2G}1vs2*03K_+{7$<8B!J_{4L9W#v1F7yK`pE=jN!4B5R^4|j8^7JQkuR)(*l zWO5Syq58MOH;V;OZ;UM`Hh!c0gdB%sw^uwK_#i?pHd(EX8_VrzHF%X?Wp9cpe~ML5&M>+UI6Qrj$ZI@ z+fp?oRO1sO=qTq8aSHtuv|jXFt)xg-SMp&uBS?86(J*3ZzFzVSTOoUTplX4<%VViS z@UCB)I@4IkrhUwUF2KV!5}PyT0a`Ta!xa`5ev%>m)Xe25LOIIeu0XDKh2jSaf)c8nr$2C)!Se!i)UG%`McA%Nuk9vfSZU8`V*@?e>e5B*H4+#MSA zeR8x;9jNR8z2f2FtFbW0b@ws+ebXZs#zpQxv^z|n1FC+$Py5V0 zNi8`&D%J1k?3AWAPqEm~{OZ`-znVhxH<(|5hj}K&Myqsv7kKrZTJzcnVTyk7^t@z9 zdxhw)2YTGQrvrqxH;zfg=JtmbSb*0Lrib<>X%QqOB<9W@xpZ2wOQryX`$%Db-^P{x zvjTx-Q`usBpIONV^**goe_!+lW?d^oOUr^tg7U&bg!%cDmmF1gOJ4^lvAVC-*9xdY zEmWO1%4=3u;+O|tv7J^dhjX-z>gpPpmgm~AfB$M}V!HBCTDnU4Zwnu=Kk%6d%S};k zdMQ!eLj?_#WvnmB-|U$XVap{}6HZPn$NnT*&#e&p86F)e!Si9! 
z*8tszuTh`gI&JT#{RKCX-M;d>R)Mzl&CL?I{j8-fw!`V*WNt@X$^z-`;iSs7X*|{i z4I#gZ*AfapgNvxhdVBDE19^?R*yy|%cG@nXyO!^yO(q_&B#r-MjVIqcRGb-!ftou5 zuV&fqxo^NFV-&;kDgTE?5#WPgNDq!qM+`!Gt=No0gxSr_gm^6a-!omo2k(Sjq>6o# z`RsX?M&8@I%+hK;J>6?I9m2l<7d7mv`HB7N0H&a)>;`gwvbM^vK(#mr_ej;>`9Ju2 z3$UoZu6byL)I5q`SLwfC0XZ{+{Q3-uQoB z*Txx`nKS2{z4uz{zVEfx$~m0b?I{`Koh#CyA$(QiYpnH*{tOL&c|={4@n-|3>w(X3 zvfFfW0n_(C|LtvnZc#HjW?`a4eJFOv!5x~5>3Auq5)`wY>xRW4 zqVa#UbD16z#e*8@e=%Crz6q~gLV{9n~2l{1~IEbM$x|F?D{XHQVc%cCyo`il}h9$*w0LDH}F zE)OSk+h>k{dwtPK`pC#f%XA^bwVQt!< zi)ZARuRmdtfbGe%E71~~(UC8x>_@_XHjMxFqcYpSu1gry#pZQ87^~@nZY_rKe1Sf_ zZ&;j=zz9|sZRNgRm!AfoZF#SMCk7}>4IIkd*!mhWM;iIl@Ua%d8k>YQ8WC=&KYB|XYCHRL+&+rVg zlT{=?$ZGq?F}Na*Y9Tp))jbV&w~|+Q_&*du;1uMqo!$bwBarDYv-lyEZWig^sXEQi z*pD>TxN}g7mmHO{6w1#kc1k>{3+%~$f_24d$no0GKdo3Y_n*Et|M{)={D55uMxCrW zN7C%6_8L0O4M^ZZ#qq!Jw7cabovb1^l8Q2M5L1^ZG8o{a?E2a^$AP1I?iSeSK%wz} zeB1!bUpbTTVG^M*oEO}y$+|sL&5(LD(C=M<-{s|Cf}k(xibY@P$vZUYQ(lf5-Pnu^ zY$8CVuzirwJLf4Jo>8xVRmfja;B{>4gy2S9ZoM6NDK9w_8-mJ#5iQcg^xOcK-uDe= zC@WBXO%yXJKtAl(lW82@`z4SM60R68XsGG+9ESR4NVqKxY4z?;_C0$AQ<_TtCpz@| z>X^uXW`q1pNkzpQJG^igehCm&ff@rS-%#Y_Gf<&VZ|_@?8ZHZXMy|2hPjm;@S(U4{IAueg95oWE_7{;rDQJEH@k2kA9Mjb3+S zeMQrG1fwKxAA4=iJ$1I^g3MV5B_1av#qj80XE*ZLf77F}*IW)oCDSW``yKya_zl)} z<3&~e`J62$_WO@;Ba-Y1$%1eB`86`~6KgL9<7^4_f>|Mde<)&M!2avme}AO(75?r& zUi#nuh_}=t{Le@Hx8H-`NsI9uhW+dF|Mf>a(BH!kl(s@6#(l)k?_#jW%}yx5VzR2% z|283{RFAr4YD%ou|L?o9R7jy+|Kq^1D4S<|(uo3O>{&y8@%gXRUG&CP#tGS-Nmb@g z9vBYB22IBDY?7Y!HvV&JmCOJ83o3b*!F<1dr4$f&7#fPvJ56Fcy7dHy9hYl-e6r4* zU1*^UZVfNNwcXu$&sP4tX?X8mjH^tklj*BV?4`|R?{N>QcNk6(SoTIO#dYp#h239JI{ z&;1}65hsO8y$)9uafGB~@JL7!ygK~uvu_?jF}*`V&>I?D7q_+$NK8rAi2k`WCI*=f z_BWNsf_lNFa9_QbO!D%hG^&R>$MDY1&Uy>uMPE6!ui&tF%#zS20$-$|f$kRrla0T0Pa8X(y|bB_J_-q_SsaXcp? 
zD2ODQ@dJkN!UKH+qqNkek%pHcW7y)s(arHIJrZksE(f&~qIxT9zLOQfl7v1jEl+{e zs&!xt{iq%L2`Mi9#wWU*lH6P@1Oz8zGc%>6sJ~O>{QYJd@pk;hxMTZ7m(=C{xHaT{ zIxf&Yefs30$o0wmeA8PQnb!&<&yE@~RpzdrJVZk1X%VS&8=AX##zsj@BSRiD1HFO< zbqRW=EIug>$xI0T$*EU_b_H^lJ5$?{*1W8b+SUm9?Om&<;E4V;i@_mwWpvHW(N$Hs zOF0()Iy>0dK01umnR|6H7KxfJ`5}{82{Dr7KBU6<=X?G=j?}vBdVWj>o zuUaP?jgW+3n`E^Epiuqd{=OITN_H@V0}TmSW&L zoCEKma)ua~!GFxax$mz<+gY7QdE!!s#>O7_w89@9QL)(fJI0lUu@yP^)0gS%XDv;Z zQoVi08;BVHucUYwQ6_E zRipS8vgS)wqZk%ady7^2Zar;X!a3>Zl=t(q6Tq3ZQ&YW#Gu*TRlKi*~)fR&%&6nlU z8d7j>FJHc73kd5K0exUnQ%W9ia*jcl%7wcYHwgvqt~bIX;$P6jqgf!r)+ao~?d)E- zpTL+|433oqV2e0v<@-3G&>wzb@9Ky{k~=;!6=ac_`G$OhIRS4+W9@22jEt6EaMFy; zE{e)ipHr#~c6YAAVc|OoH*6hVTRipDS@u_w?ep&MKJGes3*4jh>z+ zHb5uoUNurZUHcgyAH^vhl7qe#YqIQCvMsL@BeO^nqWMy5Zx#9*BEGNV1k2SHxE;lq zicVV)O@`o;d*uD}@qwS7@!ZanG#ChxSN>7w$2V4W4RZ=&;joW|*4mYgpdjo-MMkIO z<|0e{z*t;Ny*YL`?$8@*86L*6Y`A3Rifr3sHp^a9gDG7@kQt;k&uN_Ow6Ewz9$1hW-CGWOQNBbRG(>o_fDrm9B_YIGgRr&xa@IE%|r1{GnvR-UPP? z21KM#9|{j7CZ-t5+2rfalS8YEjxdjNw}OTZ_x>ba%sc1?>RUeNjGgUEG1Qn(Tg!OR zi5+Gtx~&qm1%B1l5oCEI0)o-;@wAX)J|!j-iA2K5*kJ0WymxCkOnbX~Yws%juAB{+ z6sb(47)pn-QzE8`2amIq6OLr#H+vQMctvlhvb0AjCU(wpii%XmSt`@?^77+J=o#oE zIi(A8n6`AQuIl&(akF(3S7h}fzL&4$kygG4QKij?H;~lZ7Ta^AF@Wu?uC4jdP=H=z zXLeBIv+HBs9T^UloxWfXx4?_?=~w3O2m~um+*Q~?PhR~NSHYo~Jo3c+^c5rYp_?3X zvXxvj4?E?kGQV6rGHxwREdvdks0Vf=0YrCy91Czgv7tsRj*FN>&G(&jVC$E6Vloz0 zyW!FaEO-m<$Mrx-%P{YD$0TqUf9I2*PvU+XMDZojGXtlu`EC!jq_nbC*5j5D*d;Gt z)aa#c;|_m)4|_$Y}BNdmg>DwWTGEf`8b607XBNLkYdU!%fv`U_Ka^-LV3+fpyj=^yZS z6~18j{A+~9lR~r6HMe6}A+g&4;!UZ`A`W?vPRqbtUl43rn;=qNwoM!Ndd=mGPw=&9 zTfO8m^zy2`vlD%74e!|ci-C!$27|Y^w*iv>uO1^yOC7=x@wLlhij;}C(CV)HsgJ9O z{7suyv{mYkDe`>!IM1#j#}O-8LdKxz%C{Fy?c30c_X1xWgqd$D)HE(^T3!E|e?q+* zua^Z^Cf6!pt>JKTs%q~JPfiA`;&fR(-+E%wVgJQrWLT8P<2IqDnCd}(T^))<7+G7b ztVI?X(ZX^HN-tI3^EBB*6LuPK^*Mg1|`2FLMFHkexr zh)(W}+|eZg35#9t7XjEW93Dq2W+9Dw;`i^x855cb%Z->?e%j@iI-T%2Iyh{h;Q zLC{2M5ZopQ6Nqr(@=bxgoQ4fV*nA1RuNAz{2fQ4KjQjq*XdS^GD$Yg|It&Z!5JD1> 
zoZZ*&nUlKH2RjNeCbze@pWx&B)l52GHu-%kUV+H8_>P`Hb!QBn(vtb;ai0P^LJ0g# zEPUe7KtLWc@Z;#1&C$wAj*pM^+BBAmE&39LcjxMPx{eG>M^%&G0s~vtQFZN zPq{I$X?P=D3D`V{t&`_}y{i%;h{l)}L?u0tHFJelUsp%T!SQ;~vaa4GC9_ujJlUnr zBRj&2H;>U$|w8BWiRS{H8$#D8hRg>X8} ztK1mRM~5bhCtb@6q7FxM4DNmw4nWuZGMf=pTFW#x1e!&oqn|a7$DExVDKypFfpv6v zdfFD*;{b9`V)%1_kMij$!mYAWWMC$#(s&GY;ijkkZJWAl>6z{7s$tK&Vn~)aaOk>6 z>MCS83DIg|?csFH?;aAxZE4}?)z(h2c}f*Ky!P~X+S@Bt>Tzd5%2Sg)anU>)2sVJT zou5xuTZG@=?*sQ^pb7f^j1g|~=4 z=M)z!?#pA&51Am{Z#$n+!9e=g05kAVmW1cf@_|OFE8c7G_Ek?RQM6b>y@F=+j22-9 zCCvcfp<6#YBMl`N42JhVPfaV+tgRLW-7{s9Bq3n&S}m_y@6I%nfD{DPgwvFFo})n< zo%UUw_H>i=2d0`0j;XTL&N(=pULM;MoX@(wJbt!#fh7P&7Z@F2+&_Ot;|{(E&MGtl z{!Xdb6xIVrmnUHkF2?l7!4~0dnxUVZI*7N!oqf3k%qfk9WK=lfeF`pdai13mJGepQ zwBE1n*&WcDW0}(#SDx*{Br?=GuZ^g_7H|;hCc#=b9frCg+Rh7Y~_6o%=jptDIrjGj2{ds|h{9s&z5f28q;IjOy9n9b^ zc$0eAEGBQ;g$VOL6+6pey1yO$L{JI6l=`gay*N%3h3*7Mn0unm5HU1zQmVe4BtEx? zm3EcA;}dBoQmE_9(KPV6^pe)rtRR7B%6u`ovC)TdW@lw3C_J16EtK5*72K<1BXm;S z=ldjn-2t>lE*Miq%dH=@xj3O*Wj}NXT)W- zxbUFTxK$y}CQ(wJ7;V3#vNX!4_7>0c-Gxx7L4*XQz%R_ECillEL0tjz$b_Rm{3E@I z4fvmOe8R_WOO%UJe8ee@BQfYsDn%0jx7)hXtuW3CDdLoazZ?C^Cl3~jyB3c3{h9%T z3=B6g-h+XD|J-Id2L6uYRj4qyoM&~AWYX|7=h5if%t}1a)gBL?T57h@70+-W!GAa%(vzPFjERBaqmuyf zjAD!&KA)lSJqbS>ICZymch?3g1iS&6!_bUwVX_7v?@u%f(Ri927hdzr)-mNLp2CF$ zLCvk&>P~@!^(U<7EYRQf^ULb*y7of5zBI_BG$ZYc`|B{sBP5oTL;}<;SjM+m8uRp8j7N{ARicUQ)7Mn_-ZGRf|Oa^}_!sfgJ z4gZU^wPIOmW$JrI=R|TuA2>i%sx@1q4<-uFtS~M{re@i0p`S7~o@o|%^r);>jXt`J ziwr9B&Q&qes5&uH#L2ZLg!Sai%$BQ$MgV(QNJ)9B-nJz3I+5m2yfkXH^%yVR zess%@h4_7$8NMR7q=AfTA9RoKN{21XS>+Uw?FH=$^PB?im|d1R-JSpK#CEeC1CqJn*os!qeF2F7k6ZjHm z_ZG0DAkqm!NVY~pU0dulrEVKsyxQ)EUR`p^T7l>!G3J}cs2kK%zYwPc}PAV!GR^$HPqShq^427Z# z%}X}V9#@d$7c>V%SCL$ZZNh9UJ5hd#2^*wQ%xTyn{zWIWk{{jj^4BlQdN?AxyZao% z4&3RAziwMDEqzyOFs|LKQG3cPj6@?tDhd)Q^Gx&V1dpWYX6fwe;^RpG;>OSi$4x|X z)arkbmX;wCCe^xWqnfzRidZ!*N~@sRXm5viq5 zW^;9zxp*p%-!~8h`8%awt(rR8x^xXarPx6x>{GBhTtvnVN-h#AnvY-qGg?{UtvdxxPSATXjTXpY< z5)I3h&bjC3EVUwT()VdoVGjIM}?l;~c8bBO~LcJ1owsBj(e3K-s3!+S{vA 
ztIA*SzOclE({feE211g+$xWV$wdVEp{}y-PL`sB zF1p67wu=i#t5lCsL%sq?WiV0dmAVsh)*L5J5I&>mabUVy^AUU8*-(O`a6Z{+G}{Wr zfe~kW=)*hOl?A)rpX_HTP8xjfyH!?IrUTpKu=VfGG3Bbva@(i6{sNTe zwf4P@)58>A9!QF}fz#qAog%>9X%6kPyAsjTVsgKkNe&YJxJV;>mD$ji+pP6gFl?yW zEJGF#pIOuISGe3-(mr|!S=OT)q5;xViR18!eNNa))I`ar33>fUj<$IQwD#;dejxe%oX=n|&nsF_)W1n}uUVsjf6n7h( z(H!*jDiSbf^`Kd>trTI!3eEb9}!2*GgsER$JPGrv?KO+|L zRDF>G;pzq|G1NLTf)Za{5Th!H{%B(`aSP>EqYP?MQ}055p-K0g=Z!Xz0UOcLQT9PR z@8$s;ge`-7#*g!$`I|?>DYu!BBBEo5-gd=D1D~BkG9QD5D>`>rhhB{lL6c*ek>MO{@=T^=DNLCr`~QCYe2dYhLNI{GKfD-D&!j}{oX}rX?mZhP2U~%*O3_80LbIOg4Q_;MNcU7T zay({cx*jwua_Yvf%f7~5qeqWC$&r?v=ZskUZX40AENZXd_xR!%`bJa~nfTm|qBNP( zV5U3KHKOMB#ku^n{>!s`cdZv9HKWDUR=GNRiwe!tMnwwE6|z=XU8Ww*ua;|1*<1t? zYw`;6BGtKy{3WBImk2F$i(c!|hPVWHX*C&dg&hxS#7^n+UXr!d_503|P{n773vY`FxX979)(oC>NWKLk8-! z1UIgU4roQRIcD65soJWEuKxr;bKhwl1A7ey-X{3C>^EUL6xU!f95hj|8x4vsrtBE; zW`C>)OttRiNEBI^y0tKVzYZm0*RWNILpXuWaHsag{E5g!*>)0=^+7L^^+e>GOX$qn z?rv~TM3(NO6d^L2* zj);q)_MGmIj}XQ5l$PBbb;SfU*dYwgaj0i(q~^H_$zeS z%M8aZ)M~8~_Umxs-Hy9vl7VZOMg0=aneesdr_XaVS`v^+vi_+agq5!Mkd7lo{kVd3 z-2~CNTS!=9z&o7m?qFQibcSDgddooOMCx>AV{?q9ZfzlrXKKYM&`?dxXC`lGrrtdM{?GlVg}O7!g;!iVjh_C9XET%jDHncQtp zhUgH*XdVd%L_Mh>lBJ+o{_Zim-Fv8Ce>dcF`UTv}1)JT$ft6zfxitlQtzM%;%a8$z?Qk$}{qJ^WeeV{~-?)`6$V>7}{yPTX!m^E7)wmer%IJ32~~ z@j@=*^GNyHMu8Ps;wQWCY75_2Wcm3yf^HbXgvattVN;Zlzw5 z%OQJapAR9A#va=_W8sW6$GR^Ze2`2`^9^#vBsr|>{m?=?zpY*Lvc7Q^|JgsE&Gi=zBBlr;V**Z<_d1Gk zLC;Cmw!}%*7c}(5!U%L6$Ei_7@KHGF_)22s89 z4|hHi!1fD!;|l8zZOvE)beR~)1+?j#{c?zL9JywzVHy;!W9^ez7lDM9#ipdmbx?eQ z5uNE3f~Q{g#`2)dzjXo0c?W+KZ%yl{PPOF3vpI}h<$Y#1T~oPtC}BMrZbm|UV&QC^ zn~%4*oPv#!L3*l8WO)#(iK&Sr<-5)tGCSu)w9S+VGS>+jX`P4uMKuEL=n3_b{;FUsaY)C3ieJGYGG9MG4#qSDLHl`vzRh4ppg-W zWlHUfgSo2W0TV(AI*xMT=lRyW21FJ=h~VjRWJ%pRN+rV94RMZ-ZI{-T&`C5%h|FT6 zqrchh2#YD(Az)_@KuT(s)T);hBV%IvBCNc;p3!0*OtlWbGUUiW^GmjzdJ0KE`o!Y+ z@k)!G#blJ{ayAizbT%ka$K!Lf8`Vvdv5uz3%ntk;Lvh@ecQlpxE=RAQjSJ@#?`*I8 zNtSx(Zi}05t?n}VG}7@T3JAEh`JnmC2xg=a5wiE`FTaa zy7?CLo!5r+9AjYts2>0P>XmXYah3`oW}XX;X@P9tw7t)xcaJT>Q9GHPFr6 
z9YI82R>p39JZgQ^S{@0{D<#E{&}_9IEu(uDYB;K@+EWIaD=o0dadM^{ebJ2+`}5z$fF zHsx0genlh~Q!KAYJmoTsWvmw?FdKeX)#tN4i$)&H;*S0B;X~`iMT-O>TZP&UYz-2P zuL0JS25a3t1FhrC+^HEEZw*JSY&kz3G-)lDy6A5$9UjsG%}lyT@Uz^85}>YQH53Sf zKoD8q%7#yI=jb0z)HnTj;h&zSyZUt}E>iLE0=1=~HppN+Ojh_>PH6CTgL@E9)7q+G z+zS^HBY+VA){-odMjD*gRS#e?VTz6aCN%37z1VMalN)|X?5bQ-tt`}7<8(I7X`dem z&I+zBV7qyoT~ZRm_a|4sxos@GU3GKv8mTQe`uO5%uFAgZt!#ZjU4Ge#yrDI*2NVkJ zE$v?1d`5`-r0@`9r7uI^?gj3Fy|gf6=Yr zUj@)BJ{_NV;>vEzqjWc<~i)`M#a$%U~^$BV?()#>(Y zuibe|C1*C)4XGFz9|EVufuw(94Al)tTxv;GK?fD$_C-i@3Qh==jb# z=|Z$&(m*8wph`N=h%$v^yoGIvy>~=+z&qttlLqP%x_U)XH^F3>3ag1kBcTV^5mXU? zO9l|(Qp>eP6T6+EU=I%Jln7@~3es`==%72|;LdsPU_u|3&f0o&A?S@tFt`k&Ki-2^ zU3R_3+M1frAx{x>rLESGU1VjM1#Szomv(n4nbSb7Y&6_g0tIFfW{Bz+>!|#o_=1kZ z%jh$<*l_nMe{v{38{6;F&nM2Th{V3?>>GW-8()qhA_x`bqiuQ-YphQ`@FJ!ywmI1n zw9PHjF%zfqa@7R{k|!rw5ASv{UYXrf^(j@W|9ke>8^X$~*;_saV7o~Hsb`assR1T~ zltxA0W^>bT6O%U@L4;EA)}i?+utz@!)V3X-8p7(S?iIAs=sbr9=khe)0D)C(`Z`>; z;S`lb!nEThntyp1M)r7C8r#M>HR?Sx2*&k22k7O1jGPVtsL+1OTBkl|qJ{~rCuj&z z!Q(@mvWx>HibkK{U=Al62J0&`Ac{rT*_-?}kX#%yGt$Gs*TczS&(kzn2gh&kKDK9T z56IJ%YO}dut@>&Qgj;#8Z+A^66u2?CZhDIpQBN&QEhA=r?=7at)rQI~>HzSMTDA@~ zT|D$V%}HM81`X2704cyk5@?*BK%$#_IcS8S2?|E7X`NBiCb=J z`uUYDGeYE<(V`9wg{6+b$=+S9#Uh24NQLW`BrYDow>+1=cVB5C(1_s@%?*SIVV|&hFzdMQO)9MLgq;I8fOB-ssn+<^iCkhej2S}nVm!@voE;kM=P;nOOsMX>k(GrBJ+UL1xo!<4GM2%$!~u79Jv zU1AQx(uy)qU(K4GZo(Zm4lE)9g0Ngg?BS7-U?vT2ZSKu5xpB1KNQpRZKALnoTQQ(* z{4g`EZ^fG0vke5tfOY*ob)lf3aOI}p+iUICxFbbGTVpyT100A?`fY@ank7onO#7j; zm_f;b4;w8UO7T(O6Mj(FcA;S2R?i_TblI}>j`1o1c(03zZt4DH*YSqpT9B~>pG@z{ z3+8R@9404<8B0`z?zwlQWM8ble2L6-R%A|!A)s_?H04J_#JpFWHKi3>Iwc0b^!0V( zNpi&nXn#+a!UJVzXZn#2o2!1h$mpnV8{@6xkPl8BQ8~dYptfxSAgX(oopMH$T{IXp zHD@E)3e1m}pEt@d!y}iMvzNNB5GU|^RCT=qJHnQxGrXXbr69qMP8nV;6Xz>;>b0~2 z7WR63DaVRQ6B}!bJ@>mgRlb?BMB(_KB8A;u(&nk>73J36jfLgv1c~X@o0wOz2B``V zd;38{4MryD?2CJ0Vkz7=_S-q3Z2_PRK1;U;>-nZ@a7rcm%I-VS-ODt_z4Ic*dnM1k zB4>z{=x%q9>)G&+%~&>V$_ z_vdl6(sp%IQ7ACLY5;0sm(L}y%L{cAVs%0OFOIs&FF1AfNUxDS0d%|1)55k>#@Pbbc@+ 
zA^ae_q9S8PPsIpgeMO3O6=U&xW6Pd7D{W)BKW|#ZoR0lAuo)SE{1lZm10LPrPOw<+ zd@h5O5F<@if7x}YNP7?C@jV}SYyVkkJk}pbR@pAv$H+$qB~JxuS9I`(?sCY_^7T8B zhNA~V;Q$rXhP}>kW@>OC?IG7&*koW<#A%wl@MGCW7v{cVR{6!fwptgVW|SFB0`irX z<{61ZT8w*b77jBhbb^^wbN5O88@jXlm*|-U79fE5M-Y?D3$SVyi_5a8w}h-Hmk5_{ zrRqJ$t3q*P@$lAo+y&kl9%9NUx;rbf^D#6dMO~y`9tR$W(E_#AW?Pa@*Bw22zjhK?p7V*{ z?wU?`csDeVI_c>ATP^ti0`y;@dwGD}IqH>rr1`;b&U{b?mwuwSVFON@P*@%De3-|N zEy{sSby3H*{GB!j9zOBC_Wq3ordM?gCYPl;4MdQ?o~&!wF}V!E-q0e0g%kyrF70hz z?IbSm%CQCwdU*~MYjyG(3h#D8e{9FxR>koRZ_GwZ-h3N!_IPxQz%~6*C6~P+gsYd> zoaIB+O0X?e(;>$KjIJ^>YBG%f+mKNRVLZ#(k1ic4)Xm&cOU|Lz{C|@)&Jx2ks2`9S z7&L(WoX|B7R4MHiW(?tVxY98;+uiigkyX^ALb?SYEPJyTK2(CHh>VQvm%DnHXOY6d zz`%ChhbU@gH@BOm1A?dUE|O9~-xDM32Hs&35U3uxKC=pkqpIxg9SmQ)a;}GwH&XoP zT}Q_ttm8qGad%G8j{mk?4l+#m!1jF}M|EiVE9I#ZW&iJXQFYsQpsxRq9~h}+d9Rb+ z+H%*dKm+x7T+TTXK$Zqj+uf@*^7HdIg9yKWj|Z511)W#b( zuq=K9Fev^vV&Xxqb{vBVyuPTp8}~IHf&QRvx|jGGb00@ohm(>j4U0jWTY1(4h|D_B zi0D21R=UE{s)~SwT|CIhw9CuKMV`i@9>7kk0jxH2?xmr$^Ma(5e9g9L`C7*n0+)ir|Lo& z)O9^Blt}h23@Kzxce!=_)-U3I=N9(u=v#aPOTc(#z^w<3?W0>VSt0L|UvDqT|2N~a z+~`hSejA7LM_{F32wS-+Fr1WMq%eTxH(QTqG(FL+=z9ZA45ogGEQ5ym12-+ zS&)X0%={7SaSB|dJL5Xa0xvS(q8J-z$K%WF7i+sH^Q~Rn0BDZ?M{|Sh*_n$9QfQ3p zs^#*ea?b3w3q0%LkmSMcmFb!bfbif0WU^!A?;3siaH{z}rsH%(9vSXV1`-mJ_)o@u zI@WiQ`&c5tIzKu2JnzY3*s{fVr@*Q3hMbeDz zeE$)gT^b-PK4BlUlG2Q*FQnLKm`Qy(E(noLiAyXTMAY_~TUs>pA3;^lVW@5N6#OZW z#E$rLrfCOl zH&^+b=Yp2)m@|?H+!Fml%_W1@t}`QSDkvCeySr*n=yLLkB4;v;G$W-p#IRJ0*}pZ( z*xN@#)qixFCVp2J2C_c=@q&I_7V+!lzP@1Av(%@~SG!tCoPN2A8_0@v$FaHbq#oZ& znY0QzR%Fk%i9M|1ng_;T)X06*oeU95*3n_053ASu9+3D zb21eUr2CdMgmtqmH6VcYUUl}tIS*~WytEW=CO8_-@065`0Q*G>X!w9gI?_4uDHY4c zor_sm^|sL8U!wQ8DOZrt>wRo2^A}9} zNr27&0SF_%GM+F&e%`QJ?u?s+yiLcDF<)nJ`HaDYzeiify5!yH#O$5zq%l3AXs?`Q z@nsk;gxzHtQx~3qjC)Jh%;X+rgF1EuxfNB~ST2~BSBTWBRS-f|BR$@d*7wIOjkdPR zQ*&$=0-P-@2DZJw|FU@=$5b`->ko4|IZwy1_7)ALJ~FFsD{j_7mUVBNmM{Uof@I9 zBp&qe@z~dS?z8k9QY%!X%D-YSt`zxS^8mI9|8g3h(3gSgOsjU#5va%Zn@IUBuhmBi z5IejJmhHtN$A89LF;!|yQgr}*I5sw6xb@lCII`9C3#+22URUQd5WP;;TXL6`vl}d; 
zYni=|yWF%hHQ8D9l{pnexEbDAUteGRm3l+V*nfc1koJGkZ-m?&+Q8`bnr)qy|C32J z8X|SW2AO=o87_Gv{#9b7RtJXoP9khE%HyVT=J7egNpyFMFg+IN)sxM}Fjx?GVg)ou zC2n0Y(8cyU$`;`V!Z%07a%z!b9@h97gaCoI#%*TID0&mMK9mF(A`R~-}e?H__l$Tso{$o&a)0k?Lx0@$lpZd&&Q z*l0AMz;1xLP}5rh1i7`N#aw7#6`Hx*2M5XTlbL)d5cbg{8B3J*RX1`(mF=SXrRQ~Q zGyhIK@BvJk+tnAG*10)vMs za@rIy+78pPv3iOdGkCWG0~cOjRT0rS36Y24G~91 zUbc13U7(Mjyf637-ysN9Z)BeyVjD6$Up=EGjQE^yG+NP1s zoDROza&VC4rQvt9DnQ$;AyOjeX?}_cGCEwnIQ|l5#BIZP!K{YV(ey>%w{};9?<0Gc z>reBuVrmsuyyUKntkTV<)Vq89)Nw#{r|dKZR;?IOMDP)iu z*D)cbT0f#6TNx77ADYr5g-)o*iVE0<4|b0pd{95xi1?fJ2ljgh>fR(leJeElK^?zq z6~W**_Vu>=)X{2qb9SDeRqEfAFNB^mq{$(+%!Bkn@3lS^-RaQ|Q*Dqiaev@+J>hen!c=BCb;H(!J}vPFZd3~IE{)$6j~wHtSG z80OLRv=7lCTKGeS6;bp{Z7+|;+p>Pb$?0kSOdi@vx#+9H!o8z|M0A0~7b4Q)IMSNi z6T7qCiCEP<4`0H`@HFmIvR!O2T+9lXNYxK2G~dXvd7qJk)SXQIM}R+l38vC}FcV4s zOd{4OFl&JX1Vj?egGV$mDN4d7QunhJ9=Kt%zZ^lhh-P7d=;-831(Z+4u$^L1FBISpBS2~pDS9vZ@#fuYydRtpc25M%p&#Y&gcmJbDrk(4ZPH@8=RrR3ny_%F|9 zu7QtTuXRrbZ`Sl=x?dy4T;UDmfnq47?p$isqsp~oOV$3;jvI`!c_0gl>nDtxm?TPr zn=Aa`T^&1+v^(w&XP4N`7`h$~^E=*h-ywmlN*Ihlc#{wA9Zj^y-3-A;JyW0LTbMed zdc|v*(8i;2P}9yqRn>FB=K{zDJEk1h#+sKmOEWCs?{Y++OBn?e3yNus;8k+eEMen= zWTd}oI))qgyI-H7zFyqT?y4P7@1B+=;7s5_gTBZzz8j~G-BhVVqkzD zTy|XE+G2v|2I__IpU&jUW6HX_YhMEnA5X;$0j)Bg8q5{GVRdcnp0pPK(OpaIjX4!X}Fy3Ze);0CTDM?Nw9 z-9E{(CqdSBhr_Ar)X}-HjCm(B#^F;hR<1)%j#0XWxk(We7%Cc?cXbT20&|jtot?x6 zo5iNp9}Zg3#eR9uLA#56;Z{hTI=EHm?$#(lAc~Pj#pj{VQM2$hI&fwVJ(ywm_lYZd z3(koA&GUX2!j2M#h#3(>cPEX>0Y2&+x2GEml5Xz z*;<)!0Q(cl7q=G%f7d_sWcll+Q>zumucfw)sqR^qokXLn)9I-hJAIfb$g1E3xKb}j z%X$khSw0+K2+o=NKEL38yjmf>m%!Z%*6-nbjJLyngHF%y!vil|C;>LMpP!$zn#Rmx zPggV526qt#3>6(&OjOTjvsis4uq)~ND~>sYTPJ7dr6Ws0CaH6UMRIdXEB3|L+X$PT z&|b(me$h*dl8Um94UKUI( zz||5GKhaye2(s4udQV5kUsO$TK6<%S!>v7jQm$`w5+s=WEov z(~XK4w_vmNo+`&*OdmT~K4IqixE;CW2u-B{T1IWxD*H;pU6M|&pfxVj@j}S@z|Lw^ zp78B3a|c{Ao@LV+JzF^&09Mds1wa9ur!Gg99&hxR4dK#dKxUPO;%{@7XNvws$gln+ zTutl>P|iK6e*Un?C&9{4lvts;uJHyNA^yvwTh4ne?eM)47ntlZ7I01epQ6TpCsQCQ z`~xfA?A>i~fpm?=uh@k9J3H5SxTc!iC+!jV+lb12@^J#7q5192d|E%Y 
zq#;PjYbpfaHC-c;l93*Dx65{;W>1(yaqp2)JQbVH9m|DMy!R05{y0twA)#C70e|t* zXf968#k@a@`|cYvoZ|04Y=~1+KOO7t`S_pT3!9u$3we+J&}2Y(sd2@AI8-%z=<((ul_O%Ifnw;hfis2Y?A2-euG;P5+UmDZL{$=U+f#xJkwCqkce*;W zp*S?5_dI(Crc^kN3P$i$mZa8=ISoev*f4nm>zbxcYKg=E7>ZPg$Vm&ivtn zMyc(DJ;t`K*@Lo-{sk3$T~9jvy4L@4B%H-QeIgH~q38r8lMQ*)!<>=dir=ZDfmSQE zq5wBEly7qSOEMP3m@Tl2W(fp4Z~rLOIi_bmEG;i1nv#&njq1m%o$@uJJ?|eJ?6?Xs zwc3YGkt5v<2W}5$a{nqn9k(sLeWCGvMgeElNa1BK3Ol=cMe0bNSVT~*&+Y#J2hEJ> zB}Db&)MN6t(Uep&;|x9<(~9ef#!cbdBHo}YZTCaDC*hc-?YAd?57!gu7(kjuNm<$m zNuZ;#M`3bAI(5sDMdF#e2%Vk2z`w;DVOKC%q(DYNYmKD)y}0xLF?QBrQLlTuS5!c0 zLAn&_l8_E*6_75G76Iw*lFkt+kp?L#=`Jbh?(Xgy1_sVE>RNm4ea?Q*`-ic-SQjwN z?}__+-=BMlwh)~W)NBD+*lO49rCOU{btj!QC_(mwgVXwe5}bW#MB7bbpIMc z{JO%+C?gV4;3Gt~h_3&bx;xM^*LpSRSgWr&*I^E!+_fb=dR^ZGA6FDpOaT&k_X<8wvX=!V|f z4yywf*##N1iFvk^Qd3!bQ2kp2gZysz@wVGzlFB@B9Uox)Jh%iF(1y;|R*Bw~84J(g z83DeAY0V-ea3+qOCfM;3mKzP(Tc4l9IX|<0jphM#j8L`*@}}TfcG1aaW(MM#+UT3E z`F9`Hq6U`-PWV_kP1ZK`vHO%Jji{;r#Les%!Ey%k+mn}TVY%m{nZlvSmC?2GmtcST z&%GVwH~-$-5%DvS!uuUR0~<#L!T;@Q-akET`Zrs1l@{8;5e^@&l(%Qpi|;#JeRRPP z8oR}{G#S6zWEF1U^JgnJi}d!C1hH#ISi+QIj?RI`9;8vYy%F1K<|zu-t6rd`8Jn7m zxJxnwmzyh(9)kcWV-bZTUUDBX8<*ZDHKZX1zh5b5?tPM@3O}C(Sl49jPt4D<#(PZP zn=LmscuOK*$=WDd*njl)4B#d70y}t);J%!!@h9-T4Hs^;faKo^T7zB^A-gbo!A+Nh zxfnl=t)VDa3yeRT2L<>OgpN(X&R~LaVvoq8{c?sQE;+!1{U<{89`!Oen@;R)i!Nff zy!0!{&dntQ)IJbWp=9E`Ur=Dd+k3twBh>jw!US*snwQWQ`sn@ zYhVy2a=?Sqo@yXCCB#PqC;MM_%hmXR%8~+!E+H4NM@k=xCzQa^qZ}iK}TG#Vo_1o0c zN-Jp4+N^u$`}g%EkDHT|YaW5m;B$HY&gCg_{70KpyjKZ4nQw(ko*5XV7x(B@^^6b` z6I%n-1LSzH`^F-)LW+@gLH8wy>9G5t3ZfXUrjO<1zdu7*zhMYohz9T!2Axq z7pW5OCcTkGF zL&D?j*tjY(V7xp$o!t|4BZ!#vC!Lv@nUyik)J_+38<|J34$c__?$@$Qct&gC=-yL~ zX3bg2?!#sB!M2N=n+y$n{53xd1V9^br$~SU~m<-#`-59=MSH7T*Clv{q(KtE!0KX5{wwTg6QT{kiV1DeT2f zIiOYbdmfr%vC2=@+mZ-rl~kKbps+BosWjuJ5xuu(3_d&bA3&}dQgF|fiDM@_XqMEs zI>shM-(}y2yo7?F=L02LIdILR23C4W7Z+m5P{JT!GwEvAvgl8FhFCelY=VmOvCKR8 z5HBCY89Ui|c2|Ciu)n z^=}uJD2BJB;{OqqB>fZ#p*MBy`(`C|IY($jOyA){jS(o+!MRZNHodsDEXnDT=<%!g 
zpk(+W%FmzA)kcv(xP1YHTMVUM%=HQV=<_&CAao0BiJbGe9j)@9QwL z{M*;qgKg=lfdjKnhX+$dIri-MzS@%oS3k0x4*brnR|mVa*X$wsC%JtFLlTC5{WA{P zeT|vduK0WQ7H5q*2OV#8*YI@*r{%{3RWvm57ijl_+95EOBu_q^n_3X=po`NRwzvz< z+tdPwu`^`3H5cw9+KrP09v?nPe_qGDo+hcjW$&dyZa{F+tv7sz++03)1wmAz5Q zlFx6KX#ePFW}e}Dv7PLpEnUtp{bua+rg2r`GFak`R;=UWn(3KeZ)S~Yi@R&2+wAp} zR8+)94q;$mASUboZW&#-SP+|coLJFV+O0JWBC$vrvk2)?i@>8Axpt0%jn2y7T zF_~=vfra}I(4-kn4Z|D=8M_pmVU;#r0{@X^ywa~@uwoFuTpk1OW`v`7BT>sU-)+v+IECF|`*z&>%f(hSFb+`L-5?Xg;QE{@ z?w}EP{rsg0Nj*#T)eY*0sZ=Y@i^m%3MVGS$hCiSkhgT(Qz4O}>9G`jg?zy?cTU>SA zS{)F+Q^cYzD!H1`*i_HXbf7KHi#NZYDfm-i)ZXNk)PAL5;NcTc$tHY%?&73^?TyZd zW{Ru(4e+6c@29#LAKG5E{9M4vo`F6cT)x%nAnbvyke~G(#@e@7aqU8&Ga|0L`SpO1 zT+s)^m41<#FcLr1r!b2mfkXI*j^_pG?4$;HBQqQ(Pv)00LZkGQuX#=#EN{o8n8cuqdWXToc50?vI04DZh z5)fr6*Y%vQHV+%WO;!^t14nRs82M97@?COpT1Q=3G3>;T?f&u< z!6Pg(?Q=Mo)KpMVKt^#z0oa$oy{;beyxzaiGezku|2S-wm!;l3snW>q>g}H%R94Ks zj9L{#+*~;SFK<4LR%nlTjt7u_jg5c%@Bf|)1`Fkks46LiC0}lPevFUw(*ZO%_Rj@s zK_cW@3-+yt7)K@>COKp9R;2(==6tFi&G(zU=VknysY!9sIn$eRCqKVsqa>iCMpQr+ zDH=S;uRYGWovfvi>1J!{b zgv5Qwco{tt6!MO8JoA3QoD-(0+@Okc^mFYCfvZyKTH!ux_3}~!{$bBx;j3-K%cYg% z`MR$HaT?z@mrciARNdAH!fN=+6+c_OYnI!GFryd}Ei)Ym-P(-{eje|VaDGe+t#I0P zInbFch&>qW4Gq81!a{{38iyY8YN75HncCgatXluM%WaLzd9^~3T1Lcqgg(2rkPwoR z?*~&@bY%oHwO?lx#BZaxcAp#p&7=iAsQLWJ2#m|{h~o=%?mVQ_#Rd|#h?7%P8gz;Z6pH*XiZx=ryqlx87oN9=F>o zagx)JirYp?TYMt$5HsnOc3^mj*!NVTj>3W;6la6*n@-2AYEP#6e7}Ln!&#s@CRvn- zZLD3)z80j$^R&D;T;PMSXDPJUrfO{J)yE8{x`@j!PBU6Joi+;#b-#>QT%NkJE`$~b zeRaLhTE|8J`zpK;6pP8g7)0_RfJRqbl<-X=(i44k(po#BSdY=(wLMrB9}|2hOPweo zXawu(wrRBer$auA*l1WHyn$kIAeC=;jl5OwfPb7mM){uh+ivt<7sxemBm8UTI$$e0 zPq5~VuSD`?OVcWxh5cvE$^Dy%SrYirQ*ezmUj*aUMnqT;R*e{O{hG2OLhDtxGY}R` z@DcK^IXXHN!qc8}>HEj~jhTI8T?`tyw6B_$gB%xtPQCau!Uu%7@I7Fdz4i?#Z(*u~Nw!&_eJ0A3`J)$=aSKr&=fWjUAxZPJLvDTyg6CVRW&aj*IjPS8v z*}uL0^_8rX`fc~aC|B0hyUC&5oF#79mO$3IJv2el5;Av%izybshINf8*MsU8{%OB7 zuK$gW-{otv4{$3(uLQk&A!PJweT9TRi_@}0d2jmwT3PWqJkr{$^W_ccS2@Ugp>HCKDvn`1b3*6OhF$j^z` zH`LDcMOBc?>rbR>qg!rzlwYh@=8R%V+Y|HJHu$*X-4U0XD|ZpOtjiT#EqZx 
zuzXEwZSuK@NOH!pC_o+_9Z5@}lv7Q96hgp^7~ulO+*Z-i*3KetL897KCIC`#!_Myj zv5VBB{$^k}IT9ENo6Qzu);R2KJCI5(xic)hT)A^MkvYAw;j~<}bY^vf?O?#SS7F;o z7Qw(E{hMwzurN`QHS=c!^sxm(d)FrTB)yR3ZCIcW+% z+4Dr0prbY2*8rIOu5hEUC+D5&p~3|mh5>%%>8m+w2@69HyWY+GEhy=%0@h!~_3NQIQ zP{@{Zrnq(~OYeRX2TjL1sPkO}y$xGJXlYN@YoKe%xT!S)`+Mws4RoP#F2cYK>Jz(x zH|w&^Yo@YmkjDx}wdehh0bA=21Y?VU4eiIgU<+B>Yuw4#Qy*a}T%8;ifl!csxZ&HdzFBYF!cCX z84%vodMA3nyih6maxikr`Z2~s;T;T6opPMEpM$~JQ=t3NblsSmnCQ6C@a0Rm$9x#l zHSl#{li(so-3k1$ABqp}%hZ_C;4`cHFXKu9W3&}M(oqWlnN@F*j*U1woTPPTkXvfmmf!0KpYZ)%b8H1@F?V*(j^2*ABRSe<4NV6fbVNcb!7NdO-+=V)j2G0;BRP0LNmY~YH}JDTPoqo`s(>U@ z9#f0(GgIjr($DIz&t9BG{4LnZ*!Cs~hG7Z0pj&6Dam|+z>&Lk^*rjLdHGk8$L)#D}I!7Et8Wkh1vmT z=ezNOVmAPK0)-`|J{41Ag2+vQ=C`Ir+)ry{0aD&?8xhv-}8_iQB{;Z!~?I9@`-yH|B*sIMQSX3BRo51B`a+R1p_#UGZe1(>6dc-%6 z*|SMFoPKIY5F)US{q&;dGyVfFuYMjG*TVd2`t38Sv2}hVtslWtKU6Mp=*)=b&pk!{p?9Gcb(R0i~Lt zO%J#R{nOLQU%Ut?+BmWYxj-=`gIkKdNo^gG?E{qcdkKlD_P~QHp>k3d=SBD3^4fdm zP=GM{ZYT*cR%FNHTbK|s>HBB#!np*$e_(lzCoksbG_T!dW1u_I>=)FA7fSUWN^Qa_ z#aNA>HEh{5HE7zLZl0qQ!5p!cX!o?_x$fKOG&hWCdv(y_$iZO`YL7404wOX&%Xd2D zx#asDD99x;hMwnLUg;bRhIqCqp{40Y7TvjqHpmsyjZGJhGUjou|7r?=HXUS9&+fi6ie+JC8i;?~;7r)`u(z*1X6cHT)tRzBZ4R!%8hpDY|Q z*Q|5UjFQ<{b4fSrdY++q&+9Ev*-C#Z9^=vXLk7buIzm08UMF!N?AgCxn$VeVV8q)Q ztU)luVi)~5)20EWSOdXc!+N=vuXX?=rgid;jmnyf{Cczkag=I0P1Ip9MIi(XAu){aPgN+7L}xLO&h1bG z@A>31%g1`&(}?xx*33K#5e=-F;TB3ZH6}%}o7=DUmv4b>dBff$bYv^5&L)J9v86FA zAst#^bB~{U$gFHUugjBRH*n8MO?`E~+g4j!8xb#CR_WRf@MFB*cC@KxhP^B-+4(sl znj{iI98c5!5Xg+V;-bpBN%PE@aB6svEJR*qGrsrAFJWdsTS?zn&M9t1s8|6R8Pu_n z12;%aO~?89I{(D-?fA$@TNE|Bz}P|8z<}xu0LsdM(j}=pDO)%OXzs5O8w0Gmz``Os zZ{4qhXtXe!IWZwoK}Rh>m735eM-CAj4qRPjd>{BEIFWneIp|jVd2R(v8$e#n3$B5 zCloZ13KNU3fpWi30I{~QiI`I0Y79OsE_l}H9sY|6ZNe&?3=A*#CTTS^HPh>7FW006 z)5V@_H%VAw9IJrIpsf!M?*)YFJP|Q8{1;6~WYy25ln#gioL0{*hIMCrHK}H=`OuJ&ja{#_wvZn{KM<4DJoS2jK8tqhhO0lZy!)d$T^Dqa znf;mY)kJ0=cQYHpYFrd{VaT2^xHNP*9YYJv@M5di^xEYjc!zF^0`a_#C;GmGD>?{O z7uV;j*p)Ley``l5^BjXGnke0|Z4cRRA>xMT?aE0;(>=2Y1)n0_S^c=wwc|J?9%lxk 
zrlyBrAAIQMeoGhh-GUfURai|83S$3UUQP#Nar`MJQLE>KfTNC{_LYe(T?*2CY{G#6 zK_jBA5pn)dXvFgbAv6mMi_RGE1%cNKOf_C|T+tI<(M=^Ep2Q~zd*XwT)EmdD&kiCM zj2;wx5)E_J8!>r*RrypK9d@#jP0+wh+kiW8KqTmahrM_<1QC8 zxD|!UuJ=3J5-CYtltesrzy4iUM(K5P=U%VdcaYP|@-Z~aEn3=>YXg9h4Xr(#%sV%i zUqG-9Br5zq&OhPd_ZdR!kZXcf?y6kZvIEJz4 zJlBpwQ5%+*$9a3mb|W#Gxlp_hP`bi$BC`xeL#rxHH_?3qBsvsZe!os^Cnfs_Seqi_ z(~UNkAIUvQN1adZ*SVv)eX(r0<{#@X3I-0#@BaRLD>WoXhli3D4&^pmrx^F{iGs2@ zo2^lWCv%5}xw@8?A>gP(KtyCxj82!TTi+3x^^k{7{M}t(Xldy!tN_L!Dq|D#=qcz0 zitb=b&2stasl(#gKp@o%lp@DH7G+fxJ3%$e6kXBZ)Wnqhd|8dIVgE^mtg$9z&eVyw zilgkd-yO>(wim5jkDTs+H=Q17sciQ{YaDh#F;-2jhkk6~|YVge?obN%Cb2b6ISx901$&dbLWqrN2 zhJjE)(^cT-r#CT?2@J!yN}pDwESvEuR&4(@^eypU^HA;^?0oqSyP)8k;wXUoFJ*@0 z=9+cQ3J~;m_sy?{7Y}~Y-s-}HKq7Rg0Nf5S`)zE91}4e1N$Yf~U99Al`2FkNqq}c` zr95l3Q8=;pE=1^%r!DjgWuXkbOWO0JA$7KoXyp63>s^RA%CH3TVOe?U&pc1_9dqbf zF6%=oDlwr|?+fxK=s~3QVhbBck3JG$5@m{j+VuoL4|7>v?2Ci`0r@p@z$|cJ-|u^6 z84|orTp3VFDO^QJ~3q?p4~~#X4~JhIj-R0w;+sdPqp@nZmq|$U>PdE6A|ub}X(OTEeEQ@Sw1BFfYXe zcd6D>GEIdMS^36WXP#(F?XOzO3x%X!cA5Zsa7U;w;5@hc4coH&r2L0NP<0Kpu0o_L zgSjV6Hk+AA$s+DQ0N%g=lgw~b&?SDdqwEk~zjfIO2;PNZFA2jcQxBf^J%4~w!bscq zA00A)X{cp3QGIuJ7r~wQhJJi-bOL;;ML&W0&XuB;nYKviH5~JsDT5*YGXf)E4%Q3B zK*wLSl}>&^>+RM1Q~(;tpuy}eIdZFU_2}B3VB3EN`^zij}GW|dX{@$ zFn^c0hy9(MuO|v_!3$(*(z_?uKtB!?dRrsTua`s#TT64w{uO0N15Wi5&BCE(-6ubt zCr(Jl=BLj~1TkUhW4r%FrB!mj#lU{ZS@oHJy|9FN>bxZ9?r^YMH=?^{lt4NDU=K@d zW;r|@`RVZlw6rl$M`ym9Ewsp6E}!Tc2?=>2&Qdr8KwiLoB7*wS)xn`oHTxBOeW zbti}EJelFMrVYh6(-G-r)<-;lup@~0@~FTc<_6>;rpsj+9ol(jeHXOi$Gi`30l8E9V=P*kGN@xjx22iy@Bn2UF!zt>YW0-kGD0EN9 z;u;BfLd0ZyGUlkY-oV$J<(kIE2!Js5k?0y7*%1(AQ3?u>Yp)lkc-&!Mp;d*`j;$>% zdXvNAw^0H*rhp%iC(;H9SVz)+rHS$Dggc*zCsT=D51P9`D&kZ9|6OkFhSFZqfR_TM z;Hvn-I_{v5V%0ol5lk2(e`C+u$?JM|#BWow4l;`o#p0;(_`ku#y5f6V=;K^KM1|5F4X0Y1T$ym5e zV`t0YOqZHo(+6sO_}s@RyGlmNeTP3th+jfyTw-J_rWn3&V`tm_2XSTlAZ8ikNNN~Ft^*(rW)2aYY+(CjjVbsEG*Vq z*j@uzzhw(3i22dMs(%~c755jU_R_K|EvOyc!1v?_LXX&4Kq2C^R@@t}eXb==NpcBAZ@U{C#J 
z10_@XmkpG!9bp6Y7>)sZX>xi-G}DeI?I}N&t9+XR7#0&gDBkW*a5n)}UQNjt5rQP>5`_FZ4>gIB#kaRB&t5#Sj~5HmN+BXH-{ zblZx^0(9x{vMnJIz@I;?KUkvCKS*2Qv0cR*wS=M<6cp&!F8CjZhV0|?oj1Be(o%~= zW*v9kg4eP541&fz05}Jsy8>*Mk1K6U;EfyC$1lW1l>PlBSG4+DryT;rf^hqmXgF@i zM91n*YH<`9H?afmU+@WuaHIjf?Qbt#XQ(IfT;J@AZMyxwN#YX#E66^GARzZ;V=$CZtRrXE`Kc5d+4@n&F0RQYC;gzwjWUDZG zPlal90)MEEkLt=K+r+!1EPq_3$>%mWNl9hcxQb>Cb!A`AiY5kW5`9NRO}sBCqH!fa zqf01_0q&1xs^yW?EV<-ofIucgLj^jZk);wb0jS@1VBFe|B2hhw!?b?=^K0FU@cckj zh^X>cr|G!JpS>OUWbxrYVQlGb4*~5*Dx6eYL{DT4)};{e98;jxc5uEVD@41NmTyCp zR;s2ZnZTY2GaPf#nDwfpbowUews@fAXlcS?MT(!r|kHYQkYPa(l#}M)7?nW;C)T_pxG+g;~31MBy zMYB+HK}@kUgSYR{BiC6~R8<{Z*>2pK958rC(;EQ^0&4r!mCsv>T+ld@G@qVJD;4(D z{GEZ!7?-!rli8}C9UDi-*gj$FA9w)F{42rm`hMbwFBTbuU+%yT4A6KrAHc=3Mix8b{I=*Ode2~!A!|k*U?!?G#`(Q zq*TYUe5?Bs4Xwkpwfpgw_7;06#&e5r5?_5VsdlvF%}m4@2rK_wCX^ zHniwFEcDE6Kki6_+XKio!$Zb! z5xX~E*qw=YzkEUk>+*rnCAi4LUqL{F7@!1`_o5 zgN^yguGJyLyR0q}OqeD$;|G)@0;+!;A@r2P@1=!V+uvkn))NTH`u-7ewvawa_r39V zcg`oB4Y%g}&7VX0qU(onDPF?&JLs8Yw&0W?G$)^ z{E9omU8b(tG}x)=CMV4e+StM+O4FlPRJNShxioC@3q>&eva37jUtPNk>Nb zI8^)LoQ*;b+o+04eIY^t6o|421P=*Aa4#>;){skda`lIwt^SRf2zx3JmvAnh8v2_- ziW(eZ$(+mn`ygDAE`dHCAN{Pb*Qv0c6xZ&n-n!czw7`e;gd`5rF-OQ+XEPJo)4K~) z0UzBSz0jsKv^1h60Bxvj+C>3=e$>GA1<)c}5it%pBHj;C?_x^4dWFyBUau{%O9;|q zA6?i~!XxQ4DGgm_fl;x3`pVDEgW;hJ(alIX^5)gPF;HZL6}53`1H5hHp1Kw4r$5=f z5lDB^JwSO0wpRQkp!b^*O{m1J5(6V!c^?3gx}dAzyJSlzr@*taV*(*qQ%h4R1EoG1 zW#pGRbDux+#6>`m?D;a!DCNFMNk}fX1igJs#vfW(?&+PQl)1DJkkr`Z<7#LUlu1Jc zVq&2nCZ^j{Ju+VFOa^q3gCz<$5;JqY(^0-!L|n8tKr%Qj7G*_%I~2k7sTAj_ekR&I zmZ%I5)A!JOilBfYiuze7O`M=l97OXwN7_5&^!zF^z#&B?+z2}ieV?H13Pi-!fd{g9 zq%x6xGBb3x>AwSE-|JaUq&3d7yNAD5;(%);GY=&U8@Oi?a~u5t^j3da)8)J0x&}))t$(E zZ*?_%JAuhM;r)I0uh>K_&)vzYMIkVX;p@(y&&AwtMIRUAZ($sY-Ze5JaMlwyH1w+! 
z)_V7d6Qz3nS33R9#V_A{Jqs|=gQ+%%@*)P4EFw7|jo~PoGuI_JzZ;Mm?o#{u5iGZ@ zzT>fUEV1DHMnLj~zrLnD(QI~aJJSG#M>qYAEc5pVHWFM8hNpRtE#-2GhWC0>8dQ9m z#N7NyU5}UDl~yS_lU^NV<57-BE@7G%{z5mu6eviYTYc&Li{}n37@BWesZBxb7evEeBJ-;WKTWpk=mW0GPoiRdysq@_WQI`OoAf9xQTXOZ+NE`HNU+p#GoXSv6qeBN}$Qzd11Qs-q0a={Gl}} zDynaM{7nv{Ztt)#W-bfufawIL(i45706qm})#10CHncARDb(=p$5GYNgS(t1gH;n< ziVfz>`Nnz1A3_?)+uq{__szOk-TTX=rqF@Pupx?Ig}Q5P#uiHs9#mylViOGYc6r>p zb1ES&-lS{Q4oa@xjs|xP6s6Rrks|u1QoPt-mv&ru#7{%Y;9G@LxYP((#18uox}OmESihOeqav4K{$EYQlT#Y)pC z>NSz|*BoMv=sJ=c;yc;p0CNz6bg+)pkyLL)OrX$6adOAir8~8esaiec<;Cfmb?`4a zACd3y1MdtW;J~sVK*T^0ux}l6!oUm#w%UQ#;ReL%G+0^~gqy`j_W=Sm7BVnJBa*@y`r@GGTU+%> za_5Wj&kw~E)N~+)j_xKg+vXtoM1oKO{-520UHr^Tb8vDG!p)TS25Fm!2`Qp$vzEOR z5iHNkVeZ>HqfrZq#GTMJZxs709s^AuPt|t@MlO5rz7B{2jOo2aP%S)KuIelzzc%;S zPy}lqD=N;7&Xpoe31rhPok&yLIKgd zMU58tA`xH!@OtY36k@usGmx^!WqIxSh%Dh}_HjLXhrKTxUeV0D@5x@Sg{kc)bL+Gs zx@_r<Xb#rX&FEbq z_^M=v4MYZc74oNZKLd^91p8)tF#$rSE6;^{adp!l!21J(i0*+WQR8Jr*-Ga#Pc;G` zLLOS(pC5U6_;`ZNH!VI3mKW6Huk|L2*c`F}J?7Ors;?n~5gt(@?T@x4-oT z08|7$=u!)6pQa_Z6<+Pre9Tzb{}~Jh5OXPqHBdhp%Et8!1Ca3Ei$t1&pAI~mjCV=r zdOw(pQh$IDv2@0;G^ zWkx8c+cPI&eMXjhltXhDP8Yy06a?-bKgMb*435@Fo~`8aRiqX-)eF@lBFK-ZkPXge z$pBVPYmDnF$I{$z#}L?c-)-OT!+kV6^eq~zGt%PPeNC8-h5iSg_4PDw$al@!c0e%I ziadb4YAh?O=-AodGK8kvF9YxZE%#Z&-Z z6Tmk$xiq=IB!{b$f&d)>y~wp@l|#X=8i~bWhj4b)?+K&eY#yDse~zWcJAUiw>b?<_ zJ{M$NDJ^Ot3kH$25|Z8WvlizXzS`GXUWFy^`$q>qtZH{wW`V^=naptGQ_aGjFeABhgN1M83%6tasc>uNVG3c-+j;YIk~htFfd+I#$-^pT2B|x08|!lZ!{9jTHltGB+IUYeLG@jeqoa9!_33DfEf% z7|%$1;?|=3`b~bCQo;?fZx@OYPG-;%=O46IBxiM2I1@E@M%gylG0S_x`~!b3c{?!G zgR6MY*xY1hr@XF6Uf93Xr9qCE35C9FNx}hI0pL%2d!0b*PcgYYYlE~s8}KQw|1hL|IB~iw;@KsW?qY- zo)r)pPz(zqN$|E>S?`2{Q1;DS6iu-Shz{0WAm1 z28S(6Eq9e7c+Bx%CBACgt)lL*mXrhfrKkR!XOUjFTJx) z*ER=SSiuG|Xh5@<1RX-t801cuMOm#p&B%N8XEZi8B~kgTL1977wN+@XhvU{EH#I%eUPUF=3Cc)PIi@LxPDXrSSzp#5 zNq)w`Xhb21F4FsC@OVr=W_n%uh7{Xtc=+hhSIy229kjmAFYG?# zxCkQ@x(_;c{@}b8mX|}bvmc^9=v%U|BA9A_q}M_>JgnT+?OEEhVrFiJLrNMvpMw2H 
zMj8Z>kf*2735oe-kku1;oZled8-(NF3>+Q+pAX`sRtTZ!P2{J#^~Jy+U7`jGDbJb$ zDRCPzUU!%;dS(K@QL0;?GUlhG5O%WFl@-A9X+DIXJcT(M>zDyWM5 zp2ca+80BGrjjX(US2g?2Krd(^{1suAqeNyA4t~#Qg7&(Dg&D*MHvRbqEd%^GVDnV`m?HE1xbB zYTgEk4{c1LMAS5P=Y=xDwmBi6;$xY+kcM2UolFM5)+|dX8}%hhc1|c6n^m7W3Y(6) zu@wZh7YI%jxwor=DOgj}>`gzvJBt@KB`CR|939Z~O|+kH8$Yc}u8aItatva<GkX2#YItP#o*%C%gb*cfjVkx zY6*FuqEphfR(Lo#IJA5;sXq1dS#D{ma-8=^3#H63J8RE;=z9|r>M_Tp zuUkw6&xNI4w@D}m%>k=C8EI>ukg+`W@uFiwOcI0i10~8-UNAuU1#|itqyucdw|Re| zF%hG(XTcy)FnibAbJhioXIy<=S%ER{J=y&DJD>D4o!5iY-0p&!M9{U+B9JqE-*O(~ z;>*vc|2+)TP_>6=k`C7P05FvjyKY_ILpwAa-Fpa%N>3+zxrbWW_~*TP!)N zcg11AcD%#6o~s0ftax^$EcwBlEvSN1aDAY0=iEGQ<-!`R^WdU{Dc65^)`_?tM!W)EQ$O|$TjwyjQdD`uf z;K>MZTZ4#D)#0!=QK&eVW&h0czDBiFS4X1X8Uk}g(+n37z;{5jOl_IyEUQYxXIc1~ z=C_w0Vd8<~_v>n26LH#>Hwh*rXhpIT6;sD3_t33vp9Y={efakBsGb#82T~pX14M7n zSFu5?Hh(_7qUC%LpUM8D)ro-MQjnVvzi56z+`cj5NAIdB_n5-MSniZQSUp17cK7(_tq4sN+$Z2=nIXxk`J zc)`Q(r4I6=#^Rccp<#x`+4aST;yM6p2}YB!OyS1v0G4_{ezs#{Q{vuKs|u{=>sdi0 zA$|`g5m#p3eQ;w&1{~uW_dku|dL=VOcFFL!R6zWd^+=pp;%|We$mvu7B8h_ z7xE3USN`;kldOAtZjb);0Q^6RCy2?RLY$5i$-KB83HiZE6S^ZoZ9*)>{ekZ?=1qeEyMXa0s`LsLjG7#lDx0=+$#HxobBV&({O#Q zn>2Ds;l4OjMSwbOz3udA97~HAOVV?UP0U|^;D;f**hLIT={OxmY$3yOeg!Ljj-4K1 zJhMB-MhsYwV(iDuT#YK0c!>9uGU*stpZ}f=4*V*QgivvLW;4R_bQ3H;edFy&q;-!GYy!Xm0!#^xP)8?7w zS)CrCE3eH>zWCp_aqjsxHpu%tFw6Hk%E))>o+IE8Nk93QwspmL59+MF~Uo!fM@i01N(ieb(Yp;;@IfOo07TWvNAC< z8PYdeGgiBA?txrO2k^gTX!Vu>kq_~_ z*`!l!4-s4!(-IuRbEyb-hy3EUe*}vE(0{*0J(bB{uiRMNWS9a+t(>W6AofxDW({*? 
zZ41I%LNKoh5uirVI-5H~O@d#^?Ki?9olOw!y{~4!6m18T;!^P%!caSHkKxft8cEtk zPt)qrKh`;L|ApriWsqU$Z-^Q|h2B2Y?!3Q9almf(WAM`|>N<6FY}xvy{|vTPEucx$ z_bK!M9vd1yQ>!4OYSW9t_Jyyvvh(tQRLlywYQlN~*}>(wsa^)ROk#EDA)wjT*(3x=^@)Yci3?6eR+iJIEtB)d0$ z!y0CrM8osQ|lQ z|GKG;-8Q-7?T>o*n(sTg%z)q%8+Rku^c$!K24tCik>N35{ak0YZK1f~Jp zicSjn3x-)UxUv|(*4EXos3-+s?lptTQNSQ;Tz=uWup}o0C@s9mQdmvnMYt^~9XE4w z*1lUG@s(sw4?CkY&uCX!m;Elt&7R&C1l8LY&*l_^6ZFaV*+%Cohxc~U?@A$r&f#zX zk@EDF#b`{qO%at)_)Jz$ur>_?7!q4s$$07N#!K$=t06e9O+cLl3h8@XgtQL+PqN{$ z74Xl1nhkVP1O@7sPI~x??qti@;HGql!*HBe`=$Gn_AZR;?w6A1Rue-cjF0_HMpAdf3mw_*>J=M zko)*P^i%*X9<;+>@u-240H6s#gqaZGOaZ)SyoULv#aKZ_XXGQor&~hPC6|0%i{icv zQzr#arpwd?uJf#ab5~|Mn3(8RD73_9*-#^NTFud_zb5~dixAID=lDI&GP}@$sSUKS ziIx2NhdMt%Z);h)RxM>TY=`b_YSx+}xp^BukrcFFiW(bN)}m%bhJ>Jf_#ldNpEP(q zyUEJ&EQ-Ks8ddP!&w0X7BEkQ|*joU_)pgsVNwDC-9fG^NC%6Q6hd|Ka?!h6r6N0-0 zcXxN!U=6{gac$lv-}j$$&wckvRd=dLk@V`l_nK?YF~=N(7;x3yhSAYTzCD&#Q=>db zJU8L`*#05)pU*tri|_2f3#(xTpalo3jnIftY^a=Uw*31OK?4K(D{F62K|oJq$eE+^ z{NhRy;I?zw=mu0*qksA#fDDZ8eX(M<817!15b7MzU_NXh7hU=*%k-;auu&KhaqwPH zWLI1&iK7&##{PLKt9<5`=cBN9J`3Wv=nOk_cypVcGW?OPSY(3KpjXfozU?J5s6W-$ zPAp>lU$8hp)#?-MYjhG@;ZK=d1UJ7lgI@HS)k{)^ZP9>o>Pyne$*tSN=NEDTK$BW! 
zByk0dB6a)$rzWlA|H9(-fp6MtQFHdN4#5M6Gc;DP!K}FhLzsUItk(;EHN~~H1u&dG z0CjGkcPX*NXiPRh%I(`V_!?6jDH*|HJ1@-oRnpl7+o4>aZ{(5Bot%XR7ZO}*`DVqv z^ZESh+~pM#$BD{G?tnYL^XhN%VJp2G`*R=Lz1{|w;BPkdxoq7;(WsXNWiMBPYf?+w z*$Ha};IY*WNn4HIzHra6lpvk}`5W(j=o~up;N=l5J06B5s=R?`C-0di%#P%1N{iqpkze;D% z0u&xG6&UcKe7_*l8u>NazNV7*Mw$>fk6E=*H(M<{Rb9OqMFp27$`o%sJi`h5{2~2x zu6FXG7q<9~BH(d5&2R37gRf4#63i`VZJVUE0b3g_w$|SBj|X-zGNETCpX#1zWWN!4We=?`8k0g)xp(h z?66}1v7(}XEkb)w6ob{)C#71A+P1e(o78y$Un|aSg|%v!iFgEC?pvn#i}Pl%Z|g1D z+N#SNEUwqME#`Qzt^zSHk2Dn86{ZDSN15DO0@xQ5$$Sjs?w>~;xJMsE?OoG%+#Z76 zfYcRmo?Uw0mZtL1yIVeTu-M0`a`{cYSoc6bJ3C8bRa`BC_#$>0mA;~=q{Or^C7yg~ zP*dDgdfvUWr>kzP(zZI@*N0qRUw>X2(3bnlX>NHr=c_yAFk7Pu*x_X^5U?QV^S=uR z3__Hl;erMYMf!0G-Y)m~OsN_~-5==6F(TL9eeTdNeM|t>r@3DFuScoIl$sBUM2baOMBeibf;%{TF>kF8CRcL!qZwOrVwPie>^=ptwvCPBv3 z?2`yD)}_eYDnclsKsIMOP5^(?f3bLZ!ex@uOQ%A4K5iz0BZ%JRQ*O@hgzkQc-$l;S zH9^b^r{B|@D_&GyF4DHRW;5+Iic&4nf30!m{vFpV2PTpd$9|p;lUx*P(|kEPf&L99 zo^iSppP*$)X`)WUMBg0Ni>6>1Ka^bP($Y$g35UxU2IB)jHzfW?Sy`Dm1|mC2g*nuv zKA-CDTyEbYf~T1dPvc^CK^F1co%66dcL-HtvE0v}6{DS_vjfA^=<{4$4kRPB#f9Ez z%aSXheI{ESFr1au)o~A>@oU(xsxco7?~m}@Nq%|EGUvqk{x1ksh-sDY&VNBvDowBc zss%;jR!r_z<>xogE^6 zt?p7(ZQfUPuboD3s|chMvr(89=3ft%Ch`^zjV!s4LnrL4ts@0AL>#q^2R?o48v7B9G(%x z?8FcVq+0cC$#Fp_7*Ihm5U>?+IbMo7=C9Am$w5N7MxkL~h}mu4a~}g#K$kySOJ9h5 zmh>DrGOLSvF(h4LzNp_iEn3nHSUz`>V{}gZVg8H$8=!u4*1$X2%19&}`C|5gox^C6 zwlw4Oj-YX`(Zbet!n*i%0z-Lrb~~UP)GnldUfR;q^5{W-|4W`AasCM$oq<6FO%Dv% z>!ax3tEi|9KwD~Q!8`PRE15alB)T$>2Ry$4WrNSby*cgeD8Lxv;1!2fa(sMb>T(o@ zzC|p8U`dwMW4{A`<1Cg=+2Km=mQR=Xs~beczv8qfr|ou5smEm>?YS1R`#iDH z3-z?Actg4TICZ`Fm4Cma>t8p}K<|nVdnvv1OYeC9?Y+F5wpo*d>HXk@po)q~l^H9i zU~Zl7oJqC*)31ibn*y#<>gtdI(X}9;shr)KgESCDJ@l=sbZY2x^Wt}=M?mC4#LLV7 zlfy?9&VDNL<`Hz7x`yokU8W>mcm)K=l+^JjuT6qodwm(S-~SKYTs$j9H4piCSv|G# zbCpy{GoAViGZl;?th?z@+07a#vLzjdIXwgzeQBx0{@-wD&U@9pEfNzqY zKPP>{Rv~$Wk>JfYzMwC)m)CM7hty>BmuPa=Vjp$WlN@)jbuyb$wsZ3%sSWr;Lunn{k>%z&^pXVl9^6o`*oFTO)0We}hiJLZ zCb8Cm7D?HTGP|Y31yjOUK{F0}U=0KirO@E!R#l%&{PAh*Kckjr*GJO7Zk!^8e@3L` 
z>jLJ9#i^)caL&mva^b|tXZ^|sP>^($(6K(X;pMd*3+r%6!KbRxF#^v6I{Wcz?3pKB z)@kKv23ZV!`k{f;i~QmFwt(fTHsqr664m+Dl!gY>1N;M6UD)bV-7)4hP z+3mTf!T$bI8tiD$bow1_LfG1p%s-%K`*>@M4CsTC$|5HxB{_MH#%Xnk`Nj)L214>z zF5R1d|Ej;#t|ImapdW#j_z7zY8y znmf(3@ux42tUEUeu`%U#e9m-uLEc9%5cCfiN3u&k?;V4OaB z-;n!Q+RL^PN}2l!nLxSk`yn(WviYGfwXMzAtle>*%JrPAvF8yW{~goR@oEft%)*C92AYxY$yM79{+s3R4Olb| zUxi~c^}Hyh^v%mHHXYK>&gm^p6#C?58hrDz{vY0BA$l#YF8oosV4+02{B!?2KvuV{ z3~ND_VNqpkASpkx_>tn&Y%X$n`eOv+iphq-I`t)qIl-YXM|i}T zXxj@*4DEIQBmf6xUX66Dgm}g~ z9nH2(v|gNDpP8<+5C=dqU$EparUm2z4Co`tWr2l-I4FdR;d!|s00)ZNXPyg?=^1pN z4n30sB%MeFL1bRN6IcI?1H3-u_6i2*Vq(6Nc(T}x0JCiBWgHifYJo#WjvS^}zGmXb z)7?8b_-6=UFpnr4n?dwEELXDw%>LtQBJCe>L^jYmQElcNA3wlykMR8~Q*CdLEULLp z#&gh`2w=(6)1h1b6eb0}<-b@_U)pbG$P-UR#4u|r{T&A-?M+1iKT!W2MF%FboCyH$ zwSQ%cZ}6Mx_Pc{JFpD@V0MEbP0m}C@EV$GPlp|eO0J+6)?COokr3W>GjsrIx{YVwy zk5c3PHXuYUxFgq1fe#f40bX)%NA?>=N$*B_ux<9mX`hKN;`WgzD!(IrjMCQk#m%Wu z+{@=BPt@C@D2Datg{FL)VjbSx(6?`516~{FbZwgiM9yrd|KPT8b-6hh)#Uk6wdR=zf7Q8Ialx06;gTZZz?H>dH&FOYH%`(XZV%i*R>$J@5_ zaIM+7#K$=bk$N0k6onm_Z;yzuD~gOA3{}Z((t}d@gqYX_30%4Q~!}0`9TG7 zcMyTV9e+4cXa)<4#lG>o5Yt-A(Mk%1zVGgiu|J4W7MehVAa~#3eXRH)2DRrke?AOX z%gWgBn}?I10azpI#={BwxWYDX;^I||YMiBB8)~dbiR_0YmG`DCp&CBk!Yi+u)hvnq zaA-P@mXea{1;{plf(trO`NHQ)nn?%5iG>kuxvd5ADZ%EVxqyipF?Ko>1*bWs;?0;c zJ`ixs@^OCdm*~VTkrcJBxjp_p33q7t9YsP%4$iXT_m6Dp07=zCfyTpU#gy0QuRd5a zV5D3&FpF5%%2TwYSHSpwgEpSUUeWq=x)2kZaUytN#SW2M2&63&FnuJj@I_cj!lti! z+V5~$$wV>oJ%1z85quDbEin1SGYf6l{#m&QUMstcXKL}gEMvQSd5V~ULk=zU4+#tz zvFDDDzr!vS$v<=Oadi7(lravLfy)g84`OvWm+om9a{AD&k$R*uH?7o+4#lcI7W!!H<%1MbF8F_X75MN{qY3*}(tON8My1J+$K23;y{hEgK z#Q$a5DPjaaI%iA7%iE5`(#pzHAYiZQvU3$C<++ngOhST;iAf*f^**`I4mFAXhS9EXL%?W2PDZwUn7#p? z5&ZPNYXnl!fhnws$t8H+=#;-CuKTrBcSnq=zeP$$rvJ}C20E&QpwP-4c!PknS;|B{ z^}958;n7sq6-u!E&q^m%+^>BG#-7RCZRgQo@DC63tv`mKRxyPP5xJ9vqR7;cIRY+Y z(6BqMk&XCN`^{MwGV#4lfzA4%+o?JHJ3%bjJx78hiU_2&n zSW@e253sv^L?8RRin6Yq-xbChk;)N9DWqe2O2$=pQ-%4ZuG-!*VXgOq&HK{(o6&sq zwW%&3@}IH14jl67LVjs!Jvr8L@4jTjKxxn=4oqrRyZyj?N=(8AoyCftcQu;M!H%Z! 
z%F2teBe8S5yQY|>!*6jMNxXt?)A&e%%=^O(H`wUkm+9U{aOzi=FC#cb3BDO;CQd^3 zT=@JkyZOGK4AMpfBcVdBWfk^e1BiEaC~xmOH3la-=&qZMIB-qKn7y^jUULvR-wl6B zNJ+Vi&Mkm5dr;L+W99d4bk%DP~UhUzee8MhNC6OmC!e>tX6)Q0}5r)xamsxD}oM+Q? zc-A2$7Q?K}7QYzWh_|S3_)3&ihhQmtyf8#*XHNkJ5UXBoEXlDO2@Z35@>b>m;LJ{+ zRM90{@u-A?&FE`hhY3*%4YgibPh49{5@^&#C~`f95CVo9)D%<{Tei>lzChiE`3w$O z&nYfOE2b^J-B<7lyS_$wT(NydJvi_z|I|iMG`6eQ0YRN354k7ua_Z7HslbDGbYeM2 zP3qpUBj;HSZ9I}vHL&TtBU(s^g9;FRf<+-b;6B?_?C^tU?A>_zw;}h)gn-d!wht6k zi7d+1;WH2%2>qu|vzrN{vVBOT-ws@et1YUZ{M?oKB_CZ#hEX|-9+Wl@i@DEN`JNP7 zEA>5QE(Wt|0_ZrL@L*vfW|stEePwMeYJMIf=vpMe+iq_D^oKxI~a zkm!>K4$9TYDkJc-UDhrYunwf)&qeD-JK*T7pP|rnr>zeK9N*1@W|}#prKU)#zV6e} zs*+9_h>PRt-_0OG81GwxP-YXcv&=iU+SFCC*M@d&vYM2=^a%sReUU(}gu*`p7S`SH zl9>!*j%k7*xh6QM9G0?SqoY&PofAX6(}H#h@s4jJBjpAOd1Vm1C89>f%*|=WcQV9* z@d5#LeIMd-{WgEVBo(i~f0z}?@l?sGx2*_#nZBBl|9HMe1q!rP`LBmQ7|$bS?8GYC zS8Tx~Ke+!b&BR5flt36S?&4Q2JV-LO+KtB7wMoV3>1uRZ%x#$Ie;-T?9;6kEmF^s` z*2lu#ml0UdA(Vj(_aO|-g5CWn{LzyoPQA^FKlhpk))jK=Rw}T^1a1=&lgWS)JVYFU zP&N7mGJa1o`E)AFrD8b!zV&TV_K#MzmnFJBV}lQuMnu6 zF5w}0hJ?X}yX>yGE zG|}~|4kXcs$k6BIiGT)Ov5KkT^6U>OY5DkUp9g>J&nkCS#?>sQqm3tGXk|`(ZOY&O zjR+~5y_Fet`D#krZ3mN^0k@YmL~gE!*j|vn!a-gaXm|I8H)eAxJK|{q?npSTIIIaq zDMm8pp;{fj$$CJs~l9`sxw<{eFF!SX;xTWgl1L4fV;oc87-TO|14hljLYX$h~-nmuPVbKpiQhCejQ!DEcyGV`Tlf9!w1)Q5~#ly_Jktp>_rVsMm7e&PwLezu3 zJw*qopM~hTXXc+N^qI=L`Sa^!%3Y-=3BY1v0)Ik+K~6dtCCDzLavVl^{h`VY7|e;u zpfN;OZipMZYP1c`-!=SVs{7PJ+`~-V3 zILc+};q&wAlxAm@nlrH(P54~}xRlf^c09{UT1f$qRwVSi0Ri%}?`pJMEW&vo*!m zL|2`QN-3{u zMO?$ek+AJ;Om_xBsB085-^gxw@}^8L z@(hqh&DDy2AD+FymUcpBQFs1h^zY#dJosk;1*k!DD3k~l3VYSuQopFiq4C#pS%n&S zXpdEbA5wf`QsbgxViXDaE;-jceG%8(4%G;SJfxT!Y`;FbUNdHKjch zrE;q4KO}N- zX(#dX8;t8)9Nd`2WI+fF?CipqXkWkAbCjNs7rxEM3fE#gH;Q^F+|j-YRN_XCA{6?x zCH+a&_>>DR6#J2Ge0;>Hfx(+5vh0>aAcHu(Vtm2jKHDb;cV2zr3F7Dbo-J|Ns?SUK zgnSOK47|W$mnRJ_SLDD<72lfs4Km>vFVfGl-MWXe!glH&s9i#S|F~(g#pCp%UnL(t zcGU?Ds9Joy0mP!-p%GK!=&KM&uDuant|D4EFGspR?LGdw@xgfr_yXac`(S1o9t5Q+ 
zj9%BbY7TFp3QIBMkYS37g-W-PP>#UNc80*Wf)eCj0vQ;xUaH`bFZ+DM_pNb#pXhZE~X8Ij_l*$KJeN|CnSR|~g()%ubki+)&*K&t+`aAYhS&q-P-q8G6dw;AD@R3RV^TUkqe!VbJH)IX246v`~}+;y9}HMT%G$jLWa8)#U|i<(~Z@u`s|WzPr6Y zY%|{z-vd-vyn*W0f6h`|!h?#+R6)W+n@?L?TTw^n_hDVT$q+&W2{)SO&2e}6@62pK zss%VT5|Wd{Zt4P<)OTID?6R8k>`4(&*>sneK7{lm49>UWfa;^hxj4ACv&*kVKIGpw zkCZUwzV2V@uEFB4O_P^j0GiI%ZEbNz1WXKRzk1>v99UIZ zKeX_?ACAk4?!F@yw#PiR_0$Q5J4wCGntZ{0voT3cBgZ7P+-Zew&jf+!4-FUIjB7gh zLE^1FC@+6yCn-$isf2^DBLt6!t_}geO}#Y^PP!~^@BQh91<> zyF<0Bzk$;V*G+!y*Gm0GnGLrizm(~>mn|%`YWU+Q5<{a9o25X^52OiMV8B6Z_yMEKf?{2X2r?cq!N; z%vR*706*ZY=N6jbZNrA49=j!n{3xE0NRC9fObB8{g2qN2B9@HB={sPkXQ*_(hk&>z zZOPI8Xc3JNE38~?IfKmtUugZRSXbcptkXj~vHL5JbFeKrzbD(8+XWV(u>Gf6J;4FX z>gsAhuaFE#??#S?hY^6HH3)+`dZ=n;EkkSiuXek)*yOXt&vwvnkGgzgC4Q@hC8J%J z`HJl3glwNa&aP~qCz29{^M2_Y-$Oy%hB*s478vT#dMc?Qc%)SuxUcnmwROpVbAm#( zejn`w!hQqN(@3^iTNQSHq^Y;qv>WdxVp9vckVCwgvwRZ!KF1onyRKw@hD8};_LQY( z!4!R}K8rTOH4SA`4f2IRJLhQL?mjyyv{s(kD#9yNy}#%(Y2*W($Gp9}4SO$f?S)?# zuD3*7H~XO;RGl%~HDYXL3Q2|6x=Z7>2q5p)8}#kyt$B{Ku7w2o`oSYUzk$Mrz@Px9 z$)p9@zYCYzb-qRzalfxgky;v2=i=B?_w0m8|u^i6A&W4M7Nl>Np&DOe!~ zI*Pd`3Edq$k=SY|-1@Y}n+Cpbk?S@jeZlMiegj$(h7h~EYkYy++GCs@+j^4#a!tTg z|AL~DyB0s)W;&^`FU1l|1x(Y>VHHxoZXZtB~E$R$z34%_tpxPIILfiPk-lKgHP z#Q^mWY}To&Meq<8P%9}^xDYNq)E52hMnu=^7H$}UU(g$;`L2UeU^q3te7rh-&#RjJ zvAf<$dNWgQ=*uG(?ISyG7S_#u+dD{5YCJ2MMX~usZbDjT=RUxSI;O=;K2mq^LY>Q( zs0DRxLB68b!-F~ocPq4E9HriBr>?}{3gUp3*ai8a-hQWZylXi{gN%ToCqLj;^bo;6f8vYK1;`t+$}tc2#7v>RNP3#LI4PB9%q)hR1{)u|^?+Li ztD4O4&xwx>xf4`P?oqnEsU>^#9Fy$sa0367*)(Xx3bE2r%y)4M^wmZ#;g`PEh|*wo zbA4oe7vu|^mF*A5B&33ZP(b*D;~hYeonWp308h?R?v2$Z_C-jSCxE+DYIfT(o_=qQ z;udPf1B2lg@9wC1l09M3(9nPiD1O?-^C?op?s|2f*q1vDX5h=0a8Te7OBZLInkLKh z$6xwQ#MBBzJ=dt~@%qoO82_Zu`(Rt#x*9(eceF&}XKkItw5}dYX;F!Lx|B~DRl6U$ zHq9m7`&hSkd~<+IfWt<1VuFhJ0qiN|rU3kU>T*mf3&{|?W1^rR6#Vlw;a%pi`i97a zT=?D!Cl^O0kb9ZYAUq3y{bo#i=5RXr!A9jM`(9l1zbc0bN!$=4J<0 z{-xcdp69HbAo<1t{fajY1)%Ce{{#>pU~nU-XA0=}i0m`k?Z@qCHhpF!UOK}?-@>rp z5<>x0r)|*qE$i@2B8fuOa*htv4l9qc)c}hkHtiK{AUPQbXZv?iRIfJhbpBc2A8b{8 
zWyr_!k`Ako0{*kuuVH0OUoIkwS?rR1dJx7wnG)}rDIqjKx&V3I3z`@=9Kfkp{j z;Y?6KNJl?)rVmBirP7}Mxp|Z8I`PqR4aQZQn;3=Bx?g}nSgk@Z{3x+7noA2&%Z*nQ zXuz=-(sgst_n%zcb#sa9Ki@!r_*r$jXFbx%y40dJ(+Kvho{DyE(%J{nivJYOED;)e z)ob3o`=X++If%2=o3gWS$P^He6G`91y=HsrF+J=AoCMC!Gc$TFn1$%4+1)nt&8nUc z+os9ihg}1O5v#uY70mXZ48PwtqK=Lj*IswL`W`#c)klsK&#za6-Ea~Uv6P_~3Ec18 z0W?C#4V?enJyJ2*l+m?o;N{ zVxy+))6;A$E}yXbBk&f|w?OFGGy4h33tzUy^2LQm19sO;ld6u(1)*a<>510zlOk#dtsjR8;xT!;HlKYlHSb`%!(C!>yvZ50p7V z2`LP1z%}g*9g!$g>oRivU}d;T^3R>?VTo9He~})CT4!Ks|EB4k;!Ye{P5Ju{J#x5( z00`R?DaR}sjE}+bN+eL?qWn|h0=uZ7Kq7zEIf_X5yKo1@!L_Mj-JJ+H8v!@M9zarL za&`4rLTgl)Va%rA?wDREWq7g?&{r9hBB4Bwlz|7H*KEI$oo{1nn2Niz`~7i)prE;|73Xx!PsRXCo1SL zu{G+H>}UWMv$nU5_m6nanld-!`Z=}eb{NP4GEg*Fz{d{0Z|0*60c#X7iHe9$ElU=l-pD z=_EgXBwk+WL%l}^gnkeS=7odlHJFS5Jnxe4nxBcD0O&r8h`C{Y<@)&*G~C-H^uHCt z76~`IXBK8qMCb@sb=tM{DD9ezG&JaFXvPU;o!#?F(FduH!}BkFtINl)TQAj6+0=Uq z3TU~=vCy|8VwI#KOBg(jeH~lpQ2(BRb9DE$$ZXZvpbkL(Ijf*~*vflJ8lz_0%G9&~ zD3Ua9f#}9AX0*BfTQr>pNE?KIL-{j25~wPlgiD@B@XY?<&dI*LfA!H#hDKy-ltw@` zI1U5KRs~2tcvi#CCgRNd)1JBDkFU6J8DYJyJ5V}zHm==^wobc+&kfNqi0C0v=2iJ3 z*49|V!y|ncz5E#HfI63y4B_kWzVA6(Z&cOqO&#V|{vY$0yxW&NM(*}TN*T9388g6D z7XPd@I?y*>boAE3lJF#&3#9AKkDw#2UyVkUtAp?i(bW#Z(Y#%-g={?tcjZZ|d|B7k zt$vHmC86ewCG`6aO+Hfy=%2I?$p4yW5r8btNJ&fgca5Vx_ut~~C@+w@= zI?z4@QnuHB<8X>kPj^QPASv`WErWyBy5%m)8<+ktUCT48UnXg65gvyaNEy-{XDG25 z>wvk77+1i219Sc@AESk27?=X?sNo9faYL{l_S-u+JM)`OL_` zK(QYhN=m`Gydm}n0H;dtRG&c{<2!wF#Yc&L8n0TAH$YS{SYodQR%qDS-SJXlF+3t7 z6)|bOR0=DZ3{qgvBBqd^XQ!KaEMCI2*r!+vpr+3x@XsP+x9*k#rjkGHva_Lr7;Qd% z;vyT|lPG1mkiNYUKzzO*gL2$8?hWMt=6iDT^YNyXrc&6=gi1>3!Mh6vv4|1(_%p4( z;{v~Q9^NwPHGjKlxH&nFcKdwMz~cCakdUvIM5A0E{b>Mpc5WeC{!)-I3R36jB)#+2u8`g* z25@7`J4?TjNB@IjvYNCn%+Z1N8Q#C|;K3?s{7Vw3oKf)*j`~x_%)W&C(nb`4OFaza z7W?;4ipLkfb6zW9pyCn5wdCj#gPEI05ilVF;xx12e{TxaXkbrxP47~X*;^u&RB&YN zOKch_zhxj7$=jAyu%(CP^J2 zVQ;p%dG#}IAi}Z9Z0DPqghWAFo`>(s3XlDHit_zN?yLN<+N?8utAuNo}K{l z52J{TO!F$0!p^RPg7nWK02Np;!(LL@HVQo<_lGE~I0^r*J5!*rG&`nv1pQM36F*Dc 
z4iG;-5|=2n>7RmcT~Yh311{0tHbnTV3-WXqbalf}FE7(8t~f~#;SoCVm9bqrdh2F_ ziM7k{bNhQg&)rMQL^0ck-_PdGOu<_#7RBxIo6}u=%D^`uIX37RA((W`+q^7%33N|% z_zDG8tu4n2tWY6;lH2mJfSdQubUKd~7%rrRJl>BK6=w2^9(^t?rV&u~3xro%d}uK? zrvsgliemX9! zq^O8j&ZUP@Rp7^5rQCu7SU?GJ+A3?wBzVk5RN8_S0pZ+4M`Fj~wU&{R9};O=T-tX- zdjlRqc`-4uK?vMc$N70VkS3}Ts?eYJ#3rTsO)7>HZWmWnU_4Z0m$$3makU5i6M){p z9UN8z1!7h+P_sYJSo>C=O}sR8ZMr+I!qguP#Te=nIW{`HFb`bu;h(3sSlH=sHBzm! zoc(jrkyxOcCf5r35Bn0`P6Rw34^c6d**PiYma+bpJ7@?YpJlfzvG#w@;44);{Fy8}L6a>LbkHE$alB^?DT z<6$;1H~CT~>eN0wN`F)5S#JqJcdtphTX-KFjinuc;!bV4Y4A-nJtYV+={>(Ey2~*H z2@v$d)usc=d!VuJU(@s^-?3oAkA)D!Wyzeu9n}lvW)hrGYa+g-tW<|GfnR*D&#-X63 zA{Txx!Q7cL*?e-uHU#J7FJA1(d)+Ug}B z`#%&ekWx@!YERXvq)Zm6#~>oKt3FQRQ6ttC_Ovi06`YI*P9eq$Y&GQ{)|r(GUl1kP7K1~4 zhJ_AWC-yv|?d|Ia)ui*s{Pe^|LsMF0IN!UPsOX>&Jg20j%#~i$uDva4Yby(B=x|;0 zhN4!uIzCpY;5O7%uYVj3LToB3F6;KcLw)JXZaEHAKK0ye^ZxH>1AJWmK*xNSK^_5i zYOoS5z4!`l8Sq@8Nb(PtM1f?cHaysK?R7kSxWJ3i3hy`9^DK&QD})7s-$A(%yQR(` z$I-tJtHx>sPJDnJ@;{JZxi7wVAS+~7`W2avS05F3ymg;WF=2T`dh@jK}X+} zwwWG{Ln}aUQB5`a9cgm%%4k8vYBtPj5IN-cYmGug`~dYOPqv$|neahf~VPuq}p=(zgEduZlsV^L`+4bcpr@m)g?Tc4oNLXp4 zLb5q+jJD5sILv>@7#X2*B_d-3IPuG1`P%e-)XUU|K037Vtnbj{fajAI&gLM3mD)Xh zO@%U5dQPew1@h@(KwxXVn=f^{Y2&ts2n3PM51sy}l5~Z;`sxjL<)=0*K$$MdxHeO& zGepr6y-xyoI?ymN`S*S9o2I0%blA}rI3F|)jSe3Oi|74pK&q@V1K#VWuKc?1pefFK z|3r&qXK9JgWY8Yk`dI4y9rCuUQlq**s;30Ys>)-IkHOO%3uu!@%-3UMV*UZ=eEPR` zgnt!$9Kk3zDp0=<&ue5{vt+mVxjb#O$>A!FBlzSeFC4%>R+3qlH6>@)5AScr=Mjx# z5z-sF<}Jo$x1ghnJmQUQz6oGlY;onUKT%=(TVTRVsahryDpGMDa*O0Jrb;MUxpQRV zz28cm#^=~deM87at-GZ?eQ>f$s1$yvVmm8c;fpkbV{${;OKr3Ug_utr2^{w$l`Y;= z)5{EBdmLXTrJ3}bzkMHe;NaqXf77i+`=`a@xS9b69i7a;HdsI)F$oJa84CpImeZgQWTrU&(`!1K|f;-pzb1cRQ&wQ z$DwZu6s$5kwZZ#Ql3n$>u8xz(hYU`mTsQVYGq7z2KzpLM4xv0drh?!9MT3SeQi700 zb3V5Gj{5B>gflRR_5inDiu0dZc&(2*+lSUmS=s`gNtdT^8%eO>Q$`fb1x{{JyvJD-MZm z2wFeSX7cyVc_3D;^%_GeEP>QyRDlA;wYl|`=z?QBY6dx zmvLeFfM7hz(u0d~<#{t1i1^;^-} z!Y@=OT=OCW^gT+sFp2A)T)Rz>K;D>q=bgg2qSw)QhMZ+20$MRPgp1ZYNw>es?!9Zowr-~B9dg+U$rdnD^Hm@>iWz?(0&6p5_ zhf61Igc-FmjZcBkOLMb2q| 
z>0OXyAGRFge|{ur9$@S?jm_3i5a<^sEtUn?RNC4)yt5TA{nG0~y~Ruyl#lxhih9F} z9X2$!5<+Ipx#DVQh8N5tru%7Yu8oa)Ph;{!k& zhWLflMxbd|WN_N^Psiy{59kkqY>|E{b^wI!E`V2Kz-kevVLai1NtQ$41>*aTz~7!d2cs2oJ=o+KiFq$!XCJD~L7H7)fPN3$GJNXr8Qu z-r)_`Be65on^kn(_?|%tP)%H6wqJlUjjwoel?MYzjd|NIRCWZ&1h307H;3FWD#cn2 zo;$Oe*i2sHPXURO=cD5l$A`R+Ip?9}xTfzFaaH%zCulu3x7gx{pHIjD1)JQ|_h5If zxF15m1VX}Rp>s?7KSx7C;=TdW-rkJus}D_>2q(WABt6bj_`blZm+4ZAdR)QQ^--iy zWiR}uDwCpkKU-uAFg|?I=W^#|7oZEXV~UuXgrLYQ1g#m?Gkr3|1g9O9g_wEoTyvTU zO|2QO@P-T*io71UOMAgX2)RBbx9<6a%KgvzRd4u&6Jytd?x>_pL=mF^sjF|Z2^_s* z10PH0fT`pVXn*4Y6Qll1trQOX@4A)ZPd&Cd+1bSTUntG3tvCOhHTU7R7i3^w1OvF+ zLx{j&>DxksCsY8BS6yOvx?NK6q1E6NeBP>&kyijFp8@G;$QyuAcXl$*D0A{2qX~N5 z-IDH8Q0Aua8h^RlbGZ2ans3+2Kq6Z&Bk^;@p#B4nW?aF?o&Dd7RnM5(a=@7I6}m~t zirrn!ItXf?g9Y)>w-wexBCGD`ZM%>g7Vr4W71Q?!UiEzRU~X$ zsHgcnbmltK{a|4PJ`Hy24y57z@)x2ouX zE>jOs`eB^^>g+K{m-c$)b`TUA5z()sqfdaj@}nC`MQ3%iwEYP`RgwGWSO7X%DwuJ0 zp)NQmyS-fgh3r%^5;Lh-TVMOqohbmCiVCVgs+fOy%N-%0R3=}!6*5${s5BiYjCn`( zeJYIw(I*xy9K6YwP|K$@^GjSg4V#63-N-R`@^5XJ~<9l$>Z5X`eYND_0~tLHjj5CmUjFEJc@eN z109|If)IBS;4-8D3p?!wSVO+Xi*5i&VA;F=c$1miSoG@o(D(5rkIOp3;@;FM+lT)OBUHNHuEa}P#8Q>5v+g{yVoASYKTr+VQkQczLR(uY-u zrYGIrihcUYr;;QqE0-J}$8B7t$x7;pZaSV8`KxM)<;UoG4N?yh(dY_LMIsS9nfXKFPD#g_p?>n@&x#SNs2ydpiR1fn!=KR+lGu-w^kK*bCd1s{A@9M zrOk1VW}O2gaDr2fAaPkvErbDjpO5b0y{=24fQ+p1$tuj-n&51y#c>LTzftR@7!M*N zjD@c_zumY%rfU#r*NFl(k$OKjslBIl-w>bNb(&K5&cmh zr*7lHw^0VT`7p#Atu=u3$SeO%L$SNoIeT@{{19eQ=ZKlycM8d z_XrrLq_aqT>$ux_{wB}uRA$CmcFnMNpHPTPz~9bo;g0s_veJwH!B>xA6aVXAI$1-axqC$0 zVZg(OxtZ_FR6WCg#e5*~yp*2UNZ@Q8sKO0y8lcJcq9r_xY_#~%_Mxv?*!0`rWGi1J z=0R-M?6KWP*=*laOz`L=UY#Akr+zMy=;qzmdY3c-XTpIf60~{?j0o6(PabVb*H*JR zqmfR0gwyOBv1y_-%r}~n+g)ed#l}27k43tqa>Sy5g{r_B$9eg8$HnbKCGZcxDo^Xr z?0!68nxfj!5MXM5!fr-$8tAqj(fTNGwERR%M>ouBddUN@4F{!#5JYvgJNGWf< z-FH8f7(~>Nao{_0;RnzZsMe?PLpM;QhN7Y!7A@`mTh7ph1M4piG4J1wPcYcm z3;iJZefGNW)2VM?<1g^7-N1tzB*-j5@P-&Dq75F8Y!wX*LedSqa<Q+d7C!k^u%ns4+`^f33&TA;ekdlC=z^h7#U@?2p+}O z-9xxq@q+Gfw@$9E3HJ-UZ(|*;?3Ua4BY1a{f<5iLMz9YO05(~%VuzO2((QiqGdd@b 
zI(wefVxEts@xmQjm)$1&K5$xEmG0c+vmB6;V)MQ}apK8a5Jqsl2{C&na(8#nd79e@ z8(Z)GrR8XUFl(>6ph`~eohS7JXj}fgeGFG<`@blA%c!c~uiaM#X{1v^I;1;Ax}=fr z?ve&Uq&o$qJ5*9SrBk}4L%O@xIye4e|DUs;XYX^y@RbY(i^aX}@0|1cT-PKF@(&0A z^gT4&696>+$cRBias$D{jJXe%^46sus_AFXyZFe&5h z#k?2LAq)j<#gdC6Aj%ra!M3C}pSq_FVrxz|H8P8ZjZG#IeMfgce$L28ceFx4wyswzM2w-I83!x50;ozb@Q1#Cqc3 zlZ;P|6%3~jhL5{beMO8!$RQVHP`v(^GRLI8S0yN@nJIB$nUM7)3;V+#t@jtutkRY# zYhscev5JQQ7zw%w7=UHt2JK#P5+Ep4XMG6pC7z~XqN~~oA*irWh1G^>)_s_pqo`it znXnoRo~UYIiYU6*5h2FsjHoU7t@$3*Gvu{`ZE=H@s#Vj!939PD9_kc;x={Z!a{}3e z|7GU%lzcD`yBVp@uk=9Q7V%sq3W0W%$f-+66rEM5rT}+ps&*D9 zn!JW@?Lg$0FKxE)dy#MZ8-E05q&~b0K{-dmG{SSeYgnNW0OUrJQQc5%dBtuW_D{<4 zxLZKE3&0eClBQ64&GM+XJr9FAtNu}=o-alj$Rl1+4BdJiTxhITseETP^eJboSp3Zm z%9%4P83lj$rPWa?uZk87jZsWJk8U4XM6Ztl2SYzDKyH)1sxz8`#w7d5&acyo&MsIn4m@*4UFp`_f0s5W3gS zkS37XIcf$Ss)JspNZ((=b1Ob67Hfo{s(;8&beLQ>MxH2`X6m1(jA7ojJhX8(>XMxP zhQ@}q_G=e=svFDi=4WI3~d;NaYH7Sdlr^I^z%!uYH&Rp`|f<=yQev|j(K zJrOUq81UlBltzE9BM{O}{um`|zkE&nROUA-?JaRq?ec7N>G_U}{oqcf zv3StqNDqDE!%$`CA7NFRr!0(%&Z5c7zdY_#E_llf!T424RsDlZCHLV7;7F)g5Bq*5 zthfaVO$G+da10=q#tY!*>%fs_#wl8*BjLHFwKjFdeIR%UxYaN zY=X^{f#dL}VlS1x=8Z*c-XzObRh8y{zE`r+x7okb(6q+@E( z1F;xBknj)R3*2phG6(Lqi@%i5z0m##F@%W(Dzf%-gn+YdIJ+Hf&G=YclilYKC=ST& zC07Fx8hL@%ij%)Z-uXXMawMGj=4z4R?A};T2=&MigrLIuw}r4+CzrPxv6^4#&^@{{ znwIx&*~KDY%nuIgrDJvWE-!6pQ=D|0}VT1QYV%R)2R*C3dwlzbE`6yM3&n zg9W-nX!cEB%<9UX1CvXnNq)jkthDVO@j73kaVUKC{|deuh{( zZnUSJYfFix<9%|TzFG3}5iE58qV^kZy;AKvXB;QF)*h6#>JGfj=!(+YoE@vUxlN4; z4WGHWQ(Hf$Z(%na>+f-d_DbOk)O@zc=T~$yWv=mdCL-DVR*MCBjQBmX{e~d)jjD?lGPIv%_4 z4>L<27M*3??b~O>lE<{(?Lw!FKWzI#?uE$M$dP>Smwbcdo|E>B!6&T=L5`jSjC)S7 zyvlRZ^PBc53uN~idKP(9R{vR0$L79OznBSCx4qR1o$cc>iE0udm5G>dpzrT3gEw@I=H1&FI4uv{L9nrU%vy z`7o6rgwT;mR40C=w1 zhdIDEaT!A@^fP>VkXZ~2e3AwCuGXod;&uKXRnKysOJhD$uAR%UDO#pPT=C8#KtZoy z{R$W9Que)8B$JdXm?xa$*cEAMt+ZH@`6*xw$F?*3e6@jPI-93l(N{5E)4eQA_8T8v z9BOZUZPaliC-qj13l$c#29Oa7vnMof9_GN0!E(iKs<6H=9oQs@lDeqg zFs6Sls&thU6B81NX5U>jSaEd~zyB}}Us0syfDD&GP}QvQAqR)hO0Oqz)QP_L(XD>+ 
zWgSb91?FLE{G#NPGg}WyhxN!^McY7)j*cC zRg;vjXb#r6M*D!dUyGN}1SQVa#`gu;c0eS7g?)MMjWSG*q$U901MZb%CRwjqPV-CSX%18phE@B?Eai_SpfM~!1Q!3q#w zXl)N5;l+-?@cJAf{IKe;+y=mH>3(?62x=PAxw+ElS}=Wy80b$lu30B5FQ`F6{w)+T z`Eq&<^s+OM6|b?~C4=@q?jkziiE8-aiEsE~h1BD&5gxgwLsXg7vlA)V|H;5^X2)z) z+|F8^PNfQ<5Tt&{4G%-2ezC!b=8uw7dOX3z^U>}@WSHQ>Q}yS`-@zqrerJF8=8t-8 zS)nDKS>p#DWW3;rYV->@@)fbT!yqAGI~c(ZHzTL0=P5bBHrJO`{}}ud)Dzmboyd84#a$f`QoN%=L;Zi6 ziOShV?k%4`{mbD8sLfTue7%czC()EUO1Uj`(eVmcs3;cBt0bSG#T$YPpDeVU)5Ul{ z>D({;+O*H?yq9vB+kGVuiL*FoF5$?-fi~|Saw;|NRy%}C2Fv8*?l;#3p>Tg9kiwiU zW|I1<6WzQ=8YO*4ol*GmABm^q@eVZpD%ys^Hb#EPrO2B%x=6W?RnQUF!>+n~7ok=` zfzaLAxr&1M@C%W;W`hY8ph9WJL6)JCzJ4^jxqRD-v%zRP)y&<@IOy>h=;>cNoSY#B zXm)g&s~KEZPHNQt#-Q697-$W$UUerX7Njt>2qeznbR|+N(FmXXI%PO3`jy48-K46f zIxC9>&Gz!gf5=a!GvGXY?(L-&xLKMu#+^d&pP{5Zx__&GrYeLsL|1S>0GJ(X}EI%oNo!a7uS_wRooT$ow zM#(=r=ck1YBw+QC3qrZpb|`<-*DOCap+T^Ec{cU3ycXq~eD6R4%K3o*fDam?SY%t< zJ?L!-?b` z|KcZvkM<=@hci|$AahjaFhYB4U|WLgHK2Wb`0C`UmS+rWhkNvkglOt>+j9wx)d>q! z```-cXgB1q$IzKI{ZJLperoU z7J#j^4jsRxR{r(k&g>hFqhF;Eb~3h7onyDDT2)`ALq2r*-17!PZx(3s-MJS`_3K&YUy@;Hl`Gr+6+;#ONv!q_-MqL2__f!Of~ppR!&D1)Ex zA0dGW*tR)ozjWX4L^Fa?xA_?xO6sWB8X7z#I(O(Ghw{~Y1!|Z0QwNWV?|{!ldTCWV z3&LW0Kv;~6=wD$m*eKMDMiP=zWsf7NfUw~rV&pHbTc_kw{7nAWkDY-3a2}5FKd!F0 zZ?j+}_b?+$8PFcpvzLsS{4nCqUb@eiI2`9^gfdBCYey~<^)@FzMO5&{OT^7(;IHeoIC}M2qS^a^;RWipEt9q|rv#nhYA0;6~CW;+fb=73U>b{p_M2K#(o`I+$gb zm>A@-s+K?H&Hirb&{Baa`K9?KTb5^lh1!3yCCV4VbWw@(xZJ=F>GMQm(Y$lm8kuH` z?tVk7+vqBOP*z%+ux7+jBm3mXxVe1Ta9YO-1-#?kl~VmoBlD=od(mbFSvu}ZlxIY9 z(XvTH12JuvPyU($5#r?tr@yu{560q9dtLLp8NU{lEr4t+4zYO%fJAUiwJ>tF8%4H+ z^Sy2lE)<35(f3!dpRV4cPH6-b$SFHLLH;z}Ly~9T7Ww6SCfy+no~PZI(H`W&@^iMV zq9O~p7n-M%BE!!^7XONqVc8S8zmNdBI=DQ#;YnMVQAtU7E^&xy`RcK>mpn1bnwiG5 zwaa%++7^N_%<#K66&tV4{hGnyr$rGOWo5;0z>>2;cR8<~skzrB;;B+Vx}i;D4wUl+ zL?IYlY-e17eFud-#s=0eE4$5dzcr~=wH}gf&SaBh`x6A!yTz=~Gd8wRGnnd6p;oh~ z;uo}zY~)ORS#hXaXK%A~3$sQYA*_-Kg@{U2N)UyMNb;u(DE;`y4{#)wk6Tl(^)y`x zCLQJcB?>dC{6w6tWU+Oc0^D8xihpT*r3aV6RxMZrB_Sv0(%PlMp{A{=Nw!&Yvuj%O 
zubyYv24KrSb`MzeznK0O49uPTmD!~gq0D;DeW<7XrGb*6(ks2&Q(5@ zM#(S=o{w}$UZ9OqeS%tr)1tBf-ouJOe8z?!^`r` zMYX=`r$O7?s|`7bC`r0HLaRdNg|a%L^jin7uOzo3v^*I9SpWVoEy*iLv#J4Vg+W|W zGg53SX4kOM!9q9|yh-M=R7>>}Cxe5Jz)-6Pq)=5Huk@53$NroFJ59f7v=A245o8>7 zH^;=qg*}u`V9u~oA*T?cu8`ugbPs|LSO&GRRFPdTKSX9`Zy!xb8#e`I@WyN8yFfJwiBV9K{<>?~3&Q`c_~wWn4EL-f(D} z-42rYnwZ$++_^~swrQA18TzFeBUGi^ZxQ3|u)H;o`o;uaPbuZJoqfY+hx zJ-gYOU;QZtq19WN3rZ#~l3X@PQO3R9urMd=BQEfoRHIDPV?*^)6d{h zAnt)n4ZneP#bYhQ0qeuTTuDrvF>i;*_lppoikClS z#lrsMz~bZD@s(fe4nHidaqeXu4}@i&#RA!g{~>q8CJg))QG|1HjTLn?ifk4HrThBW z?hYJ23cLx_Vz98oBwlU7elr*oyfbRT)n@iRH(pc}4QZtqH$p4z39#~>uup;qOAcF! z;@8Q#rb2|DI3)m!Wg(0_`EF|~u(fiyRi%aR?{E72R<5YpsxvaWv#PMtVJBVI!I=kx z$WZKNUM$V?7PEhNl9uW=m>nsqYNmD6+mS}4&JI96Fbdv%X zJipD>-kz!xfp5SL8!fF=6ugVGGi|+}Wcg%BrzLAUm&|Lmc$jz5RYtzT?2wW=DY_M1 zHL<|8LIT!uEuC_=qLm`lAg-y%h9a&2PW%eSRye3nwiw+$aS_P^Q&vT`USGW+D{F;? zrL|uNtFz$pQ)F$=u};gJ^a_g0JI$(^nim2Bm_m1L3a?k66L7`Oly&jq(RV)1VHS{zVt-fPar`SLXYg7pezB&FZ*5s@)3qk(( zupUvaHMQ;g_?`7Uwa+>6T2#57;C+z||Kk-|RWJ)p6v86G^-I~I2ab9gxxu)y&!0b^ zoXh`lF%6>sfn5IvsL>EAj}A*&gLd*Cea)SHrf$A;DJGWk^mDk%YdsixPYe@r6OHWA zFAgsDh$;PAKyqpWU8Ug*K=I4_clM1Y*EwR#H+d*|Mxr3GOC9SS1WzkE85s=I=$1eG zj4b)W(1_z%t8-AG@2iiX6a618$--evz>>@m;JYF$m5fcL%+dWTO~bns5Yb$#++AHV zh=-MId_+QW(ZWSth?2p$Lvsd2M!H3jR+3`=4>))T-AusK(@h$S#><_=4IPk)|nz13Q ziw{_~zgDiWlvD_II4cBIko#fOgCf2{8hm5=N3|rNPR{ku=N1wzm+#vqu9jeoKsR(h zGxe^Ucfuo}M;RwIDbi1u(BWI1%T^qH(h#I0&s`PdmrCg@03O1@ku;P4TzmvAMFG4U zm}arfv(JJTsPJw>b%y0h9qxNG6kDCS3^)2lT4AFxQHX#H+81BA;s=kLRZ|Mrqp*Hf zU$oO6s6)hag!5-=xp(Vyh*z%FUJjDwJG;9F0uyVOT@khCpMLL)I688`#{CThBW=f( z)oY({h{atYNF3lpil%!CSBYEC1oOm0p+C=r#YgtfL>vX72>1Fkfri6-ww4c|00(!H z(#+a}@rdSOCaWk|3789XPPR8;P^(g|toq8sPpL||?W|71m^MGtiv;w)uIteVP+=4L z!#xG|vKDuPuW%p>WSIp$+7~;8+e1x2f>FME+l^^JAh+wo`e#R49*r<-ebhk;SdTKI zS}vG2Lt&otA#w1-C_o2d@X+SK0M>&CXh;vSDXK{igH&*h%wm0dI8Q_ZVD3T6PE9n` ziT-BFy1YRoyliN4h5HrKc_TeNkx*ZV-|Xt6-ZlMnf+2kPFS&E*^bwUfK@^G`$nNzN(E)7bE~NZ%9#@Jk)cg8OG+Ct5^TH zEK0w9`%yoy0z4u3U%r0zd~X)p!+gRbc8MVjEwpXOJeU$iB@Um{Z2bA`ftE*o;AUt_ 
z5&yi0?|05UqkDu%YR|p?7%JGyEd&QgM03?-qkT~YAZwS3t^Gz7y%ho%SKL=4ca^nO zPyTgD)|=O*Gg&xWS)3746}nmY@kH*sdYCnSwo4V06BR)jN9%l=KWSx$#JZ0=O$i$3 z`}3LSCX~AoT0Hhxl@FO@fBsRAko^j=anojJOmy}XfYiUY{Q7lWh!8JJJ(33x6~j3_ zt{XEMBJ9F#cBTPJ5D6q(;K~}c75DXQehePx#(T*Ou<-B$0;xZPBurT|+6)rHMeTmC zcIj-d9Oe=qu3a7HIvw}!e0t7eyMy9L6tDb+Mm3U>BWrCTxhh5b6fUibyqHs2YG&6* z9uhC6>sgA(tb>;pB!OwGRCb+qM{g;v9PX<2QrZ5N_gW#RA&)DOoa4MEGrpB-n0@>u zm`=gS`#tk|FqabKl=lGl7Tn0pS|(KVyq5&pB&s1pOFqV6L2D!Ov${n+q$@I9Y;Inx zYSJa^=A`81P6;ps!PmBTY!nTU&d9Jo94?g3wamqDJ-yeV;A-+ezM{WE&3tp1`((hC>B4${@X;KtIdZ*AJBo5IZ_*L zYG+Scv#_%vMDvSO&=ev;wN{&1yPj(jB=gZ}#*!mrbL0>;Jh0&|k0v>m;Pd}K!+&3H)+8Zy z2ki8Onqk`=e1+$RC!kPJehGOic-}vMQ*TH;>?iuH^WbRO>s6c7E|x`@kVT2R+GfD`wjXuYDU645JBk$yb2|6=->BT>On!zSq7nlWWZC>>)2tz(krHSQ>6v7&FlETQaBy>ba~vb zti4aRCC6m-J+*?){@VZmwBIr4bR1Q`_Gg+m-w_FAP+WXhXa2kXKX!AkN&It9m?bY` zKKQgbg6z>D#c!3bBs1fEmvtSRkm#Q5MFQZ-v9$JRRTdhUr0?+z$X+O~psQHG!T8X>{dEH(KKiHUV1E;{ zIO)7&k8&=%I3ZQEv19xwIZ71fhNJBV%e{yOu~su~>2uNhxE`TG;5ATu&I zH&!SW^UviC^#_W_pM%r!v~cS#wnu1TzwyIZe5TCgrO|s+?_|+?w)?`R`u+U`2qI{y3y?Ok-xf1}9t!Cl+!=_u#MKoMitdcTg>C2(N=mU2 z)X(I1sL)_YX1)I_R8jB-yFmW)v-rNez2+eh2~+O+vjfD5U@dG{L@GQa^tJEJq5-F~ z>VM`Dm|Oo1p^kKmg|rtfJ(;@W4?i|4G~ zKh9usSrORH=c7TZOh(0vL$Li>>J8E8*Go_kXshXkt;XdXn6PXCPu}CNlk`3B{(U?2 z%KwDFfS{h_WvBY=F;9`aMcisayOXQwo?b!Xo{|%UayaDbD|vwr`>i9@Ig{bLI|YiZ zZU74qQL_FhKo(6zw3GDr-zeEIqT_c!?gH78?BfjYP5xi?FO!+mgBN2tX#^p3PhOiX z!@{S=B*%mRWMOSN`+bpp(Y{KP$KI9F-Sv6WaeMf1@7V=^SuTiNHr+7gaXUasNlpC} zH_eKV!X|J85^F0=j@bq?Yw-!TbH!Se=-U3iNg{+g{{GM77S&+!7t6pot7M~NJ4!Q3K>d-4fzFajV zmA=NOf6f^=ZG#T^-#h5y$nUUz<1&uzs&M{(nx{$o><|N@RHqN}6yn(3b7B2NB_*F< z>_fpv`8{&JX}(j1sV@OPHL|58h2!;!xK6Vx0he}gG7r74f4zF{o<&HAP2LqM5%mjT@(il zX6)EGFczk$Hz-N<{{7~mu+KZVG%TwMWDmS<9TPigE|EumD(L4U*e(=p+I=ybJ~A~0 zk^-2s^Yb+!rT1MR^K*iZy_26CsOwQdL2$!SsmUCcgvxQ_jz>p6O&2$^Aa_m(WF(J9OIpB=peWB(K=1hsJ+afX%OL%b7)j-brOIw3H$)!Xhc^;=r_ON>hf8-(j^GAJ?B`i|0Ys8A9vwkuhY&I5| z`{+z>C@Mb~wS^N;ST4Kz4iTi(vG|LK;lOj?3Zb}iST<$rkg&)}l1_NT! 
zmKcCljYZG=kcUB+bF7*TR+k<&L%I0)<~}Jrv?H&;^-0UbpjvZ~k+*IF@>uA#J79JM z{tQX1J`T9(+O%Kc{TqI9PZVR~DT((@^x_7^qKYtW!4+p8wKnJj85F-Bp$p zX1rW4G1t`1Uu;gkD`K`vyyh#x?OryR?e@Vk%sKddXM%Zhkp%KtC*zCZKumyCJu^w8 z&wG4gx%#^>-Ipg&<%}|5;gK}TIl7;)a8A~W-9i#} zD8Wg}O;yBR_*j8TcEwSK3w5f{`0ocRD~Nu33Sg-<-g8o*1`VZmPuT+0vPNrm9P8xb zK|mLFa-v^+e~6mKuPHHy{j+mG>^*u%by#vly zgF-)dsy)>IiqB9iWUi**Gws{F5IvX9c)bm=C;m~#$j&kP4=;1|lCYYPjEu};`Ixjd z_~tEcxOmjYf~{{D0S`y(9%IRCuq?_-OlVHq7%raLtZ#a7@XfU+4a&=D*9ulgjaIwd ztvY;%t)uCy4))eAFYVfg8f#_WZK>1u{#Xd4+6LFv!9I9yXX^=&@Y^Mg=jN$^V;#}V ztmjAGIvOdkp`2iIm8JQ?QU!y;7n$G#0m0`7))nRR0V}fLKWsl1$SxPrvT8_6Xd(dO z65?z;$XKv3iToMVXL3LyWrW{dTSsh1w?4LWdjO}5px;1anw+)h5axYy!!1=?LZ%wM zjF5|R_yvuC@c#Qsz$=a55RUL~FxHjgeZ()Ychn1mPT>HV*AT&{d!jW?2)`_?m@plB z@~jamxBAZw*KvhZx$@rj%h!2&UZrhJINj;w$C@4_vcunhUXg$Uo01YVki@*W_$zby ze`w}l;V)nP5}QUxM77Yl?9Q8!g#b}fR(Ph$b_~|Td4{v*p#l4V*-o0 zom}$*ru)mD!+Sx+`_}X9%QuFe3}&~L&d^*3+OLvWn3#^xwCl=m0B_dT<_7{$JLKL= zX-RQ&^6fK9YJjah1iAyw^4WT^{a&RikbE7E$DeQ{oE_|2o{7?hJSPo|!}?@V*64Di zk*iL@u^7=khKo&n+}tB{!o|H3`-WigeE#Aqt$;xJJ2p2-sVc;H?q~_IptqiT{M8$A zgQOunts$z&zaid850)Vvm1%Xq)ZCA(PAdp|P+yu6#W|rvW4P~nI=RIm*h`!3V2{V~ za4_hIe*`>qq#Ooa591S%FaZ7s;NA~7AHcoA3;vTQ($flW*1c-pSIpBeymmZ{hb0s( zCb`yF9BaW|OFoP9L?s;uFE>RIe=_Vi(#$(iJ=oVQ+ShIHCkb@I3TI52eT346(MfA` zadE2S*AfUB@QW|Jxww#&XX!RI9T?WBiwRGeqknkJ0bGFjmD~T`Um8w;4G?htZEMG6 zpL%BRhzV=+H_fqtUMEPIKojj7iAwaeZz+4Tpv&shL;UPrs^QwPJuuPZvzw(=a&m;= z!qJ67WMsqbG44k{@DlI~-GpbZ^$6VsVppRBk5SvYw8~-Rrcj62H|Uh-o3POwR(s}G z?)R5hKS?t>QuT>M^M3|lyyIT$tB;jgc}BctZi+f(Wju8xehkpM|t4>G5d(=!StuE)`Ko=MbbU4)KJpV`e(k-G#t=B<0z2np~Bb(a=if13Lq4L9ZGe*!O*mo6PX!o$u-xPRi3RWz~cIqo&*=4L(|9#%?GTbIQV1gV9tIzG{j zfg6&~Q2aN7Z2RM&b5hmh+f|{ZtNUF(`t(9;)gL8J9bJnM?ig} z&3nJy=2_cs$;|eIzVW=mu(>%aAo*k$*DE2Z;)bVSa0E>hq{1 z^x1)Hd^2|}LMvez8TyO&;6fpuTQc6*J1f346jMfOnX7#cHYw9D&Yuvzy56GzK^{gI z-#o@T)V+0{fUYOBz1{onI#PZ8%S3HWQ&@L_=l0VP%pHOs;NEttOHgcGuP$S@5U+2- 
zqmxi%uA^I`i^{dR#l(!<{>gX!o{yi1C0XN&J6UmNbD1UgtVSbs8bPHFJ}dA<$g5B{Xz%{!$D;iiAcReWA2VXl9QLP%sck<~r?M)Nd?lfY*;AL{PM*$ohOLfqKM@6Yh+ z0b5E;-KCxM7qjXD3hTBWvJ+!tW1U=7k~wt<6+o$$m^MPKz72NAARvdo*0SOo%HU_D zp?R->i_4G^P<}BmQ-6+N=V);F?R7piAz~YS)ntZf>}viriydmqcS{_zh~e(K3Ubro z&cO;q2h}uD&G14LiL&-uwS`~=U6cd;3-p4%N-S27Iczb`%=UbG@y_ujr#P{B=+#Bp9P%xP==VTZshcw)=kU9s-e*uH2K?~S5Ll3BE6Jb zsnj82+btd0;y{o^5q1c`004fwx4~qGWD0*%T7$xSwOU^ezt=_rUSCCnZix}r zr8UE@ZnmGK!s=n~B#E@oY3(WBsXLZ_<^FL+*sK{Hut)3o6E^!snV135YyyWX{FMbi zhZScPpCd7CJlgy0sV^IwtKH%`dl43E_UH*#k*g_07-1;`=N6@VnT3x}Zjw2uB=k~I zyMC6}y0lLXuK0Ni``(gINbHI=q!%KZZM`N*EM_b~6zcZP7Uy(6bdh@m)(D)3(~4NP_5Np`p6WP1wJB{k@4LW;U$?LG~p_pD%zPfW5!djyC_3fFSX z%5S6LYdUV1NH#&02+qb@u@gig7A|6Q$$C|URpAPAvh(ZMxeRIpeuZ zs3kJSYTreeN7p4_5i7v;G*S{c5tbmh%=-d?d`XC(Pp6jFh0BFDKqcGwn0?K!f1GqJ zU+9hydfeZ`qf#b*H;AgSk0{BwMPDesP`|q&ulATVUd_1U%`yDpdnNBfiX!ZMTaRB| zxB6?W(J`LMt*rz9FyQB0>0B`vj&Q&YIO^M4Z%B0ncjk!z0`# zNfJj9$(5GR8BQcsTeNow_M|P;GA>eNi%HX+<|w$7lrP08?`h4G{X;FOXHH&qSU1XN z55djs$Qy2^N-F;5AQP0ukfq;9+mJlsLp)4yYSe?G(3~=57nvGh^)5oaR16U@mWuiC zg2M&qcc)aek9KyMSMx8U25f^2?V{UWGvX~bNYjexNi~2)FF_(94GV&S{**%(#Wb}L z+UR65wCU9a(N?m+w^S=>5@qKmi?E?pc?i*P$M)<7OH3rc*U$G|@kP#A#GeZ1o}Oh& zEz=zKq#pE*xMPt^21!Q;a2sRyyr|IkvV{i;Emd&)A4Gq$xjplkMh@l7t^R^qDxY*# zYA(&u*Wi5l-8O$$TDD!toEa}cn=11;<@zo&eWVr!By=vA@k4oqi}l{ z7*S*|=|fL^MM|a&dT4&-YoB;Y`?PWLq(6HAp5atN*+G8QFGSo_T&+&Ep2(iMFBcuW( zg4|CUc9eYY=Y^>$FsVBYCRVWnsI(eR8{_B(Vg36ad8H!T1tanKBP zQ#e!v)0r#IT>2I!f3Y96htlWyb?Oak)eEr2=gDUZ$v(u=<>lS`u+;3(60l4x8Tj$v zq;TLIw|D0bj~g+5<$NO_7fHZgYECGK8yYIjRyDC8azU030#`nVgoeI#`di2Uz4#j0 zUeHN^`=(yr`bSJr%+N^lf%-;Jc%1jrz0hx_5i6GmC|Zq4Km2F3U?7!jqV(-=35zri zuwYL%sq{x~YQ~S0x4WODUjE1FBEQS+ON1W!LXg%fyVo9!sMmNIN*D9|eEj^5M=`i+shSR-J>Tl>x+TRsBikUN=UaB?Y(xj#?Wc~em1dp?{d9S8-8}iPuFQN zcbQFvgl6p9Q^J`!Rhe1~$I`6vanbN~g{v+2GSkD5Qz8&rU}Uhqm9JSyUX`BC<^HHA zutBtCQ4rbKh39LJ>e^gOOf!&@9XZfm<*|FbehDQJx}Tzc^E^yTA&B6k;*}l%4yq<1 z><)`2GPD>Bi%Lr3{iCtrX8uCnpDAKc-i2V;GJC*%D=xvvo|?Xv{_3Zd`yWef 
zDA{&hNDCiRRZC&Fs5APgV9KAXNqnU-%+h+cKK|j=!f-Q{*6U-r^BkwI1}wXVNO<=j z4!nL_g#6hx{t<^#b-($@Uv=XYlXXynRw&@r=90-hzy$&O^8Pq_uEd28++ja`z zNGS*=?div{j-}gcdSM(RFf_-B_JHv@7xr+yqKmqv{4G>2^@e z{w~MoU()W`)xH=`3VAouzG#=}>!fW~z6XLv<|iAfqYd_EZ1M^U2I{}QFdp_O&dbAS zhiC=^8iqnHf$X-21OJ`+zlVyKc_F zhCB^sw<*Qc7s=bv??sB0_LTeCq|4d^1>{xhJf)nCtmIzwMiGofysnR{We7qIDGTJI zgB!T;c$2I?Y&j;!Vnjuq-1F#WC6Fmo8@o1PoU5a)h*ZxU*w=@!CVw`P=v#z_Z`K|p zt@e}9Z4%`%3u!W8MtM3miS%!UbG%9ZgkXFxPE;>Na}>j7$b^YED`Eu1;~N z^DHXl@I7A{VaW~xh)E`+Aj8K~(CygwCt!=XWc@s7VRaS~)$G@0C2sxo(pw{{pU595 z$PP@G48REdVc7tST0V_l5(4mVkZfE5*qUwK&9Cnp9k0ipIy=v>SgvA&CNVc_nGk3* zU{{S8gI*nMmw4cV%|B1Ek=bJDbzJT^*;BF-Mn>h41#JR(Q6VKk<)V>`sAt=pm9=bX zH%U6HnM!BO?aCq}I+GtyPT$rhD%GR&_S1K{ay~_Mm?0j6Ir-T)WhZGZ`g|_+mp!k9 zy7Ke4EM2gnm0S}z+`R%RC=NVCV6qVZ%kw)Nz;HUSH zuIlkQ>)DlLo!a9Je#M(#cw~nje6f2M87k!-RzRN^=f_Os- zaq)WPei#p~G#*1F60HX1Fv)lC6mmE}|^fD)V|cjjlo zPep}_it57i{x{`Zm-zu)i5i7%Pj`@nPNVBnmvcdecrxTrvF*!(7L2vl826L|K`nuf zEAELX0{@RAqeHpRAkQYL1atYsZ|R@1MCgm0OYMIhUpcdC4I}UdFnon&5-J~22v*IU zvo-#X%$!4cyO^H(3grOecr=gOncf3#Djfk zYw_IyH~zCWLCxa_wUxl{S`MwIKkA<@BwuYYv!gH_)!E|P2OBv; zeGuXWR31;CIe-%$W?NXEG5oU{b=-o(Nf++X#ASg8y#0S!w*ySfW!c$}nVZfS^t|p( zsm!;I`U*)`dPn^i;85jo@Aqh0#oXQV=ItykyFuP^0+ukhNH@BYiZ+%0d)dd&^u7d) z$9%k+o!g}_#F`zP|%F2E<=|Kr~XCju?l8)-T>wT<)EA-51*p;7NY*1%Rh$o zh}0Hykeiqixre`fs6YoVMm@BvIm809+j8NmEE# zI`b+@=o9}A^tC=S{NG$@d@dr-L`KE#<|fgM&--1H*}XDQGJWnkm3X>3%9?Ue^bhVu zf}jiK5G4lv8bUOCchw-(rSEP@X)g>Prt4{Tba@k7UYAj_x4kOVa&O~;Ya9J?ymwIu z@kMW+6&E8jD8j6<~GoY?-vQe8-;VyFco% zmq9i*?U;ePmlr=+UhxpQ1N`7D;gFwo-)tNsKCj}BuRjS1L?%hVvMDf#))hmh$jQxR zeb>z(P^@bcdk4xAEm6r+>jlLAMr7VQNt;#qQ2)}x za2yPP1Hn3q%X$qu=0n07TVM64?_3=7dKTNqK`oC5NVKSWtkg?2z^0OEM1GpLbDaM? znXb5BVL%>m50j9kYy>%`gI`c8;1wWnc6N^_I5|;XUngphQm6sEM~0}6aLeE!IoSJ! 
zi4_Nwna#KMnzgxhOXfUE+aKs`d42Bg3(JiTP)y8*yW^^bekhQp`{BF1WFoym@sPQ0 z65UkX<{&%GJ~UEtMUtQ_eBm|fR`H4-WoT#o>Lx*X`mfW z0gO`;-B1s#--dHnStJ`Aj_jpL18IIZ(LY7C>S$G_H>S&pN{By@nF$nxvayoMT*$fK zA97NXThEKV(Jn`1XE#PC$q3o2S1`1}UjD{>07(a)4tYbR$F7IhPXYryDs8ElxjB}d zmq%WK=kI_+3+0v9U&fH*5Pn#&IWNI*s#x$RVQM7XD4g%niWgYJDTVZiAn2HpvV~lj zG_-JHM;m@aI!`>b-+7_dEo@->V|P&?>w2Oo%EFc$AKi@=gJo=XAjLT2!qNab`Nh-S zypf3ZHaP}|W1oM|Ty?J}Pr8Br@?0s9GGrIZ@ zhHXtZwH5Rl#*)Kb2!TIwfGl~+i^JV z-luNmHpS(^BpM-{U%7gRWAH>9@IYeAU}_`6?zA9%D>ov_bjd=9=J$i7f6OBw)rx^q zt5q^ZXi4U1-a*+{Yg;A$kz&jKY?s*qdP?S(B8Juy`0C0NOl;hZ++0!;V`{mSpoDd2 zB7r8-vStV-SX)wY7A7BF*8Bb6s7%Pu^8Lx$nwsTZ|Jw~Or|6V-cL+>6z0FhtI3Bhd z)kY;OI=W-0oy$reo6De&`Q_DfI?_T|$8inMS3P>=!(n&I4541Wo$#A>EIU@NEBlvd zS%bv>(&603Ye7FX*vR0D1cHd|4?jAiY*8?IA3br^wfn&8LZTLaYByg_G``AF)Obfw zv)6E-wWv}`@S7ulzF!<4YYgo1IyAJtT#5_tD3mckmhTZzGCLJzPq<4xx>(Zwj+MFJ z7?hahJl}fKcr{d;(H>_RIx==#L-Gh-!6;dq_uWO_*@4Vgh2ls`iyzz7LN6CxI$Qp6 z_pDR|H?4V}9dij>w)?K;CqY9tGoT9F*})4v?1Rh8JI-P2AE&Znx6Ld@lH#zt<~Ong z(T$m8Vv{iT54b~R%|A(@- zjLLFt*mbAUEnN>FNVjw&4bqKtgLHRDcQ{c&Qo6gOyQEvXyLl+deKFtpu5Yce_Sj== z27mMy58U^a=Xo6XITDu5gmvE(K9_l$xX$ZUL>?OT-@A zzuhx?rz^>qEs3o9tRED4_)8mANkxV67OmdiC%X5w7NZf^xb<|xMLnZu!Db1GA!O;B z0EIf8D@hcmZscu0+|!&hjk5BGyu$``q&PZQW|F4=Ab0C_GkgLK&%Q_3e13*3>nuOA zGwTmYXluhu#t^+(;Ed6D(*>Edkg1WY%hk_MAdOp@KPPIPZ$r><^gE}J&V=+AuYr5V z@u{g#+$&zVC#~0c7YYT_`vT-c|0a;O(aG7Fp$hC=qLRng zi7nuRW z`)_59F3=XjS4vFh?Cce*x28S;^jzc(gF?jLz)pp?s!vf|Ospk!DD$gYE$4KFRtW&g z0}0!8mmy+j>w~w$<}Ue`YET@xfU;Mcbs??D5_iFv+*`VE)|YjSFV(mhz6S;Vqt^aG zQXBGhcal4c91E$^+!Df6z*xGcs%C*47o1&o@}TE&eZom0K#pgA)N=CxD!Kn(0giL+ zRNv=r{j>UZ?ht~0%M4bLGqCKEEb;-R4cHd-dLrH?qcpZkKQT9EqrY^PcZ$_Vnlyrg z==YWS-eN0?R^WiU1oz{)7EQhGJBub&WCLlf-!t21&8YydsAArm&66+sW%4y66{yx1 z0hS5Q0Q*L~kUMM<^K@3`&ktUI$)(~1eM@$O_-`~@Xat-gk-w#mHb`NX@D2$Z{Hhy3 zgeZ$FvA$^ux!w4{BViCR|9inG5>@mnRIY%N-cf$ZfiYmDKtk7zQjLfcUBFpo;D{LO z)kFi0FduxMK0k{XM*UPxW}%Hkvl9k4SR=pvE{BK}A#my$gegP!oJ9~24%y9>fahj2 zxn=Y@X%c=Z|B+PqbI_nBbH7pp4%@7G{aQ<@j>B#Aj#t*0`X*X@tklB~Ci}M7^GKaU 
zBxTxV1X3+pF{mFfHcy~PT3PXtE?nf`Kp*yM@s$B>5n-D-KK-vYaNE^%eb^lYBs3G< zg}-kf@um>@oi^UY$-W@_%EHVn4l+Li;HK~os`u9P4p1G027{1{{v{=D|a8v1-CxbZ;m;V7q7*2$dQcSr(LEGd3IEDzpc9NWk)KK(so{?y&ZW7z(cI9 zslD;lfiU?UkolbSIXO;%Ra8`9=kf;D!%bLQd%@7S$99`vm~0{oeZ%a)uV1}^o>8D- z$6OCkFJKFZqmU{Py1!4);~mj1>h?wZf_qPhV)v;=exulZ!64IQoF_vMfbjJZj{zf> zXzww{t&VJ&=6fdEG>zxW+`Be)!g{lN{4@oIzBK@b`p2qMFpY?WBqFwPBzDIxMz*i{ zff0`*vT^e)qa*Cl+x{b*C3BYKMRv>9l;uP>m@g)JhR( z28wHMXqdgTR?0q=)}(#Z*-JcSk&YT4RKdg|iDuU+|ItuX8g?4jIS}hf3+K3z?O1FS3;h%Z8*ZlWWd`pDL{UbUcD~OZ;Jyx z!sfG?;{L&=NT4AG*=`)*6mG|F67GrxoU1Bj#?iQ<&)Hak7}@M^(`}afm<8Ok(p#CC_;Uphx$FMSAK_S^K}Z${oP2213@k zIs#+Z7m&kb4=DWR=P^UdH8PCHTXxF(gT_#Tr#$b~`pke?jvHQ%4@5*nk>6%!^d)ej z{o4b-fR0g>&JDL5ES(9B_$tdfwvunvi?(8B$*9RY#cxtw*%d+~fo#r#RnvIE+&YeI zRWB0dNq&|y;oPAlpR)Wh>(p@mF)7>0kN3Egb{cmZb9NRXtvLA3N0&k}jx>tE@en{> zu-!(}^^bVcDugXNUNON_?)p)9cON;uKZ$h@+P#&&8#J$Xh9wXNuUo&R+Wefo(^?0r z>#oo&G3r>PH-MTwf|AO=_C%|-N-}>*A^&OmBr(yk-~E8agAP2d_xXjDXJvP)L+zP8 zhCsFgAAiQdW|G6_Fwl$`RIF0)x z413*sGg~VNzv5ddJLf*)y?E`p9W@x+yD6XT@R%U5M{+J1jR%OlQkg>KaDSQx+mvsl zmSJe@Ntxi%%<_xB>S7QD5xvcRWBPsR$wwTF;Ffu$Xmux6cK0`{lk7h9> zaebt|Sgx+M9uLEs${#+Uw=93+1+Cbg^IDaf>ouv9>(eVKeWqo4r7#3Wb|P_j$PShzUZ=x^B1@|sP7l?anzHJ z^9P0^??!k(VqzkI9NJOR`}_+U9qLaQUP1wp{483cSFXB|(UQGhICv~yG+FOU5Z z=6^RbfhHa>G&M_s(aKdJ5cRv)pD58O+>48CJRE{vyT9W1kh)>T#1k8a#C#=>AS~B<`OlwX1czp&yQ57ScN&F>LW15RI=?lzI zKDG9W820`^Z~WrQXRfJU*qAlg|9SgSqm3qM7Ax_5yKT@f;iKD(iUA=ci~-q{&<&c{ zUsbKa;=$<-Ea2%G1rH9;{V1{cVO+X?d|~CQm-c)Gua=0jUtgOGARB+)f=n2S3Qzz6 zQkgvgl1i^abZ1jkFY z?xio<)|J}y*UZ67jNpG;ctTD=fyL5x4q?{kW69tK)G#;%1e1sP7=RLKWHiuM4js?# zMZSB3oZ)|lgid-(R9YIuU-usj&;S@Qo?E2gL3oMqEVjK^d^UKo;)M8<3NQWV#9*i* zpj(B090eTZ>>(GacmDNaQU9EGqk=aFf!8D)^ncNj#-%^~o^$hMZEYE+ZW)z`K~5ky zJ{lJ6Nk>n?wo4@TN&tN8+o+KOF%YOg>yZZ?#Mr+R)Ypywl9Y?<`n|PB@Sqgwqu3@G za%)?Iv@;+Glz?hzkdJ6 z`P%251QXG0LBA!4^&=k4*mXy46~A>qehDZ#N@{AaLqh&NE^*HP8BC%`iazlEP17>| zn9-6Qu7GlI&(oJP7{2mDJ7}X$IY{I&x14W5_@lSKQUA6f=@<0J%U8^Jr{kScf)aTC 
zjMND2)q#6avwPm&Q7%4D(*kSqWk)7HpcmI^#0EDD7n_PaP_EqMHJ{)hXM-^=y~nL}q0U9Fw~3 zFMSLVJQxbL&P_u>&x->T*~e+xX}Z$jO6t(p zPC2+FXl^yro*M8?X?BFaxCQ1Y;Pe3|iA~%F|KcGg6Mir4*li`Q0T($lm4^=gN+Yb` z&it%tVN7l2rA{>(QMP2LX`a#`W??2`1iIvO&+W1xU*y#n*p#&_eQRd!9}QPmm+xkA zkdXIu>miC;NF+8gtQ+%*J?Rly3qP<|5;}1cxCa{Vsp(!)og{FFy)kbV`#0r6fq}F9 zsd-)5(&{|P{c;``)Wl_2{N?4*`T^nyoLEh9PYsM|^Ol1nUqx`X-_ zSF`7g)`!)$zJ_BX`S%K={~{u)w;c$wa~?^S;vSFxl(q%;!J8EI1dbpRqt|j}bs=Q>Rm+nTkYELS z=@Dabh_LcT)3Uq#c)t*VqxmW_*tV^Fq=_`5kW|LR@*rR8?RlilNzL7Zpxo8;7DHlA zzED+!G@^7=YX62O*ni^aV8MlW0*T*b{!Xix^3{EaGn+Cm7LqF!OzyEQ~ z553B5Tf4jkCXlTFO8=1k&teZ=VL;G|gq@vpQ@*N1_+H_{24DJ~b028AA$I@3mh5xe zGX22QL~deKh2xv%d0horJVL%q@iBl*Y*S|4Itda*mWPqBYF|hv6EPS-+n5i?6>6xH zFw@ON&d%s7%f$!GZpDIcmhuF+;J+sBD|;VNEB)x0p< z%5_pHmmQP$-2*{--a2J{{TUN75(aW|Ow2V7qx`=&Dz-8MO+(di?e5qCJ67WyK zD*xjOEdAmLJi1yU_(>~%a78?0nxB7Y@JRuKk^5hq^?$D6le}Xm_d?&J?A0Ebs}bRT zc&*Pa0CB_DO=I3OiWm5^gop$)z+hC=-QXnZa3VZqZ+2?<$LGe8N|n*X3iJnX zcgBkh5wC0(7Q7<$Evfu{mD%Z?tXFq;>4cz&t=(YNIDYSS-5)R?3&nY{qW_1G15So;Zl|dZ zc{7loawZgiJ06!g!YF24J57$h##o%5RtE>hl8!Q;|o&1O-d4k1L5orE3^~5#~27NGRR}%nsqudU!y9d3xp;m@sw!Q!P5kU85-7Y+gtD#Pw@(`O$Q*19(``oH0hP@`M09Qp4rN;4nZ3d`h@;$HRRlv7~4 z4%kSNxx2(C3gM!X&$e9b1-Xu;)b7^su2n42vpMq%rmxzC!dh^o{9r%mCVv=CY;O^> znCd@m#(AIEo#c)(4dZE&W1@1i)cS*O!cy&AeP(Gm{2*vQS>GG_&Xe6`$x4FP{vPe$ ziNA|A;CXYZLLvKMiz=a&+wy1omWI!xr^~1F#Ye=D4PXBS{%5iFKkY#mP-kI-U6^yu zSNfn#E$EUU&u?~)nv)+VuI{?UWF)VuGV#h7y#r{u&z5JHJT^)-9WQpt#U;hb=^SA% zLUoSJvGDoy?0nAW&;e@HwfV7^0k{Wf!J#_HAPzdDaEoOI=PRZ}3?$?kFE54wvAWo~ zFF0&^7FGbh+Oz6`!gh!U|ND71R{p1PxpsDwA1LWw zJoesdkJfWMlgRTZdxJ={%=>;4&N`EQe^ZF=exHU?n^NS*azRL*aHbJq#LN!~u!6UB z<)Yd-{=sK$UlZk}{)U=Cbf5*-GcCe7A-;w?&L8&3Inwt5bOV_wZ*}C0mTGJ%XY}mC2ASG!=QciYk@pWxs;D;B)xXO3)b zDax^dO_z1|@|*|Qu^o8$fj;cngG|iDazYOAREFhplMgA&dP2z>lU4h?p- znSC+Cy&xOZed|hm)((S`Bh?`vrxd;_b-J1r#(EMU!De%sJdN>&9E=T=5Rg%`p0$Zj zhus-U5Bth66aH`z|99$q@z4Kh4gYV?G1iEThz1Mk%oNeMAkH1_VsX90@6_|3y~M%( z@CoSJK}2b`^%!*C?C57p1G2Au-O;ZX{A)yrk|@9h`Ysh9$1;gMxJ}Jr3X>yS>5-X- 
zyiZ-AEn|1&y##t)cTBs=a$!-0MmvB;NnIy0Oj<*X&@@jjWxd3rj|L^ky@BA>SNE8_mOb@L^|Q_|Gpi1o19Fv zB~+W7kNEFc(z~*z|GbO_Wm?v*JqOv!uYd-oJ?U(yFJbhdr!8IVw>v^ajdX<>V#~4P zJBtZ1lc}s~U&BseC;E4N*(dVrB6cuSwS>buzO zUMFkT|0=zCg4%a>b-wkzWpv@=b6nqi%TdoEE3zDX7(u}ACNPpA9JIJt4ZH({+p|H$ zwzj2+$Xw6y4+4I3NVH^jzjcs8d1Cstoz#1$$cuh_WAvWS*$B#-I@yN}xaa5|S3`8V zKa-+Vfg7kXPZIeBi*7&e$_Z+jeiMZtYG|nTkLmqxP3M8Jq5q?b8abK~wq&On>*c-c zzWe{Hi+bhm4W3Wx*Z*&Fb)}vE$kkQL*WMC5eflqht2zVbZ@H7*K-Y@y?}oAHqUS>% z9GZ1&hj)qNbB=oA63YwECq}aB{8nW%3=Q?h_6Oq55)AXiw|IZ=1=b+OaK2%jiw>4~ zYsqNek&6)(Tv;S~0n4DWL-}JLEGnZ?zA9eC^?%j~?oM|21AVEcU!dyry;aG2S~?Xc z<`C^WPum-es~MdZ_(dcRM@%B@z}+(qzz$u%;7_zW^Ln@Z=WmN{a%%}9g=Saffuo3V zO13<9{Veti{$5d-a1nf~qVfZCR!vA}u5YH;k|EBS1D--I$TqF%Z# z=AUu#OUZv_NRO-KS~FlcS6+$Qp|t;RUv*iGkL^+9pR#ItE5TF9|Du6iP){KXtV@Qi z!a^Qil8*_N(oVX3Y;c?w3SYEk8-zVOGg`e^%Km8uPvt;er zIs;TAaLzkJU8%g#GjM9RaTD*4#;{yq{5MMhZXoZ@T8nK{FLp^y9;kzcF+%( zp`IC6D#pfGGbQt%YHCpTlpU_q(*O)l>FO%D8A<4;2c6K(sxL&y>s97@1GMGjp4%t^ zZled_YxN&F8TG1O7HgDrl?i670RyB_cQ_7XQavJY=bRIK{fY~;k?$jk>0BJ`M;Vb{ z$SuSDdj)YlIqdfIF4IJr-@1OzOxg^zEE+i`9_Y#<)+R>a+0-&Nk-i*1&D3!zQg77n zx@emn=1V;5ykfLt;^SX&oZiH(h&|9p^-%PkAJ+-@phA)juD)r!2BBEaEV850w?Y4# zdj9tY*?&hY00RWs0?GVVszsKN&957c97V@eeEhoWsmug{mt!hCnvOnt6?j%@@Ua12 z_t;~Vh$H~9gwL0am$z%Ur&n1$+_e5dDg`FSY&d+RhJyCKQL2wwt#kgz!H|eP^_}P; z#R~)b%zS%sFAUU)U+3t=Q4QbM!qmZxtRIJRRJzOH^juMLCMn&W(yy*1*SG(quVBW8 z+6BwS;V-@BC+K(YJ9%8FM!KMonzzuE@jmtUQC|NG7lkJq`+?JfDmg^rN77la*6YR@y>h;9HBbGn#?B2SF#f z)sDlRPi;}mK(;hDMgi!!(SRy*gLswy%Cz&i}!P#K^G86@|Hy>*OhvmE&RB4&! 
zKa@(h9PMlhN9_MSua#WAXDdwOFPm0so3RIKlV zySj0ob@9A^gJC_uZerHISiMl5ReNp!S?D>+{ z|2q0ZGP0^##Z61(*Ttr=XsQ+w3|H28AA{7A!D0hEI&VRcJINWA7yspZ#zRw9yZqxs z&{z`kOF?}D9PLwGYxFxC9sWifx?lEy_&)^Vd;&C8 z?dm>LZt|>#sO@-W0)xFOXs)6)Nhx;i#Bgd65JlK;Wk?h*)u&q)Jky8%v1j)Q|FeJf zypI||?NTH~49%>0(Kb(YNZE9q@0Xrz-0nM|4H;E6N}zf%kfw3St-evvggvOX#*-{X z`h@&tV%dll5XI`-T&>A^2NL?4z={1vRn?RK)iO*)(Hp@#|0!?#0`B-%;S^pcEt)19 zbxl`xe-Bfqm~yw+Rt@+K2(Oq!shUR04(1Q+OoC$gdky&zUrDIRybh+MobsOaPrqJf zfJJ;I`cm)D!yLkr_sHTTSTlcjPxmy4CgsEV2B zZxmmTl&sY3p+We3lqSYuR`<7Xq>c8B6% zc|v4hb4LI`9P2OgT3ctnEcB)ves@1OJ2}uZ`0gAN)aL&w_$4zP(>H6q1|Oza^BfZ> z!b#+8+g-#=H4MUbuE+fy{B$)82{$))R9~BRZS@>KGd0*YGO}nR!S@7<06Z8Z{gjV;f}GxuAgK0Cki6efs)F%3Ehvx2tEE@c7ZL9{1xiN=z+Izzn&Q zOL({-f0ux}o13`KWGO=12O&n%C%Y3u(W|ZF=(?FJ^zyjKbNSB6aY7*l=6-v-chy8; zI1ie(zDmi3^-wro&K@FOM zdUi5eFmVW?9ZhKZgC|FpANvP}Haspb6HgbRZTnhRM+-p{6IeEP6BH#3&40&Tblrhf zOAs8&H*o1QZT=w$T>oSj6-^v&Bq4U<59vfpar>djh^d&ejpQJyQrT1_=Ho9e>H;b) zq(r|~zw2y1$n-$#(z?R*3>iEX@U^a-Ka)~|_|SF=|M|^k z(YnPUKY>pMzFPxoYHOnU%Nf?lhB{vaEsMkYM6!fPAt*~A=xdL zeZxhb*BNfPbdl)pe|3;-&Ew6}!QKyZ`PG;GE}0a*xO}Sp%@|2X#N$jY9>MSJq5YcXpnLr(C6Idry>DlZ%<`H1ETf(`f^y~Z?x*4j>cdSFzoAz%3Mugh^fZbU9)HmCy%URGcO8qch{n|)ey`)7U)*1KU!O=SpM6EgD8Iuv>iQoMhM?k8J8K=X$;gh*L&Sqa9N)AU= zRqWQqM_zO?A=44r7B9NzM;m;%Y-}DKJYgSLJb!qywXLm~B7M{)iR+FP4*a7F^sVjV z*t}lJx1ehr;Su5Nifdw(5<1jKld794?jcqFQNQDO9jR(N`8B*s<9$^T`RYPi%W1qHMVxL zHYJMy%%T|`ho^C|lb+lM6k;OUtsjS?vnL$iWVdMsas0g*QWqhZUsmHjSsLJ^F@JT% ziDf%CLm6 z|C^;G=+7Z%vRj$)N=515tB+`xmouF2?N?2Y4>m+FZ>ow=*?nJ9$$8nSie(_@=RWOi zhnlOlSv^hq?d|&4B|Yw-k#phR1$xSqTltWlP;V9sZeXqk6lk${{6L$7i=i!Oyk>(O zZiRnGVy>cu|L&H9?Qp8X@XMNnb$J2*#nJK4U6rUo+fr$#3vnaQH*Vjq?P&S7-xQzp z+J$0WUIDJRvub^Yk=NGXkU(rniSyF_Bo40L=Zmc^Gm<{A{OibV)`bl7M1$iY@`qD$ zBp9E!*DsTY8xc#~N!Km6P0WPDLOBHBXIOO1FNP6zA^TDn7M{-}4oI)Rc(F z4nqa~9~nnd1*g|q;_ie2FS1OR?n@Q82a5$;n_r#K>froN6%s=Mz?wg_|&_^sSaOG}(A2!D~1rn3d) z)z4S01&MCigIw(W5mF2Lk?iyMfFn`KFrU1zwkjUyKg*3klJ=VX(}MTh)%!{^el9y9 zJBLfBkc1TWu*5ci~f*U|ueD!tv@lbN6uNO-o?JB#`ScX`v*ktL+zdL-f!S{Ph-qx?SM 
zSdTg;weH&#RophtlawIfUjoru&p{{f)Qxw)EmX|Ia1vLJup5|wpyvIF`5 z856+(|E}Ov5qx$+rLp2K_QCj~f5TvMa-k|?i>+cGQ|XuENHj?5Sf?Ns(*D{GDoT$9piDiH_@o^7hTA61k0)tXLswOv65* zqh?7fq5ITG`3st049;x)snB^U8HZ9`X6DI8f*yKn+fb`jzLcubL>I>$n%0U*SHA5< zd$0Dz9+F3BQe>_g_tNJj?V~FABiRKNQqRHU5QNCTu49u^6;;MDdnY#j!K4Z1mrk0} zq^v@~+yi~lM=^y$vj(N*$Ve}wbL*d1_9?tvarUF?OoDHE#QVtKzLjatsK&4gT61sk z>%18t5VoTLUp?#^7D?3PMin+>!gg3d{BBh&>bDmeMFKV)4^K#PEBKe*X=d!x$@8^(U2; zM)Wb160YDzQJGKV3?lJ?+v`5)O&kA@(qZHF6DES0`qDz&GX0TLR0rH~eP`d%PDuyW z%$~z23W;OI%6H^udLLn$MFm#*`(aH`X?_4kyoKoBs6nC=jyoAQfwtcFj){zaqhf zp(*}rbY+E1m0Gb+?0{<>ih&JcXc%eFtt*Df zP}14y#o;Dy8+K491*`iAg^gC*_`P}dpJ5ZNR+m~t)Dubqwg1f9;JwW36b>pbCI-lF zvxvyZ&}>u>1~9YuikO<>;IU}OhH(ophnB{~2?TgL<5RQ?pQd;FkDG|TH6DHyjE)jm z9LJDsm8|@?dcbYA!6>d0AFR?HqXYDJ47d+-_NF4HK*OB9qQ$!ar zUkR25@1JO>JGvjVQ1BCIK-se@_L48e-?skpW|nywF-^~&Wqq?%cWPs0IB92^yOVeT zYJ&Opm($AEfQZsR6q$bNH{dOwV9M*`?^@!(C$WA1PD1%hKy+!02U#e$jnEULdT|hB zK3%7vMs_5flPp^*rlaD^+9?L&r#EPn&uIZ;(DpbzJL@YHDWn^m9;uLp+se zM}sq7=ZbK9vdVu(NbrnY6?6`tjx|c1o{Gf{ocD*)usZib4)C5QcW$Hp=n!aP_!x_@ zFa}tIi0SDu0dWmYFY|)jd=u5{UIekk?(BPG+^W{b;M8)Kjq}gOU(D5itQ(^-A2Ga> z6+;1NILXcnTFjTrQTnQXI(hAdW4LA>#x8+*!QE-?ECI zq1-h4`?=Mt{FA!YNY}*4IIlBD^#t`+x1Ifi{f9Psi>Q(Q5DPP5EmLRAE&R}w*`6)6 zw0D0+UR1qs)I^Q0fKIVoC$&a4SQM5){BqrA%idn3JTY@1K>7nt+}M}bYPd66^|UZ( zb=q=AjH84ZpT6m%_mw@5#GY1v^WO_VR&D52j$Y75vWJRPQDeV7hD}PxmJzCcb8zh)R1^S+uorG0%qEE#FAYbv z4JwZEB?>>nP3aTfAr^i3#cuhnmG!skZl6*qX+Rmp%VvDX%-~qg{QV-*r?TvxsSwm_ zSrLo=vA=%aHAJ*mS5^%qXW3_J_D!TBWA66IM}6tQr183Mk}og-oL+0&M3?#?3T`h} z8{4Y##?J8p%nq3s1dTlCg$!7qZ5~78Q0wfDconuu@C2Dr>7uY#k53{|nxM*`b!?Z4 zPuheVPRna=v(XTfpBne^k3?aw3(JQOy6_i z9UmVTlozwNS%-#&(JbofZE*_;wOW3(#f$SREuB>~%s$E;xevWNzl4TGMHPGeG1)bY zRjys`wD#Mt`0TN{Mg!QR@I$G*QUb_A_hTDcO}FIW=!~G+d?Z1st5~B<%G9(*u8Ne6 z=YuE#%ZKKd;54T5+^l%$iZeVa2LDCYf7b9>v{%F23O74jTOnYurDsnC^I_!W0qN&U zLc+b@)z{Xk!v1*2cBD5~2`gRUQ_2LqY{M>*GuOXS)XP_~Lz8~`1=q@kE-%xn6Y%iNP4SJHFia1iMyHCisO{H4jHKc4dD_IBNL8)<54D%_D_ zM-;uPYQ#iI3l<&?xvkjY0OxqTSk0lC>^u%HyYSe2JOhodx529n0@qhYVQTq0lsP)+ 
zPczc`p=8i~9clIE;9ui6MC%Rrk#e+Weuj!mXF<-Pi)@leDZ! zaWv@r@c#%IJrI1YzamGnzFpeccECC}qqG#>`%AK0fdKYINs(ikEBpKPqfmQeT?ZsZ z^q^lk6EXaf^Uslo&KjBA2NPPuZzz$Qb|HvQa-O+K3;M4V{a@G-G?*iXGRcaoX%mDp z$+YiY4xDC5b(3~J-1gkO{^$oGlJ_%MRHa?U`x=0v!?1ofdY`+i2#y~kvli1wS_HVP z=Q_vI!CC``OqO_jO^Ei1Pdhu$saEDMk`mItj*udu1mC+yN6A2pHwHM>DmCXpoSgmO zm&9MvpUGNwlvDAEi809R4FCY_o9Dh9HS?BU_e~`>AmKm5ozc6(;^NQp4iBO0bLH$f z_@umE(lete)vpG>m2`s=b`*X`HsM}D0b_MuGpQ5}jfa;<=3?sPOdOZVL0`(+{(fXf zgbWt*&Gf8)6gCZ8d=e`G(iu_}6O*j~m|iN~pO~A^_fc*Rp>ay;>U0aoz39dNT|qr< z5>FR_KnvOdSxAQcCzm2{G8hR^AxPYw`%_3V)laMXYU-LNwJv@bFCO zhGH^mGtc zzfy>jORc@(L;Oo52>7%b(zhG&roM5$rtMf^Y_rUX#CaV%p}~^!kwn3 zPPZ6pA8bN@vD)v{U(sG4Xv~ezZ4vcpN@3MQM_T2zXc+u2_XX&-^|FXjSD>k09P4O- zW;o6JY6i+0U+e0JMeN+L3adKwoT@A9zS|E~APP)JhHOQ{X& zRhV4sLU@KsY;J93{rpMrF}v>A4P3P+zy!$(-+ZH;G$d-18nhWz8b>5Z?CBs3B@HG{t^*1b@q`PPd6iisj zHtA7Hifa00t)=aenSm)o(7yPV`aq-LrDnagbtBB3aVYWi(cyM@6^gp$PXr7z0sjn< z^!s3jzH5h<#a+6S2qSaa+cS&Z~d|X@{v)R`?(wS!& zlA0+;t)A=sog2B-*_aR%{=z1UF)U7MHRm_NX2RzEFy zyJZzMA_rlIy`Cu<9pH`o`|sijb7m%nudNEw(zJgksDOx6AUMT!hLGvf(RY8J{GP<= z%-kE3_9GU;NU*hRG$m%O;Pk6R*%%Y z^%-i&%gKrCe@o9Md3*j<9k+mcq0!5tvTfDoiVj6MrDqxv<%>jfaP9p;KtPc3L3=Xq z$f}9?s`}@5Cp+vqw^}{?J8goZ#pxdP-l3-XvM681l6<`--C^&3w9;+C33Vd9LyI;3 z<;h8GrqWV9yV;&tyoAJPg9dUu1+^%-9jhaOnMm0=2md5G0wu~p@*`-KLvvxSI>G=$x?!@4S%{fbMy4w#TSH$zG&;Hq zc)z~dR+|s?+?*wp$@&XBrrc6KI;gNZ3G<^~d#Yl7X(D`j@XK>Oh+(aV zXGsaA@#DaJ|$_7;3E4aWilr*$m_;uE^CO>>62P+i@k_Rz>By%gsrx`>%iSa!`eR5sN`<&Cq zF)w!`)vmD)y=+O9^EMs=x2@Z(>)IPlPs1=+Pt>$ff%5`#Mi>If~3W+0Fj2E_iy8@ zv7C3CheJ}no??kC)VwD*%boH>ZN zijWjVd1?B%*7G#+cCe3;fRh+36;3N%T8JKmk+iF1P4$grSm*F0oabJTWm!{gLGXHs z7qrT{q{Wpe)a(+~C8(GaQl1cnO)(+5in|#rix8=#IRkxRm;UqvQ{)s8z(ob&C~}Aj zPPJ1Gbdo@BvQ zs46`+4T7$iAhlD!m06c_;8IfDG6WU!u)|UCcN(9P*Ngh>VJo)X``W@=sHj{!ol4^f z4L4?et)gxH_#n{piM4}5H~(gI$`xdy{gY#f4qPu_F@$Fiy5LeCW;8joiYoGE;klX? 
z@G?6(yBMdZ1+m%a-Fv5Ng7EIb3|-$5Jf1n>X?<}b^418OFzgrU-{04nrF;Jr=E{mt z%i!&p6@PAMDBLIf`mZfBy|4>auh6hPQ=cy!crws)$2}!PzXV_0lW+K$br>{QZ&~(w zTI=$9?PwV3zWH37Ux}KCWy_Ij9^!9PiB&nVZcDP*!Qig2vQRQbevMF};kNiNdxgfH zy>z+(%JRL=|!rYq%DS>DFI6%`@{g-y~H8a)-hdI;rBpQsSM1ld`w1ets_|PBIf)Ig80UJH)&z ztw$9E9(Pr-@~#uxZh4uv_2VDnAa98jeebTf?yT+aulSl61>uGM+^ABFcyE-hxZ3En zSP9~d7|@gZw9Qwd)z;2lZxKp*blgk)uG$ie(BHCN=3nvMN61s$j@U8q=sjf7X~rr8 zDa(aA|45cPU;3cGf!YG=k()vQSu6})Ol$YGbM$qk$<7t?og*{kHRl67G88et^6e0- zAlY&+p;0|+T;AenZyt52#SvsXH*8Nwg>uGlIVIA!YbW)kf#hg$sdojJ&DOG^zKYuB z$MjM9Np9sse9tF7zV?Mags)$%+-u!$mM@^tgovx_`E-?IQiY#KOqu$I?_Rwx!@Jhj z;j+DO_>=PG3#GreXa?fbtKU^ixNVycnp7#B%}+G_$i8pUl0VFJm&O9Kx@ZC_v&*p`hrQPA-;xsYOnr+y zGnyF#M;<)ab4>+u-WT(HVkT>(HeS96=cOxK^-j$8qLlH~>Ae|oaIq)$lIPvDKhKWD zlA|1U({?Je$8Pl|{*doXW|$kFuX$>4x}sItr+F?`mkeecBwkK01w z?yGX4uHy0*#`GJWn|m7nd;D!PE}W;Urw4XGML0h1kqxpHw@L~RPZdn-UeKK0(w@RO zmsq+ij&56%OStTX@V9A(GWhD0KrYZk@rJ<9c zz#=1eYvsU;wug3v6TiJ6Z5oW5zCZ25^aRe1&XpdzmPcula}S~EbPUWI*{$~-UYcxi zXb{5pU&&BgKc`py7{0>dPHHY}ng6uk%myOV$!d1qU26TvxEe-9j)Q0&EjiA|C%?_gxg$yLBID7KY8W^=hq)`&n(6! 
zZ2|r(YMcY!?Nni0 z4z3Q}SWmA?I0jd?GQ!v)pBHjE+w9rs?}>RTMzcEGmixNKk;tfGL?-TiHc3%?QFwDb znhuez)Nb+@Uujl(ihcGFPs(c|rmWb`ikHc=W+RZ%gp8jAx8%pEN7 z5UrWa8-gr0kOCktdqCaS)lYO3{qP+fk$(RhXt?5Z{e=orGw6hUvBra8Cy}gUqqAfm z=5}6Tl1B8A-Jx4q6`eh9yLaye$HkQ+KK16w$A_!y#Yilbo<4^e0FN8}@$m%~^3R+a z0%voU_Fr4pjvQ}DQiMJ5HOujIuKoPSyxdmXn>Ta3y~h6JO@UB@7iO+z4gv}9`v!N~ z)$P0L_qM`r7$Mg}IFFZAQfAMY2I_3+TNcjupM0S)n2&#MZ`W4*t}?>I!+RB#|Gu(Rr@*GMlNtDK$#@4KM~w%>kxjm$iMf zF%j=EkO$CVVZqCq>|`qnwHJbh7`Ly&si?r^CXdIKU*wbDb;b1E%InRO8~5hv4~lzS zxN(mijN&z4S8uN5e>+ye>lZp#dc{g0{`SZ|WvPJs3fFYD_f3!%IVDnE)|`A|$t93M z(Rkq-i(@k{RstSN--RWO?(O-G2ZNZO7s{H!D{X6r_=c3eQmM@xbuA7~bbYfukQ^U< zZGHgi&=J;W59(q8%_P6AM%Gla@h4{d@S*E`WcJJQ%gAskv=x=G`PWC?XMe+kObGRX zpk+g}ScYG)?*}S}ScC9dhCcF=s>z0MB^FFvF7nK;DP>HH5dSXmU{2lcvQaLRC%DPI zj2Z}(nyRy$3p*u!0rjmx|D*BR;_vYL3yexvf1yQ>lb!G0D-*TSs>+Rn?{FSNFe2MC zo^I>`S2Ys&SmE)Rw_m&7(E6P0oyhh_WjxJ3eSU@A0!OS2=DUKjqJ2)>wh8Yc$U-oo zNy=M}GWF&MJv!I!JX<>+r``|ZpSbu?17qZ|xMVBsX}OF%I{DIgt**yDZ65E@xt2J2 zYsGoOHj=-d*KB54j>-pF804p03_J8#t$ng+v&YPvl))VdExoa>v0vcrA}BbuUu|~} zMt<@lQ~XB%U^-&Nczrl7o{= zcjbl@CIJB@c>sGXSqB1HGs}3k`AAX>Rg{LbbPqNSZ~NLO&0p3_U*h`6baiK@e&e;_rRs7Xt^y zljT`uw@}PHeg7hq8Y97>2b4A6UL7y^CTMoFN*>Gwz(7?({9#aVAJHIcH{1?NYb)&m zC-UH*sJGdtC+7H=E$`ITsUFwMA7MQcnb*l4j}3qfQ10Lxu>U6XBrGkQG&3(h{x&*? 
zLjNDO&N3>>u_ z=RN1kS!=$)V%D1AzV7S#@88}#I4>_AO^G)z?D;4luf%JqpT3qMCLt-`=v27;+Cy-X z!PEP-k^A)O2lKnO?o%Cb4L>ypG?DhS`1_xoq7eJ9e+qDL^}O7 zuq&H|05|3NtzTfe%t)IKi0V!cp{2?@#;aQ|yv66SQce(98wPvFU z?R1&v3*aa>+iIK^*4S#eC%6~(aPf=Y=Qx%Ae)j{;?Zs{;uj94H7%)oh7yU+d)u=jA zI>Jy(GL|*|knIriP?JN({FMY4hBZ)+|F*Y~Zn|3Tvisr&C_H}*+a!K9-WYynN3E0{ zcUOP8W|1f7DiCZ4z1W+ljsG!UOl2E(SL;(vq{X)d|4l(Zwl~*=k>!03^z<+X9Fb~V zkLKrGwAIhP=<0#5t->)XS=MGjlT%Y6mxsNv0h>Gj6{29h z$%vl+7I%&Rrw=6~;GbSkpI|~t$t$P(7tt(f!N;B)?#vtXEPs(2RQ7n z{RujZq>abZgOcm^ROHdyHnbLC23;>wmpFUJ4^ag#sp!av ztyGPO1Z>IwHP5=C1#$^#j|Y49bsad6^?^?dL&|}=t>#kS(__@x<_+D11fB0(DP|9k z>R}YQPy`I-v98nGE|7-!gWPP^^V;FrbZ^78^F3Ani7@-G(hk4h;as5z^6|Z2Ya@vE z&=x^NY;L0N!b3TK_IABeJKCpj3;+4k0+f-Es22ngGFEX-^yq6Ez7)Rac_Qt2yNO2E z+hc@88_YzsZWi7^NEgW#(}zuFTXSFrm3;ypnbJmh}bgWqUh+qvWw>8sy4ci(NG5 z$}wB}^<3S*;%h`7de; z4+#i`@cpK%Yp@ER?;J#?c_C4n7XR)do)?}En!E2eWUYz#-0QnzlH(6G zIa#c*q;L^T;5{3Sse7K5d8cvP<9jb~j|9>za^f=4e8Q9^g8egnc!J-%I{AlRO&B5L zelmQF&UsRP^zokAy!;#=>kP}tSq(VjQ*?^{*0eSl{X6cTD2#jk?t%at%*CyTj03*7 zrS=?cSHxVd<%dH!{@EiB;i%0NN1(WGBi&>ykS;L&`1u>i3ykF~PI`N99<)yuL}8LOD4vyhJoMhNmh#oCS+mL>Bne?Zaf(O34Oxy0=;L9 znvHI#f#mgtjX+Jo1ymeN#H?2#|$!LoCAO4bnR$`My+}O##TriM-d0-%iUcO?| zZzKi+dB}2wv{SKmdGwO#a@2@fm00Cdd5LzX1(__#5l6h!rHHq~bb^A7_{Ue*mqrKa zL2<5mTI3ZhfEFofAR6=G`~Pav{rfw3EBX*f5ZbmL^b7(_U!S&IL8RfiIThq6K}i5= zYRa!?3?cyfYPQz4$DAG9GWoq|R8?UE5x>I;oYw0)Ri%2DZaTKdLYz_GVPJPUp6>Fc zx}Zd&i4MwyzbkRj=Xd;?0b;)w&=jy_Ffv+WbL@`SOj!-M=Pf?K{~i`wgvV}orcM)t zLe1vLWhN9C##WtO%{HI<>vatloy{y-1kViKrm?O!#_)#ThYK1)*>Ia1(lFUwa*5O- z=G@3)T()f|`$2IUxD@5+Q029h<9UXp%I~DPrHN!7j$ucoEuqL7>gS^#XN0cins|T| z{6C`UDLbjSi=;rDIO*{n;{hSDYYQqa@UOWc#UH9PC{14l#1cIIcmRs__10YB;}iJP zH}xv2&}jeYat0B_t=RY-g6?l4_TGpWFu`ev8bb>XxHeW^DgL3oTT zOq{dj5BW|yDoF=Dh3w2;Pjjzp$Nw)9oeOLjnl_%4hU2~7KMcvCe0K}5jXT)PHu#ib z@%{H;$koeH+kb+g9%D)54?3if%dO>gnk=oB@qujG$ujS$n$zi??4w}p{mxyxd+f(H zCOywJQ=Q?F&Y1STqtlbXsw!FI19}u+pp!jJ7AX*aUKeLpE-RtrOT*1gUb0E&b=&vI z4gGdMt08b$@1=b4@+Bn=Z`a<|=D9#m{=(v-!{#1i)kxAYfbqv|06Q58*-%=esYt-n 
zY}j!ri)-sX9nql4=3C*lSw9VMV~?g%5Dgs`(Yu`BSMc!L;Zuw))^FxF!&^Iq%=z?=cQdZ9UqM_XPiwmpP&9gBW4kE;%?W_pTe^$lS}VkNHg zuA^7SYrO5JT#rhL_I_-AnqnPZf4^-~xBo`wvq)0{l}V74rpk5Z=qTIz#^#r{MS-!3 z3I-N4Wt;yj@d*MGi?wJ%MV`q8jg9?{Qad)y>FMSDB480RJXqy=xcjE)b3H-lPMROK z9Zwap**#Uo; zFz~dc&TMcbdQ&nq4Q|0fj`z$E)O`40VY462&YN)X=f4AS>eRHG(q;y49Aq5j=lQ>X zM@L7iT;p3-HrK*d25?uF(MX|d5ok|6NuovFoFZO02(nrEGUJAk9{FfL6=wJfzwqyj zb*aD?X_)*(nwQz>Wo`HT-TVF$D_bSa*|(FEQ`rRt@Ww(bQZh0~a*qR5ZKT%c+}zyb zmLCtJ2^7+T$lV8VL&K*)KuDRK#PQ-q=DpwT{mjUHT?Bii3|UZ23|r{CPkJhJHny;R z1v!%7rIu@|sI1=l9WRD1?Y4W+daCdYP}l<8>!N@C`e?G19?wl4?LrMcVE$4)m|p?` z>;k~bP~b(N-||dJX@V7blwj3AJlxTA*kXB}5s;A6S>fD=9?$(EGBQ%N=M7k=J6M}% zfOj_IK+P`ifADXX|Ki`u41*T#H8pPJ0-vsV{-5z?ZF~I%1qHOth!jwVCncil}NkgOVKgMNg%P|*hLYK!XHxGBS^X?@+W-lK*P0*B~^?Jg`VE^jXb^E&&% zR6hN5RUpq&_PBRzch_9OMTqqF;`E54hq34X!9MS{x}AK^&X!iYLNRIRFr~!3{H^8W zNTTa!CHEHFFv^;*p9$aGQiBK`N#NoCd()cbQAD)fi&XgBOR0SR{4Rre_)kHTt82*I z+=}D641ZV|ejnx=9}O@B`g*iieJY$>kT*QL&iw0Dnm0DE539d?ae0N3U0T~Hu}Dq? z3Ig(uOaW}U9xGc)==@i8$L!y>PN(C^Lkv|?>$cMp#+KkK7qn!rF< zZbOl*Sr@|cB`i9c0){Ee-KHQZDw=(z5U>zE+=e_o1(F4G_XN0{W327IHX>VZh>2-a zc$#?}Dvoh~<(Ia8MtoOP90oqx1VOAw?F;$@ z1R9dkZ~zkxVZ>9hcX0iFmeFER-`JR9!*295A~foalBB#-lU5JY^OXQ9`KwT_8)x<& zD~_%4cN@XG&}d`h)vmXHEf70ysnB*WbBo0GO~IXwuo?K03%tNByCpNRFoF_w^!)1j5Ec%nNoiJ7^LEZ2K{kvS>KbF(;q(_1zY$ z25%0ww(K(JDP=+^n6es11cvC$8qj$*0yh4BR{UqOG}BK$04hteG|F8wDLl#ON8AN+ za%!wK@*x*s?y)RuTRr#zdt|iZY3Vc;>UG#ea=EwwbZP0Uay2 zYEgexalq=eYoZFY#I_R$^%}`Zr*R$kYXJvKj6%Uzz1KB01!uMU>IDy-%_bCqBgVcv zOJ^UVh859d(F611sY}zD*k^1PN5kuHn)=yS{dHqC7tkMmxu#o`bQk^NjzAuZK_D2e z>MkRk`fi2*>>)#<;Fhgn^0(+0F?|4i&S9SC?GN)$?Ps@UfBIRh*{*xtbj3` z!DGq7K6JLv{ue%7?9+Q%6>zknkwy7_w|J)$Va<~(l;2mq!J^mv4Ue8P=DRmJ0PDxZ zagYXSu;kU(U((@J?jao?n%oY%R`PZe*MQV5!1l!G}(UJ6Sa0ABya>L$jXL)UC^qq{*cJ3AMiMIQ8ddG{kvU}%x7E+ zZ4Z*kOQhs{^unEX21{CZtN{#+*!T<7VE?K}OU(HSq+H6P$8SGtbYGws({>oj)ZBcj zTG|r}%=A2FToV!#dolR`h`f%cQ-EJO;H?}|d`nFHoGu6*%)jbSm$vQUTS5ym15B+F z#25Sf%#lX`KQde68647Q*}HmA7nNmwF7)cDpPys%pc?&PtK0H;0S3;bsQB>+(6%0% z&Na!Uxd1G+ushWA~ 
z5Kz_c(O9sc!}%$2GLYO(aM%l32My?{s-hPvrBPD7MQ=;43yvo35OZP%60W9&bZ{0q z<|CRHD3JbA2mn9o|DbHLZatjQQN^fMnBo~rbrE_aY;P|zK5;Q%y^9)}l|>;HNosu1 zX9BvYNinCr5ywB@yCUAI8V(zCn=Xc;v5!PxrCN~TMz!$VkB%lP;)p5{^d=yc^i_!idqB1tJ6zkevpEHR*uo5e;Vw7%zGVrBi05 z@{bD=sDa4Hm~+J3S%3s++~@&tsY-UoZi-QVPYB48?yzO(`1d3Fq$cH>cl6QE#coET zeHKIKu||>0`7W>EF-{xn zcJT*@8)DO$m}Ybim(@|vOcM1K7PEe-*U`C~HxR-Ib|vz990Av>sxz1Z2D(^x>$SQV z+kd?ZX$VP46L6tIM$k&9s)Bj^nPKEYlJ|{U+LB)N!O$~^3-C2|G&h7R1LzTsu-9Rs z7kzP=x3@RMpx1GGbMx!J{ea8$iIQpW2=1PAMscHaD`**1&_i~c>1ACzGAbm5yDf$C z5YIKt>g!kgUO_viG{8ny&|FEXkj_RFPbUviwvQpCA4$8Z_U<82E&QGdY??!0*ZZDc zgUE7zgV#QB`2HRFq_qu?OI22IjCTmd*kt9lVFwyt=C^<#!oDbD2n230^vi|AVSiUw z08Zz~Gfcbf5|jC0kz~N2=kaF2e;bLe=xLks_@}(OGr-XIxsm$z@Ksa8IMdzB6eZTm4u68-B6Rd}$)=o}>czy5FL)vVuxl~50fX1&^ zNe;(G*uxnj7_3T4^2uykFGL#}@K+WicAUx`;a@>@>B1R|h<*l!oGlnoX^!9uPb%kZ zb|2#yOsNs=Vgtl-zos;S?&vI&ah^4!QV4JNO%-5knwapUng2E zin{7N;p$k&$vRn(BiOk57m=5Tx1C}CXfwmYj^z*Tb_^5C%QIH7=zkhHFE|_re-Brq zM{xmWbL_qANM$EIa2v-V1(<0oPpVKhqaAytFP><1D)h_{=q{Pm+Wa_lcTB(v8&tyP zP>V^mI56~ z+8PZVmFnH7`a#tDsXwj%&?hhAV(Z=~C;dAfxi=K9^!Wrl1qZ1rsFJdEEOs_^$b;De z*xQ~+oB!@<6K3ep^UDNy7f4{;bJ*uJVM|j1j`?5qjg4>C_6OG}==sM2$oJn?KOM#I zy|*mislz7Y2asQY&&P%SnaE#p)lSV8EhM6g*G6J6Wg>Y~zSs8Tw@UyW*E6%{8$ZT1 z+TQG0O?c|dIfYm6Xnqa|kjP)s|MKNsJcH%@4(13CyF>Y`l)I1Dp}k}E@899_zw&ew z&$$%AgAFl3>FG>h{LS$F(|=P`lj<4+CW7~cwY6WS-KL`RdjHVf8#l#c+<>C6y=s}3 zJe!l?6Z#Ss^c*ngtn1U^WaO1TkeQ(O1oN^{w+Nr%d^>G*S$@8l|A0rXULlL<&MYz z`>3c_lok)(F*m;<`oekeQ&>TaQUbhho<;b#@*Ki=0A_h9+o$k1Hm|UgP0fM5Cc9}le)J}>OsCs zCKlzY04MWpAOFq$nXq2r;)U|)+v>oiyoPZ+jiFs@C6@y9hXjERFZIX2f?g@$-2*ZQ zm5I!gbe#-k_e!88t3{sx8szbot(zb~kTgbBX+@tzvG((d^CyAs6d#`gvR4lb+j-`K z=sW!ba;BB<>0=^EctAOnIYu)kS6U6}mT_H)vSoACdCI)fO)A|L&J$aM@vl6>@tN@E z>FHa?<%H-N9hFDhbI35QNPs+fLtqO|w#MShqsUc=D)N_-k1w{iybte37-5VG2jTTa zxA62L`ooC=vCn3g)?dgt&S6sPlRWX!#Ljz8RA#%ti>MJSl*rnkcqvBy?dDQ?jG(CM z_R9TE!TooS>mnPg)u>RzaYW|$<0@%h@ydY_ zarhqx#8Hc^3d?-Fmz(Q%sianIj|s&IiXh*aRn|ux=_5~0Oz%vObhGeWgamkd^z9oW zkfok->^A9YQUryF77+0Z*X2o+>GWs)-1 
z5FnKdJO{+h<;kM8@$G_`fq?;OOeFZX^Vx}_JRzpY(vw$X?QA*noysY5b6Z^o(q#^( zGI0CUQ%#ve8^NscO4Z}xEla)C095*cCx5y^y_uLLD4

~h;HRX%di}VU&al?!+fm% zl8h1!1j_0794sgC;4uc&sdj%2f z7#UIr*?-=qyc)i%Nax@}0%u!!y(DMB_(6&=sqHuMM|vtRWJeq!a1DFm-H7$W5;U$6 zd3BR&(xi)6EW1YD5sne8fsg7Y`GO+g0F`YyLt13WMFwK!k_3GyIT%yF*`giSTJzy2 zVcFf?9W5zTL7~L`avl0P?c~pmina#zOFQy3jIsbD!$fcLq!AXZgJ$zn&8zI}^PRoD1448d(bt9Fn2L&y5N|5|hL-Z#E-s)N&I7{!OWb14&>gB&ZfSw^;SgYn=E=vuV>1 z)Nh)}&vH&7rJSAd%}7v@U%M7S(n@_a_b7fSW{G}wO`IhK@-XPgkLDf<7phTy#aDw(|A0Zh?AT^e(5G~XTp`Xw@KUL{qp+Y zn}N^98(|+bE8TZ;o`2rVtzpg|MY0hYenKBLd#y;_#kfaMY zn`$;YKo`E6kbMe*YjAHD{BCI(`gd1^1za|N=(BtnsBjJh_$+^-X>EgI5GxEcx%Gjo zI35`3AB?dRnYDv}5EQs^2Cd}A5NE^lXa154;?dtS>Lvod=LO`Ug%jd_rxHAFo{_bd zI{XAyy}G#?5I#9M=`uDD6Uz+w#bA(YfACfI*rlqMF>duJE;_o-bg9&qDmQl&{xawj zSWxAU$t<3Dtg?w@H6im( zPPi}Q+l5DAgN5q!8p8Sw%*MX(t}NrKZ=Z<=sQio{Ed+j@Gj@gCf?&=Pg$XcTt_j;# zGpK1^O+~RvF2lX0B_&h5Bk`5Oz*NU|>38ynu0KZ76X(Lggkw*D9gIUpdsXExaWhV} z%k8Z#`ir8123c`gr=_^UN}cANhEzw1_O$^26{W>RAxLw#GBY=?Q^-YhRdwii0i43tRWpTLA-f#C9f+v!!GkFd4MvP-183?z(6UM zMX19C#xevYKkbsSOX2?ab(hA3xvXcVN2$3@#lG*h_B%=bbuEn@ibk=k7-v+_&MNp4doGo zO`5h=m11CEbVlrO77XW0PYXa%*IM#qDt`U*7)dH7V4S|W4UgU+G9}7!>#itlKGFos zR@rX3kn=nL5xm$MeRqoLgnUtAGHd}q%apbo-^?|(K6AZ5?sp76=y74D4iwn zIsi`Ha5)qSsU>t_U<)-duvcG!moKl9zC@IW1xsp6q`NQVx_D-Ou3|knx(U9xptb&7 zVFOvw*L4#3a{b3M{PF7T7@JHl=1EhSsam|cL=l6OY)@L3)y6A zi~ME6`^^^{T`Q;PdT+n$xjJ;%9a4`+_fkK21ec?EVOvW>ww6#w2h>xk3N#F&hXiOU<5w;x>c z1Ml-+hZ-vV6)TGtvMN>1=J!R2gG^1#$O#Ukz=|rfr{@A?D=imGxDt#5ltD5aG_yNi zscG6YOAS%tpfGW07Jug`J2+}F`Qm~k(Nh~OZy0tyJzc7+Ik8I#_x%25V|6F?<<}0c zNHiRR_Q}W*l9ei6z;1DTne4`=VV2Fw46%3|RA^em+zYpA1Qzk6rRmvykgsd`{f?m3 z?jUQtXqy9DFP1BLw?>w#jI+ogv|)~mu^x)$8Xi6TilX>A9(Z`Z6OpYQC-ke&z*+-u zQ^>N~Ka5~2%dY1cb8cHs9$T2F}i*R02Z>5YsGCHi;Zu(orF1Uv=@Ow!W@Ip#F1@~t3YDdDG{9t&(}YTaW7r=?2qdnN!?r_ z8IRbPaVnEx*xRWBf=6J&7c6~WVlcK8xl(f%i{aXjEM;!c}mzuUXJnAe@nC zo2~k==BU+MZD(waqG@|b!8%57H@E_0vEk5+)qb4tCc7Hha zsqk1(@Ohk;&yw+hqE$FEd3xNB3zm}CZa>m(&e7@qUxah%f3nmXtcZz8)>&Kjo$=?c zuCDyWBZfIucPzs?+J4i1w)4&C^2Z$MQ%=OBkd2i#H^!3~EY)fKEbi!Y4wnYv2I|lx z_|=r%iR@bLei8O!7aS6Z;S!`R|KJ4EkD<#R3Pw>;Rf_r%K{##pOMaG=#J6GgjX~9W 
zmspi0KjHf=54~hRNTZ^}af{wPS1`Lx z2*Q!h9z+l*&H+<0yLT04^-x^hGeg`=m~#p=-+?@C^0p*lA1k1UN)U)V2!fFm%4@mg zx5$B!+TAz2uGbiXBfJo})N-y3dJ{#+jtgi4v7XL#qiVCoL{5+esemmFfjTWAHUuWz zlbM-rNkO>2UI)*-(n~wg5)AtAjS@yin!$enI_ACzF%7vWdOx;%=TAOtab!AI7edHf z-vAA%kDM{UdoiB&xqoy8H!~I5JIcZR?0LXz>r6Qh_I%WV;Uj(^>WC(@is5yhN?O$< zR78`dJ6|CtWtD)%L{@0|;DBx;ZE6dB-3l1ExIQ7r$*W>weobLnQm77*DRWNlJz zEz9>pgp?pm8g}+_B2C}@Sz7h3X(;GRXdrfCWxD8t#mDCb{@&ylo)7aDtvJ_F1H5Hg zA<5hBn2Pe^KZ~Mm1Fup6=n;QVpXv}$DREX88aQ!NJy-k-s6@?D0eye)f|kn1G_}fH ziP~%QEt6igd{=qPQ`E8Hw5Gl9>kV{NCqZNnhh^mZJ-olDrQkPVC1IHt zydc}vz#0;%X5PfB5S~m92Y{p|ufY<#Y=h>H+!_0}2 z#Cb1+AR%teRw(#?J8N}$;0F<=Q`v&&i#!}2#U0!67tTsb5v+Wn6x3&GWq-wVNrI57)ovn$9bI&1-$yfVi_w-74uI7lp zjr{EEF8CFJLD;zLT9#TxXm#W-OieX5G9f9Rov({%Xn0>defrc0)7r8k@S#8^B!{o1 zrP*}aeg{juF(d@-N>0f>Ln4Unb_J+vX6J&|585Jt@$F`E;?ju^EgfBGNPwwdM`(OX zPr2s^wz|4{uTVk#fo=BNh@1j?#blaX;6Q(++!dl58jjY2P!a5UU5h}pRz?|(K6!Y{ z`2Lhe%j_hxgMlEv3^Q78<&9yzrtiZOw)|pd%>96GIbVPO`b9f(d-u$Q3^}>9q~GPc zV2fc)Nyocw zSH>7E15TLO=x>e8njkbDRvXX+D|u z=G-U90Gy@x81b&o1z&7MbJbBG!DzWKc-1qUfFD_j+I9`Rw=5rGmz+5Od(kF1m-q;wd8Su`ToHppeJmbQYANEE-){{96lqTfuR{ z(@r*3;jk@~8rVcFf|y*UBgZ603I6!kTw&!FeT*MulJYn z42YBq40MGax0(yDP^Xg%SM!xqjllH09e(JK5rt19T}}#1j%QJM`2|AE&l3x|tF><| zojKfXSn|DRYK|RV0oE)O@-IqdE77YfXVG#^8Rr6NEwOh21>s3)TqN+S%a`$qmV_t) zmb2zR_8sAsA7(Fm<2l5Ne#@3nt4@@gok;-ukTtS_ZY-tciby4+zmr5q>~|RdbKwK8 zKmf>ye1x7$`GE63n#{HWv#{{f(_DB;IrxM<;$2I-iaQnJo=&UzZeO5$i*9TI@T_;e z*Z4dQo$VM@Rj;vFag+yS?UWx;YojXxtKQ+eHhP_ON~y5OXfL@AHC41OTDo_iF|DS+ zPqi+lh_L(BXyOtQsJ3Ls)l)8=#ix^m0 zd2V_Wku|TLFpw@kxep4Tqq@)g#>{RQg0^n4oX7T!(+`Qk5-Q5h%1QtN#a2dc4c=jr zaEnXIG}|de;`=S$!Db*U#K`y#8Di=k6ecb%QUX@JnDUl&SHwNT<*KzwY2FWB1bqhw zn_U!yH#W^vED%zSp)IMCNwu84?ANUt_Le~>?==$>lj=#xvyLW^q$BJsKXiiK`!AFF z=TF*Q)CyQ=nAIWI43pXPZ0H;eUD+}Lt5n18*sos2r%Zzf;K|JjisbJQ%#dD-10^xB z)<7JxSbkN^cE6{7sxbG+L2Yyi3NYrDL$f`dazq?x1RoQh1yu%PT0rU;kIA+eR2DUbLTLJ~Gb>y&@-B;UK$Dtd@3JApp zygZsl3HQSZ`}OZ@gGloTSnXsXm&!Q5wBSN^nkW$XoVjxA+KhWROOfY2f(0^B;#2+| 
z{@7w~+mL$7r_w;%&7VF&6QO~d0ekl(`MKE^^B)a3N%B6)UR)P(h?hY3PFE8nvbL#Q zUWKhU<|k>h0pe)RM5{tDO}NmDR~Dg=$~Coa`YTY>{Jy++Ou)cs^>tBvJPb3wkyGR$ zZXE=fnFCCP@yjMi?P-cZIdnxWm{Qhob!O1`l3_>*P~iMJ%lr6Pfm~Xk)60~%_=3BQ z`k6^B#wFs6Vor^LcFT_i+J7d#zQt*2NYLY@KVCFB|hF!s4K{Z+6HV@SK0u`Rj z1hLD%-t4aP6m4c0gZ{iUn2vw~qcBjFcP1LMT~; zH5#<5Mo{o*Su+9SLkR_&5BP)HRCucvKa89sP%!X~1@kDAh2KxukM9BYUDG(yedB8O z;tw`n9tU|?<7UClpuf-OG=A80*Oo73ycW^i6Nnhj`7e$e=18C3?0x5JbXN@g=9HSj zr07?1wNI1V?d_e4wx_~R7gl8*X?C0-HJ~?}3cMo9)A`=^${z3Jgvlru>ST$Q7FgDL z{S08rnph%UVCg-&_`P+X3*X&#V&@RH?Ai;#X|GVOLI9LANA+XBJyA1&0*Jn#$E;yD z=~V`KsAPZe{>OBTeR@@^)TQid+>1`U!f;J&b}ljEdV2K1b6W7&`fSIXBGf(F=vk(; zswxKzQxR4cO;MBQ>V*5IRZYlhMJmH>6Av72G|E8-D8;B#}jr? zo7RlinU^N8Iiwv?pUk_rP@^kO_xB`Xbe^OvSiE?xhqEhT86VnH_rqXkn5P*^v4IteRZ6)@v?5fX zF>n&5nl9w_vS-lFy{%q3LPE(2oa}vm)dLi1vD#uzDX==}^bj2bDZAlp(LIFR*7brK zo)`PL(cduPgXTUVP}kgCBid6103JMIyFpYPTBY+s<4 z3wp0N0N@=*g7pb}VnX{xIM`MA7zn}1T&>t?i0x3FzNpx|G9mm2U=foSL@@`jon)wvm>k+-x zbs}xZ3op^%zZ8FQ+}GC$$5@dCaOX_DMw9REO5Qz!a#VU;J$9BhA9`9C&Y&9Z$>x+( zog|j+j+QzL(uglO9P_~m* zf`Kkkp~UC)WkE$_BW%C0MSVU9^#b7!?F*$JD)gyQs8`;@*`(gp@Hm`NPCmy%#(B#M zM=#|IiyayGUP`&=oQl68{(acevh{DnKPF!k^COm&K{G{%>N7aYvYtwSLm?}H`cxDi z^LJjYzpz(N?|&9q!=wI{3N>ATQ{2sdO-W6dZ(~Fldpsm4-d!kak zwJ^1gD_%KfV$#e1CKpJn4O;okN9fBZq9iVGLP#pQ^~cx8kQ_K2vLJeGO)av;BE^Xi z+8+OMUPlHz+?@L|`#%>qq(pHCbE{rL*jE53dhml6V+SK~#GTi36Sl2Yxb-o2_nHHY z1R#r^n zc>$Ls#AIvSe zymOG+rp?9Tl8DSnmfi(}dd{C2S0!jPpf5leW{9w#jh2h55FH*zZoppx5s-W)UlLh8 z)&xAy6V`=mQ>F%t?8k*D z?ELif=+709@iDM~10$gK;si1^(gOt)pi%Lm@fV6aubP0%bH;8mg4vlnocQbU)%)Z9QXTR_N z+iUOjxn?Z~b;&UIJ=b+!=Xo5*kCSe_Zske8=gIRHJKOtS%}teKJos-oSJ=B3lk00= zv}U>`GaErabSeJIwN;DxXqcH`h8Qy6p1ql4MS7-kZtQ(kleAyNsT{5x%kCcPbl-u~ zUZ>C*&(PQ?&IPU!YuONC+!sru?aW{v%{956HDmABEOJMhKh1MB*q(0tM$pX*kL#sw z#c%+#zMs>G4Gz}LD2WJTG^r}~?7Y%SOZiJg= zkC_tpXI$S)VHg|dO;#}$luuGvod2Qg_`#aF!?W&v;Mq$V=Lx?37Yr9VW{o zy}L`rejV>vwq^udvf8S)&vsYec!!I8&pV=8qn}{PI)k^U6TE5r^IMZgxEt4Q_`_ig z3)e63o2zqr+NF3c4~KbWzAde-0$z{tNHkAs;qjcrD&QUkNZ)&P9-s^GE=`?n*jegC 
z`#i9MK!h;XPt%rz=sSkd6$V}@RG)U$&esj6X3EKu`6C!kDWl0w+a9%fRiaf+CdgkU zYq#=WL{@Uj7|ijSX0!(g`C53KDuz*rP*=WD2r#HEU|gIJVSCY5o?9kIwGlZ%P`gDX z7squEa1dg!y~;a6DfuzJet3zuk~q-O*?uf2eMM^VN|3hA1C)X7^byhUjFFM>fUjQ( zU5R(4&#sc%j8#A;4B8q{v0ywbCo1lHvzP+UXoh-vgFnD^v--Jjr8@xB8o1un22@|W zUZVVA%*P&4F$rdH2bg$Ay8~9N3xT2{9+-@zXC7~9-FrMtFtZhfx7b?+@fX{~bQNrg zY<}nDqR?McyQgr~9ZNV7Ab^0-4#_hPru$zsWeA(qosm?M;;3B;U1izt20 z<=8TLrzn)(X*qY>Mq4Lq&;8)-S6EjXuJTUjaq+OTv;OYxXcS;b(F`v|gQs@Q?WQ(H z&B<$OCP>UoP&mnRofqkx1vqWE84r_d;)V*K8a<2U+ja{sKk>L3ou7R}KucZr7Xdci67dHmsIg6o1z) zR&4A8)E4OokD?=0M&1u)8VZ@%f`Xr$Y-sGatJ{=xwyx}W3&+=&PC-0?Z=Z# zA;Z%%WnDj?8ycl_gwtSx5e@gEoAEi0PmOcH`cEl>`uz$;mSkD0rq!-w`&F%8*XG{3 zZH54#4Oh@Fj(XD8u*eBKR{pP#LRHbUVEDUhUi0mAiyNuI(&i@;lh3TKPgaBJd3JLf zyeK~~+%L+_!hA;bb&|11iq(phD~{(G`pY1AGq4j9ed^8*p=5zXeE+^;rD$D_0F7|vNJ?OM zWu-LR8$@sAV72?jw@lRmxn+NSpd?So$r`yJj@^^#c{ z*C1#wY0OQcqodMG565oR6tcPF;>(uI%)^R3-DtbIyW7gJYsQ52W*h5_XI&Tvsr0mmBXWWe z7(B_z+7Bp`({fF}_VMmwxiHMuJLWi@q|)iStm zR_N&Za366*n44nP)f?g~?bw+4#ut^dcCHDZGzQ1a z$o-IXYkPrQ>n7$n4`1vRlZC1Z=8J^>YqNf9(ILCIVoq#zbz_@r@pk!ikyCka!Vbh< zrVCQF@!PFumv?-`ug`@+hXX)rIc-N5yLGw45ABzn{G2EQI=^a~8MMwzOSTwY)!QC&OvqCD)Tu zi$$kE*#bPwE?U30t`-kqQK~v$%E}17NB9Na*dN)H!@sg{+F%EMwxtkWou)+^%xAkH zxA|*z#r7WyCT;|W&3@j$_QZ%BIn8HO>@PmXdQ(u=uuF>D%K4ST7ZKzx=3Vs10na7q zZuJ1R7^?c+U#|*+D_L`ye)*l%Bi5Z#qSu|b$=7*vc{_NjcE6_JSR^)VEAW$UV;vX{ zZw3BTjOFIXg4B;-w#5HD5OtxM>$%}^pTK*lj`lk9@l|w$o@+B4Fg};%m#Sc|$5}2_tVbkWGGy@)NKN`LHxEO@#3_Tm9Mt4rLr<}eJuvjp zO4%G*slu3&Inf6yJxvkaFt_AAZmVk6ll&KKx**R~M1{6IxdIZKQVt2|fKmp85y*EoirxG5G?Xv0AwaTy$*-U z(p^z-!~Ws1`q0-HERB8r zq{}f?`d^rpW7g`##^f7NR3uY84qi=~WZY%Y+H-ld@dNddiS!8i^AQ3wf>(!+1rgQN z)v_e>D=UPLQeULlKWy<2Y206=`4$o3>+O#l%T-9iPc1)ob%psGMsnwJ;s7VHN04^$ zC&^gQV(AZWx&AJexMsYs>A$bcj?&NQKC?OmUO-@M0e9 z_wHBU3kk{Aw&K~nB4tqBF=h+#_9m!=obUtR!2SM}@9pI!n)$FV0I4;|`S^<9_=o-4 zj_Ia|&4YoeQV{7TFdgn#o?s1$PU~tOD?<5|TPGD?X zei)ya_%bsS{NVff@n!@_LBFxNc?IL+Z|NR1{HID)83WO}t za0qsl!&zyb^5NhW%m=Jw5+2a@*={Fh7qiVD-(-T>C3l?LL}boJdz?R>nVsoHP!TJ- 
zf_=9w<8hT@GS|JF{*U_e9tu1)^)C&6FBsa^HSxKga)atdFkn;5vRBB zo&1Zg0oHN~E^*u95K*B7)ethhU(B-cc~x>pk5koxZ%N_OJDk;#Q}8A0W1mc?FB_ni zP+xo6Wx~qhAp3GRgFG~-eOtEcpgCRh>ld{E;G1+ZK7W2gtfacO zx>K}&#kx3vek0Ird@t$O=V!7;WiZWkcFcFu0_lSuUO&(;a$| z+IejH%l$05Nco-px{NBY5>8ZE;oZ4&Bw^BK0QQCd!Et5K^WLP0PdVFxlY;j9Qo%QZ zFTe_2&0i(Pq5CgHH^@CB%>%?AUNVqwsiKOHNX$-etdl%D`5$GBltBC-!LIF8&45`% zwObv-i~If)u=z@f!Ewp?mNii(Um0ymC({k3I$v|9zVzHKxos;~^sYpTXR;NQ$(C#+@WR@}o4wXlA35B#gwMbx4qM`7|i z9sTCnFRT||2++~9VabU3Q3K$lQ)3OjLRa9|faY&aVv8^so|Kf-!FM7eA|N$w`S$(0 z%Pa>Mzl7i(|BDyuXSz-O{fd=}iEQXVlS}>dDPf<&c+MaF3#u-xruY*k_4n&Y54l=z zZ=BT11mqvubXSs1`E$jMgZ@E$YaX4qS18(`YrsaoM#&U)R7t_>#R_>hi!h~um{`8s z!NdiP?NXE-K}Kfc#f~(Ki}-4#hUh$k$MoO*G8^04h`^B`7O280HK6!m=1z46ljr*c zw6g|4HhV`^Rdt*4%PT$nSOH^vp4j8-d&6MJGes`4S!4J2Edw71vH#CoIKdzQBYz8g zkErX?M14%Km~zpm1dW~oi8vCQ%oERd)#Qk}E1#R`xZ<9Wkr9Le*A;K*%c>50A8lA9{9i zaY+e26{HRw2n9*CU3aU%;o|Y4Z+*S{)7gK6x=12;a8a}ouZ!SSTQ@gqTKV{(zYxF& z*%Q6JS{^M*ZU6VQu8684q+)&w_~b`=_{7BU{(R0@FuzA30)dQ-jcGtntiTFc>^AvH z@Zx+}t-VN!-H$wm-t=1 z9Tjx>X>XB~T)?As#WjR`wZ5fJ9SBVu;46;o#;3Q~Er+a~t7dk&XIcGdb7E~>rN912 zthUfJsbrz=sd~C~8+I}^if6Fo(@9_V#(K7XtzKU`rh^JzIm2jmBxh`tvdxACq?yN# zC<-&lbK)|txEETRWNwLx;cL6z^g6~n1G&I$J2vn`Iqn4}5u@%iA$-}+}i#eK+l zdQYN$FMD=ENykbn;}|2&l0}X3fsgw=qgawX-0?R$b@mk`?+SSD^10kyd)7t-hB%`< zPm`O!Db0s! 
zaT}RzNwALgNPWU4BV$Uh7FgWf4ar{1rW9)FW$xAY2S$5x@L6x3XT5QC3X?Q4@-@f1 zcaI<;D-}&;htg7VHdT%CW@6WMLN5B2w?iCPPbqpE_;rD`VZc zBkG{=urS{7t?tXmv~sab>+a|KO`!%3AGoiIxWEQDc@i*;l$9px3So4fm>uJ$`ZJ$OnA{8sQoma^>2kL<2%g5b#WkZerk zla{T}hyGnTDM6unx#huOeUe=XH#fI~GsqPykP;DqR6J27-TR31FpsbES95sz3^o0; z+QV@Dk>6ZQTBUEAfA%B`K@DUcz37Qwa(U0B<1NK290;7?G}M4nIQAwKa>|NS{aV6x ze0g!PI6Z$!iMV8iP<#DRAXN)YJ$w7i{%LQ)b6V6IOu*$dv`r`mf5BJTG?{O`a0j(s zjDLHdd*Kh6RLEKv39b^kDzH8}I4S5ViE+XUBMg8Xwq97!j?0|!NN8$kZEfO+ zi+cj;PZsjMRG^0JA%72waReI7CHjKYMPMp&Cb@ajlP^9zG^mTitZUoG?z2~pe~et{ z+~wd$bGTDkzy<~#t<0|lRUIU=LgMe2`pvokKk&P_SiW@*Uc29w#~Q#ZcIQ@g z$So?=?tyyjkXekP6wUCURasRU79QN9x=U)rei-0sTyNm*w}@P!MMg&g;jGcT+TdFW z3G@d9e!9m$*vGgZlKk{8bCK-nUdTQG zGYY|Pe3VPD9d@?&udU9$@~x^Wa%#WsR77HsQK}D?T<=xH0xeFHx9SIlRQ_Xm@F*tjE)LEoX?B z4=3pByRsNiQHc1Y(HwrZ1uJsW#)KO7;(&_R1Ni;>%f|3A_b%3V>0Gr}Pl8h}PhqlZ zBPsAJ#a1;VZ)!#>Dyq0AWDg&PLND?lt)6JE0=Lxxd4{*Z;CJvma*isviieKQ7)AO@ zu&NI#0)F=C&8S^hMPxY1?uK@CT^%8nJJ@reSAOPnpmh+VRPgFmlRl1s!wy!m>p>ju zD+;kXlq4V^=xex5W|=X^K{pbPet5{zd3tg5&;ZphhiPiW+noP?a@}Q~vr$lcea7lR z!!nscow2{UPPMqInp|ahGuds)6xE{g^}Se952fZjT&9BG6ZI0^kAWQJP@JuSStk%# z`1HtS>m3aDS1zs23$eDpbWzESsaF(+FVt+qdZ5=wkjJ1tf*`{ngUZn)7-t=j({T>X zb5x?X{cIW?kO}i(&BOc7*guVs*jQ+*ztcP1MvKS+Q0nLPo9LLB%kv0$4u^n%(T4SP zCm!+^SY;1H01pUo1YA4JuCJhQ22gt(L_iQ^lO4MYLX)3fG+f-JBM6qy5$C1ywKotW|jhDJVmg(+bbW zug%GsO)4y_T{m)lc9XY&4UDvna?y~)uOx3rlFu>=@^62w=5#0H_^Yx9r4y|Y7D7>( z2VohHSJ&UF+gY;wYh=XniG6I0a6`k7g4}T7X#Es6s`|S60nh-~+uI>uduD--106|S zEO$xjTt+(}4t8g|W$$!qO)w`o;RBP9Xqm0+Lu5V1B#BtGmP>iJ@m*&Ap$1?;^gyD&U2>jZBT*QWu#xJb7)F=;tjCgkD$_So4 zQx(`k0(r~Rc$tGy{kqdjD`llv)@_U1=)Mb^ue95JAi2o01~pakdrA z7w5c_ZHDUqgSmF3ZP#`78^`3FRJBT3rWG)&pR_1W;Xeg;a zhk-z&e@diB(dE|ak&dRSv3l+VGYd=K`|17yiLD^_#^PRKVc{Q2S7Ea)4n!H=gVByf zy9}#)rk049*l+oeWF#gfKFjyb%S*6lwA7GNo=<^y>QK|tK4gU+-a^1d zJf5S?C_>ip*gR&QK4OA5ok4s`4B8hn6e|~Uke6q_yZV|h?ZfmZhu)RqxqQ6ZQ-OR* zUt&DG0^-0BFppewggl4l*z7TsLC3~;CJ}kY2!gaU?;b0Jl82(A!@SEDXNQTb0`^!*YanC5&jhW3d^F72W#D%fS+U-FrgWtg~dgN6pOQuV6eq!ORLhy 
zEwGLX14bd)Wk~%VU}NEkzJ2@E4T?`0d@s(+t9u+<`l7n9a4#BKOW|lMUpFsu83An$ z2+w%-6&pr}?wZ5(d4;z!4u!-oU3G9eN}mzncx#F%xu+hqE>c#U zot3lYiGltF*wvc`JJp*vSQ4Rx8B$@sMPXq+M_J6l5gb^)K>&&M;MH5zIRqBns-%xp z&{;j2|0y-&RiY1`unh;AcCt1I8jtP-HVX^Sp-jW(G zb?LO5yJktm#yJV&vnNaD52fhvA2?K_{Qh#<8eQV7k$1H&JC=54PF%EhKH2aDXXF|u z_5KqA0LJB^_Mp%4g(=$cV-X5txNVfW{NFt=5RD!3Q#-L~aUI6`M9#H2UxWhF_$(Pp zh-Lt{zFa2Xu9_BmMcSsk+#&x>y>NS6_$w$HmL%v!L==at*S7QHY{O$>f&mg(t5CDX zkSJR&+0${}RaP{g0|)a=o*TJ2Ovldjj0mM79j|R@2G|1{uS#$kVliSk`v6@fiqZFc zZx}+)IfK~Dff$S>wRA>wEUfN^fH})DfMBynD)TumEKs6iSU-O*MN!2j*B#O9VbX zJ^_0B5&e~2R$%2GKA3f|I==$T74mm|>KXXX*%{^K;(NqJA<0LlyYnWqX6~pP=Kk(%`>=U@rU*D}mVxf_7>z*noZFyySbs_+F851mEVjXsi$$-6}u157Hhvo{&Sphv3b8k)_0!GmYUWu z2Lg@nQ44-3e#&=4=Ut3}b~>YAA;!Weqr?yMT_L=;ynr@p{JD>MeDdl)Jwj(RB~*{l z<;B{OQ&IAKWI|w&b;rmPODubn_QmG+i9J>=YC0mnxNg*%bt%nHxt=^e3cu6RZs|gp zQD2yQF(>3A7bkcR;c-(RW@>2~4YzqhZ9F_fb-4pZNLS z4v6w_uR6|7@(XpG@Y`cfcy*qtbU{W)2nc&#{|qPge%A*ee$OHh!4@eCoiITry-y;@ zB^4iksf8lsA=N=WOha(f-XH5Iq2}^MB44)C zpvH-^E;tq>CMUe5$jW-XJdb4bI5^Vjt#5}p^3+cn@ey)B z=BPW*>bjlJC!uihQ`L7`z9NXGu(st))MRXi`8bi<`?*EEaVGGBJt~$?+I&ojJdd;g z!3PJu7-bjqN@)uNUXTa2spurn)vKI}p0a^%Nbn?pqNggd|K61~DM%el%~4#(qLN?pLBg z^jz_@%}_t^d#HiD(D#LE-r(pW?eidrh;?o4i?ZN9hb!QmvjQkN)N2mUFJekQb$P7_ z=^tnSvIpovQVvzP5-~xC;wy3SNc-*G#jmP;pK79b$z-KlN1hY1L#g3k&XS`8ZGHQJKzyKjhdzrhC5?axpH_>Y6Yg4g=@q76@`l z-CPZ-+%V5f$nr7y1w)zl<|8X04|iL?Z15Xzj%)Pv9Pj5@=rMM5#2Xnu>rtARHA~+B z`ioPY^JS{*IG?TF?phxTfrdI6@y+FBG-6faiU_8ELe{$(t2ERIBf?4L^l(Wukp zId6nj5;u02y~AlFg9{=Dxp1k3pLV?edwJU${#p>FaG@@DFtr5(^NUz!Pnzcs&SqDQ zk%zX|*CHOHi&`N{b#31s^|7$*pB`2gq6-QM6{Oq%r1@XfT1SGaZ|x*R3ESJ+HOJLN z%ZF^qkQ^fg-WfC?f4b7LqQ@&q$gIJsWcdKJ<62*y;)z_CU2ldVl%k(9*^K;Vr?|x?g-KV3Xtz4}bAsN3iG1wKyANi@krqOp z9Yz5W{AI)ePJ<4VXJ|plW>L(}9$hjZocGJ|r|U}*G1js@vs;hHz@zlq_(5-u&?P{f z@BNY2MG$D8W=6<8JR!gRSNI&mF5+AC`5M)IHLHDW^T@p-~z zyiZ8@wX)LTD|wtB^fDL{X>>94?t$c*rlK1jo{%BFJ%{1lSqH~t{@(l8A3D_MP>1%< zyaTdPPS>2W2{^f^rfgNqnnlpj7xRkji1=5aN9ls6QeCNOriNflnc6gq@>Ym`F)??c 
zKb_;Wy{ifusQREq1hnh%yG4twodzbKK1JHEuCAiQ0mYm3RZ1Ez@5aKXre3%mZoUUj zd0Tyf%=6I7jshx+_CM4mz|-p&K9dDU7Fj;q(-CGmj+7q3(5iWNEpZusGD);M~ zJXX%C)JEUr${>=hsZve<4P9crW<^DgE>-Lwu?kD7JLgW+{%1`XS;_)8zg=+tZjAMU zZ-it0zM~^r{pPb)x}~#*D}U#VENqSECNqT0^gA}3xSo59oToYA&OmfoS#iEO-GkyzM428!tnK7u2qD4UJEms-v`|-4(q~x1Pq3vKhZ(M0s?%( zs=`P>(E0YkdB%LZ72?b&U;Eh(G+6xIbOj@Pu^P37%TcNh3II5h=~AwHpXU@Z-C zt7Hewot{KtOfUproLq-6B%{T!YK646J}HGOjQ`$eYG`OsGdYBSxEyNoEG#yo4K_n^ z5dbderapAr>8zO8{~Z*%Iy$Z``tBW($0-aMfV?_N>1^QORFn?bf(gO>skw~Q=TKe3 zqobDG;kfUTs76>#u(vZ(Lln;86K|h|y8mREwBJk@qvIX}_$T+;_GHP&*TnrIwYI+2^5W%Se+jO+R7>;Z>A{g@t1Uye;!m z5)#1$jBIQq;o*bQuV06|v(-pi0e4a8wO2Rr^=}R(q@wmV)Z-MO6~PON^k|c16}jst znWb7soZ?CWtE(n58yEaK6U={}VvJC!L)tilSpN8t-Sa%?Ip%dg(v?!aSDv1kxnigF zCCm=$d>XTVbkYVvj+d3DfVy{iSW%?aSjUa2xyjfAuJfZHuW26Mt5~vH-(TXg`T{$o zNxj6U%u)nl7yk^qdOWo^pXl?$7&0WY znoVZz%LOs`1O$xZU00HmYFp3t1?hsL&Tq z7bBD>$6?y;%BKgwAloXtQkCqa;S=XrBZ=@c`uaq<;p`Z1e{5T``17iZ?JQ;wx3Gcy zsE}GHbmBCSp=0=9t2-f+DJO)|xNGF0`J6uXAMY3atS{ORg)t@Qu^?*FJ3}cOf?Zyo zp!ojgoo(eU)Qu6T-8l8M;@YM>;0BN+sAH&U$tDocB?y;W(g1)_ta@_e)F>Gnt%X~M z&)fgdjlTMii3EWKGU#NRvOSi4xhPm|=#dVUPES(Vzz)pDoU5B=%s0Xu4b3Y*_uqv< z*)j}8OrhWgWzDAlfSB5UIO_UbuAY8n!2h$ju@Mkl&42ckY1!k$2Q8b^om>_PqvKX7 z8cH#{2_M6xG!rI%XOj4Y_@})g6Ma$8-A@8%6vPQ%N=rHLpW@zjst16LGq}s%c>RhE zHI2cy2-}8k4jbNm?eM}b&_I0+$p)tlKi)I8yKUpI=kKqtc~Z5CKc4BoDe@x0n{ia= zY@liX5^Vmo(4Q#Dq|dI3m6g@o_wO~v=}+(Y)+27-r03n$%sPuD+3-aQa8W|+ca>wOzKk=o$kF8L$yj9aS=GK9oA(v<##etP-sj}L6-6WUYeC z;nz5(rv1w$$pO^~IuNQHNOoPymsov}6F!8iXH`Hv(iDPsEPb($Yc3ED%fiC6y%}Z$!(Do zQ&&D90)5&#I{YP#QO!LZ=&82ea_c>bo)X8k$kmzXS#aslR$sUc+)Ztp;H0g z(mz+ZP@8#+h=v_DTJT>NxG23bIc+bhp=o~dQN%TTVHd;g{hQFK>}0Y(ViRa?ZYPk| zNb&Le^;2R-5M8zN=uTOOMaV~P2Oe0rT`$}qr(6j@SKhT|#cHf=Q&pZBGPHd&y3NMQ zuhxZa@Ts**CpIi!M^jUSn=m`6rEbLsQ#h=Cuu8R%j(Ubd<$)Id)<{qbdI6tvHIb=s z!@j9QWAfCUx4dU_M=frKtFsgGSC8qW*f^xE1Y2;bYuzSEZ-WLcycf0rw$rBY4F83- ztx(X_(ogB?u6>#e_shwAZTBrq7;yt+&wD{rpjfx=-eHjqm?gu+Fi1CGx114A6oEXn zYJfj?*q!sFS#ykwqf=5f#=kiyZD>gN#t)ak#sHifzkl~@?kD%R2u%A3-`AtLPfUhK 
zY)p**yt1AVPo}iZy8ZrP7zmwRg3SgLHh))x zEUM)N-OakH1f<55Tq)QXIIOJ=+f1dk<8P=^8DB7^d8|gDwkepj(!4^-^x)W> z6FCOxHV>@3Ys~HWaYs)__J2lg<3D=D9N|F{sDJX)9z@llqr&P}yWhv{nu$M_4iEd^ z`&-EWn5ng~}RpLcqZzQ(jb3{~!-z2J%RO(~eS zz{!lSOl!a;B3h-sk@-7JRLD=^Ye4nsfpFbll zsveO1t0B-AG0D^Z_j&?74g1nT|8>j3NTq`JPt*@O&gO#~MOUzv;()K9*{sEH_dXl7 z{|ot5Fyy@rx@NldvKtb;6=Q~pf#aWGv%@wutx;mK_@<1h1!QZp(^}MFtrz+_SjAZD zP$p|j%TxI3G2~L;G5oDf91)s)X89EMIoaoIw#=AV_~IB1|8Vr}c)-5Z#sFa%gcnQ@ zP0h_41D7B1xzi3OPUC-IX25^4usIwD#BibcJ4Z`;-5aJmQZIJr90u(92* zt|kTny7FshIRhiT_rJLnO-0XraosF%XTW>dDbd?~(=!X+^Z%M;1^=03J>}xUajvc& zvpQ3-GiM444$hzd=ccz*1Rf4;ZS8$FS$q2^`;eh&oYVcruqB$s)j>b1#JVp(+mjua zwLW|>ykA;VGxcYIa@qmo^RBMfD1BgB)40gv8>>b}W8AN;*o0b%k@18fj+7y-XF(i` z2kL~$J%aw;+(agOGzV0FIb2%)c5ipk)-qzMh)Kyv2WMu?welutK z@inC7<>hMQGh~DV@<|*NDDBSU$I@LYrl#0nsBXR0BU5*^M+TfS4Kkc)AYHB_?HInM zx*IEO5nZjgyx(c8d9ZrgUpuN3&+C}xgaD37R^geNN2^5RRd^fn*8h<&k0&OuHe8M1 z$))rVfFcS9G`@oQY*2|Bm2ee6Kve;2mEOPVWkMpYrp_K5a{l;Gpo*z=GiJZdR)2R2 zBm3eHl7QA>=g|!D_9V!P;xZL78)svky>N@?NIS`gw_InY#`Qxiw33Xn^s9dT;?uMc z;YNYanz7CQGaHFYNlBSF+^Z1Eni1-immiAWy?0AKCAGCY!?pV(*mr+j%fEj89V+zd ze=4-v(~>+?O#G^x{KykLQ$5+tP;7i>*D=Km^=>#5G{)>m_~=cyH>ZsutgjY zqSRHi@LVYgNv%s|*4rjG*X$kZ`4ZYbuoe|{1M5P+{^8IYT9>pB8$%)wjn@muxAj$n z%hb=cZFpo=Q7uAW5KB9zyghldRBbMhyYleob~oOcRYRKp-$7J9-z zQOe+>H$sRLaPPqnGJJ(ecE#;90xV_92^H89~*{96o^IR4W_{ChrxlT6=uj`?=_?F9;#E2U;N^{M-zidCiZ z*YwdGu&os7;Uf;HwjI@`enwv3W16J0-Z@t`2m4v+=e}T75tshCGDl+Js~wuRZ#C** zJ0it+RPkKL2x4$ZIREAJq0Z^mm=U>v*X5-PA>rEcKpNVw$e7^lwU5}?J@90{I8V^> z!NS7&ZovK&wt8L1Qs1!_H@^o+<);#1elu3>C1QL|CR9K3=WfRRZKG+{i5(p!*Xy)Q zcc5THl7Iz>;`V@6+6d#4=)V;YCDR|dX;w=5-!jPgo5_!PM2nK5;`KXy!l{|~16L`Q z)VRGo7r>pNAOU~ri6&MLew#B^_<@ID;lGnW8Q9+5On`MxO$u>%fvNK@DlPng`s&77 zmfvLsgzNTdqbCY8a}}zbalM~$(Ll1RckiUO>fwB7Ky&d!-fO8r^u(gQdRIiM7<@|i zyc?WX85AojxZhM-`mD0h(~FCTM@6m9zQ9D#vawCn31}!8T)t&?KgQMjr2~$B_>5!Y z0KO?60HRb(@WlGhlLlUtaP_4aid6OYY6g8)4Ol0{#8Au_|LM6V z2HuUVvrY1$stO7dGpiUSD5_9^H9wJ^7VQ|p4aK!4P0dgsJkYAwU?NDid=|g_*iuG@ 
z{k3e7gQnB(c{X%fW4h{v6D&(ZZYcZsPd#H-0m3T!5kH%&nSDJdg4M@AZfP7rbG)PWlJubzGQx96g=70gLJ_yC`6uvZM{Q2|e!q(O;DNL#J%gZK!nE_GA z&1PUTT~9_vFFzk*`_=YgOlO@##|t6fd# z5uAUVe-B5jI5;?$z>4*GS_gw>(xkqy(z(==uEy`EcSMlyh3jR6uYG|!27k3dy3`GQ zEup8gFCX*qiQjfa_r2IZI%olC1W@h^zI`9H9MLZ@wH!{x#vmCm^7C zhO$%rdDJic_n7__jq*FAtdr^b<#kxt6mYy$A-V@v2=ld1zV&ER@H*&sfKf9!kOnEe z(a@JSMf&*Od(iU1z`*;%I2C*`Wic1HUI7a&BXeafuF()tCqcr^0mwm#rG?c3Ci_<% zP^2n&As7JN%x;w4UXukoI5;3`S#gR;$pUtfclNdy*#~x;-=o0dF0P>P5S)oSpa>wU z5@xvx0+`COw4r;a_?qF^x1|mp=!*t2#^^HFwVYJLZ=0L zGLC!T_C1vV6+P&ITC>Zl4ELg`yoTN1S_+D5YWU%3(l+MhYDQJc+<3DgJDc{8-f4k7 zrM?iuhv>>{&U;2kaNv&^cP0oK%IV(r<<*3WmM7y{((p&Ec_J~l8Fu~&GUGSy_{LMS z!ZCF=#rdShWLp4WnIi6}__m@ly@L4JC@AFLed0M`6w`fviv5oRhH+}jQnf1*fqOL6 z)+UkM5gq+-TO+Fq3){XUQ(P72vxS(82U)1jCsB=9GFtRs89&EvI^VnZ+iEY#;m%R` zsC~wyvE%;fsicanN|8_tPQ8HMj5r^^fZqOsAr4h^kaEV5p}VPRAQsZ84!B@&WMzE) z{RQ+52)@O?=whPDWO~CTeKn8lpL4$g*uKE+o}{Tx2W zKvsT%>R@-rCgjt;X9rpju0DJx5vXjsM}~&+m7S-Oox<~Wz;o=UFCP4?eXfawo=xLp zDM>I61f7*YcJ_L9dOjJiSCw&E&6&w4Dc#RjNb(lCKC)`}|RmYu;s_ao0Tj$Sf>RkFlLubZlR96R1 zZ!c3kj4K^Q0_=5;>o<3Id$&t5_wlc$D=I!#?RHB__;pWz`*z>im630m%HV5> zwBelI=2mPKHjnF-Jd2&!Kxt{|_t{&ITuy={$5u8Et!Nt>Fbp6xMPV8dv)=lY0(r5f7${_~|97AEW4x{*7xN-bXFN;H znoGAyh>$-Od>{MbjHhZRdM@I$F6AqG`wOY8&)n@KwFwuwo%E)Q!SIJ z(G9hGNi)G=imjVV)NVDK1a6m`E6?WsDyx9--H1Sq`a<#9}{{$-}U z)A!vheB3DPah1$uY(Jbz_VRc8eV;+a$9OcHOeAaBq*PE;*TP=>$@G8VQCsb0+ zgfIDg5-;jWf$L6f7!9M@3&a&(|5BqTxzR=vJ|HN0cW=Bjw$Nfm9H~MZvt)^*N*Xtm zm-ky>$lIhLq`J1xQUzIY`lY)GUaPKBvNy@SOJ}ma!uS#r{2< zJE9WK)f_$*E8BA|_p{EL6@$a{WKW3H5}9?>g@m5p5RngSj&Ca=vE`sD5okWB@_Hk9 zpVaqr0ukMI@A+_>zVp16iPq)CRf{>t?CFk7kGqKw=~oP{5nk&`GCZm6p^grzk9t4W z^GPjZ!tuYP$e*^n%s^*_Rp_l&jXIh8;Bkm~8jBRQRNpx>LZ+WcYk9mg^5w|1f2-H> z*6gz;MyZ|{EUH78{?}fXDuRjgdT`wgG4rhS6Y5^jpRUqrHZOdaY$HFmVj7!Je}CH1 z&HIFGJhim+%_CcRAibw_KEEpUYIcP|L+ZRSHE^yo40zNda`bnnMkgwaF|XUs`p&4X zO3vlig2$twU(;25OXJbB%xhM=<aAdT$<*9(| zNBD!Qgk#4Cmob#Xr{W#%FTB?z;Q3iix9*;mQHG2?Gb+sPzkODg95zNrQI@?s|H0K3 z(k>XRkn=7Sew_I7x|Ybu>iFnL>=_mEz^Pn^=>Gl3VXO8IkRW^VcgEJLehmw$_ag)0 
zCXN)GB(amKbJ0^flh zh(O$ADB0@pqbNOgtr&q^9$CiGYP9*?-^j0iv1Km`lNNHVDAIBT%Wta*6N#xp9-l*h z6IAr3LAR0G=K^3g3=cD{Mp{k6R?W0&w8yOw^Hf$YEX?a|&}*&G9LclI3jE7jdNi^q znA7Eaa#R?ZKN3+b(s>0Ul`p4F`H#6DU&O$$i(rUaz6N=t*3v~+HeE&*xj?(W*$h2QtP=l;h1pK*?3 zC=8_b`>uDbXU=EN`Rq5^wukbFh=XCR4ua{ZN#)p2Usf9sJ2U_x;C#)Eeo&Q0{mjBI zXSt#1xVS}1J0v;6<78IgjZQJmE{f$lq0`XNb_3+;o2yetSZwNBag;#2MkGb!=^A`# zF__Xzz0p8n%vz1_@^oP~1yIiu80wE2r(4I>#$CEtz+JbO;$h1DJZ zYyasi!ZL52h}-E^+2v#MbbqxgH$rb_*Rt@^x@g&yP^ZD~UxlqGB3v1hv-{?2i`DYy zaz+~Yl>G#@8PUQ&Ri5#2 z){x-!_?DY(%1~>qllP0My_&gEp|2W~SN66PBjp)`r7VROeG#iGd|>?_ms;(Zd%kw` zHm`-kLf;0zRmx$%z+h3^;Q$v(3KN~?`?MRHw5TCfuUXkat_elLn_$A5>A(jq%;dHN zSDNoMsCQpo%2u2x_BRQgnl22a3j$@gI!V;S~4#w;U0dLy{m{mHDpX`ONP~ROoowADac|!X`My4PiJr2g)RA8yy1JO#~k zsRdFV9+aj>>vSB9-WL-d#P?2%MZmBl7t$H8&E0V(eT)WIr>4EhZG+6*k~=_Ju=eB$ zP3(=U&2%l=n~|7MXUDpL28#sngw2u=kUhf*w7s?pBa2H)3Iz$xb7%V| z2X{>06>2x){NUXAW(mfql3_3HNEEMG$dic>mzF*_sIWaZj&ieo7=%f|GP*6JTknJ- z5nL-w33?ynHP&@WFtsBdJpveMI#bqz2111jPOJu4IT_@2BSzjqL!;ZiKiOz3C|CaI z^lKJHcQLVdz!RSrFJyjUQ}?KNiDg}E%3o1E*56C^o`28JFKTS; z?tWOam968U^yknA(k`gxc`CUUYM*SeBP`}DFz}%ITdV2BDHwr`2lB}1U)F^m958hg zRIz9s9Y(!SjAxMoJCtv9zo7VbY{*+!&{`Wx57vI$eNqe3P(${0%PI0; z6_Z=GJ?Kq4m^yLM>+zM+u23J7vA_==Mz6l;Z{_fptk$Cj%aO3LX#`ss%x$WG3kH;)4M6GP|K&y4pLIe}*U6F3wGQVio8a z_@=E!$A_bx+{w1i_A`H!vw81gQgF&2z2yh&oTeG=HrOYfz9>Tbo?r|7)8??fU&x&}f*zwod+KeG%*}(v z({EAEVhImE@!n{?u(2hvO&Li7f+^wLQ5#zqL7sj~?B%*Z*Av#2sVVefU}M4;x)$un*nT{_fb5 zJyI;;I-+sF;C$1d1o%g^yN@#ABBJt6g6)whxq_Q3t18+{9XIE7ZhN26cs-a65;jOK zQ0lEuUGAZy>-E)3VI0zD_BSR?LwI+y;%)-XVb_?*gN4H%1k5iJ{quhhdGxHml{m~$ zB_kEl3+9>iP-r|s)SpS~T0Rc3-&1A*lR{y0h?Hwr?b?7pDMA8xPf3CRev3}xdkGs? 
zaS1yl{MFUL^Oe`=yBF(d!S(A{(%7DJ#OkuARI_me3CRXjipK*sx0$MWrSB#+&iU7l3$!oOG4Dpe<)RlY z&yw7=&t^q#^o4gK`41?{D^eby1@Fw%Nub9Go=j6xH;LTeE!J_b2*hk4JW3hxY#}n~ z{h-)6C{s;5RQ7(>I}?ijhF|9C9A&QjAm8yZ7CkdRHG(5C)*11Jk#70g5r?&cM8X?& z|9X$;)p~t&dN}*2civaBw2{h$T_t%|okb3tj2i+DUwz4hyU7s4k{Pk{JW%#bGj7DL zFC}b_j;!*xib$1OClC~fe>3RkJ66}zV}Oq+uLx^PxsK{;L!WVRDNvL#&7wBT*P`}`pGTUE=^^5K&J};H26bbvG zewr5Mb76MWvITqxLY@?0to%#3M1r-#4-zyz&TjDQu(uN(+K`=f5SbJxGzvSN?Tk7f zoonP{rP^o(+#p-ZG$(tC8-3;CilA>Yna$K)j-{L`EEc<*89hlz9>3uDq9Ro$1z3>J z7{A3OTB51PM2Z_64jq;b(T`lwY*qMh68j>(`8=0QYJY#m-ni~j@`+r%*@J1F81#2R zE9tWAor*2o@5KB-;m#P7M)|0q5>)rzgl>MU2T3JIF@*9uM(1U5(mS}mN`vxS%`k~z zhZyRjExR~YgI8L)n-5XNiLnw{F56Yh$t5x5^PKKLH!@F6IzM?rxxax*IhsL7O6;T4 zvJ(wsjreX|&U&ZkK+5seN$6OciwYeMo84sNn_gHVBnUiZj`VWW(2X>v!^Vk|CyGge zjK#Kgk@~x|WmnjgE~&j|vd#PW?R60Jp^vq|DP+>Uv<|r=z``wN7N1vKEcyODd#K`* zjQR{RFm&R;7-i!$7^{I0)aYH0x77JlS7u2bzQJo~ob*>0=ky3?L#I;R{1DF_+ec(F z?f+xanh^2X3F2FP47Rv0`QF)jPTNlG(fYI=s_aQCM(;ysaR~B$5}9npD$5^_tdRK2 zmcof~Ek#`@$vuLP3)GaH2HEwTWSSkE_CIOlv{X$ZJ`--m#??iBUZ&&=bl%ilwD&0z zEFvTz@}COe`_Z}s6ur<-ddSZKoqc^`dU_34KYnNxj@Q{$B@yZ|PsMVWhn707y14rl z_nAd?P&gi*bXz?i;VSB#YcLJ8t z-xbCD?~2-mtrSkxae=)nIU1ybf6_i6nfy!>tn<=&kc^U&(xRNiUD8&~EJ{Y-Z&I6p zDh{?(B50&U3js?u4$66Oh||3fY6XM&H~<}qy)U{&`5DjOsth*%_9M6>Du&8qby1~! 
zJ`FT<*71o#2J*!%K+OWxNRLeWLKS^--Od!MU>4<2Yz@3Vh4qg;nta6|A^t^9w!P7A zRTjIR12e}qoEKf9;^NTTC$l2*ZdX{S;=*DDWhcIo)4Z5mu9Gm@meA3hD_}fziA$ul zId%*V*nkrz3`hWb44Y`fC|jco)_*J!ZUAf z$ojT;9@o-d&T*1IH1=qv)pW0EU*)uj8o>Bw@BaeN-r=1hWsw->zWmP(nwz@Xs(Rip zwu#jYL-h+|#fGtS@Q*Y2{-j_5dLuT3Wt8DkrMO_+| zKyA}O1nde6W`KksFRE@@F!D7PdJ`UxZx_wtIiawUw|E^EE*pQ<+^ZTx;@B0RVMLOP zFv3@3;^Mljs6gE*fsXrI&~@ie(39PIYPe@5-6Z83hV!`=SNnw}_I!Jt;6zM*R`uZi3T%dA3_~wZGYW{q-^G7-MchBG0(en|IT3Xr{3=&j5_obx` zm^TPxr ziHc2#MotXM;Zf2X%A(fFZ3Aj%W=wYWQiw_WchDo4D5G5)%1LEjRz$f{Zw&K4=OQWG z>hW$BMqS?NnLuFd)?dCj*_!Z+U*k>Ss^*F)WpHvBArH60`swTuJ^Ah+mc3o@gY0&o z7@dden@L1INu7M+nxv%_>mch8731;bTuF}+c;sx8F-f9s)ZpL!Wine3Kg-ws^?Q`8!~#=m?&Oj=CU@@!umSIV z{!7QEv`zohKbxEH-r_xe93+i6lt^%=V_+xd$TrWc&3-80x~-gDOC0k6d0d9*xVhS= zb!RK4t#QA6pLO+6DxByei5iy6_5@4t7|(FCRarN)ZBc-qFnDmN{R1SH&MLS5G)SPc zC?-Q5wFFNyrQS4M!>I(B#S`vAl*8}h{#G4YHu<_UGoQJ1ErlISZWTj<;^TKD8g$#bkm zpknp!GT2fh;1u_x1I>F)-5S-~2oT+AGYX$Q6=s0b6Vkua(EA><-duiXR+~S<5K&<3PGo>A)j4zZ{b(pkj>o<>EY_(N_#S zvoaMKCE?RgXx&k6&T|kietKDET&*`{hwE*)>MLZLvk!#Lx; zG$V+oo8vPY4Q<8R?_H!@|r- zhBuFQAKQvg3odIlAae{IB_VNl=YVn9^~MC;VTYVur_uaq#+g<#Z^=vhF|>b1uY2GZm~<|sY=0lbu18aE z56w95C~p6Sfn8mBohe*kIPH^8RThHiA0B-eAv;fkgDJe^74b<(f^Y|)(PST|YDG(kWfqaj}}Aujg(1 zjzziRnY-ouQ2S-p*k%#>C2!u1dj4D?UR6fjsKv2;h>!B>4pOQ9#YD$J?a@|tfrL(6 zU*un^!^p(iR@ayA39hxfALMf)xx9dVVq4$Iol$Af#r$NeY3<3HNYigRy z@0g&sp6y+CH2pN&EaBeSHSrSO+?&LX@nBzu2fCl>eT7G})6)7N%IGlyhn~i5Fz+rV z38y+{=J#h}-O;x-0@zeW`QMm2d0qZeW&CT` zMT%xiHF{H}x*m;+#U1;L(x$#m5%f%N?d{nV2?@K>ir-51q>srJN9nU}>Q7aq5StV< z{HbFV7yBukp)ydL1KMoOS2c3LXdiYeti_;I&m{@1&6ZE_L%GJ7&m7t9NrUm%oOVjU z)5{A8z>Gv!VuIJ&=6S;xf1D}1-ERdq0q9s!{dq-2t*CN{u!RLRx9wxiC9ffXf|_=| zd;v2y58{?fNL89<_tNe#%WZ#%QpERpUoB5O^q%0Z1f69E#)o+U510-6n>l?0*wv9S zo_u7Ac4Q%4#&IrQ>Z5?ckndLg(>w8x+Oky=5Z}AMv-F6L;h%-@SSjclyh7vKmwCg7 zv|oAr-R5N+LvQ3>OMr}@mWasHE@?%>Juwk7k=@q&#dielcZllWkPv8Z3=T-;okpfP z%V@V+E4&!54$SBPoOD7R3G-8}$@{;|r%4OtDDCWclN5)?c<3b(>Yme*?qv?@6A>6T 
zGhO;|)meY$WJmYG6XreBN7=C2Fv6SsEHF^YxKWh<$DRG^g23j*2soX`MI7%xr`HJ4pM5}fX{Y-^D(`$QX?VJ zI`yjK=BPL%BBI55!S#;b-a3%@z%B-r+BV@ody0|7AReWAcx+%dc1@rC!0zz$k?)Kg z9J25EO}w7x$Q9!Zqw~G_ujn+bt9A>E+&I7Z{r4XM-Sl#I=z}yTvUkFm59yM5`rDA; zx?r58CiU2+oTXv;WF7Z7@&1bn+k5@2)7YS+Ulx^nI zU`pZ>&~Sn?%{N(?Mk%4#SLo4>fo$;0;r$XTRb;UwtJ) zVZsM-*@^QxHa7ANWkcB1)b;%b*v%$zVeDi@IA~A!FMLt^dt<}$)XXy~N0|-eUfbth zc^R-Y+&3`Tuj}m~Y&I4Kqwmh_k3AQvr136}cEX5<%)=~OmaMaccdpLMK+lPU;Ayhc z=H)QORR0NCISb!jvH5%xA|(Lz#seGsYLjoxGYY&Lv_o{(k>&Gm5eizN;gEV2$60FX z7{R7jD6CFNGR4uBpse26uwu;uC#Xi3a4FaK7x! zVv;FW;>qQga@e10_{N598$uzoScnX2Mm!**7P~LxutWu*UwTo|a<3T}fz(O%=_B&g zwEK=wRGyR%U7nqrl0_q^SnuBaYIdte}SDLdi5f`w=CLOIe+Bilf}OsS$YFXD3w7OP%vN79Pr3_4s5=&*%5Vo zKUP1UOgr|hxQ9Z_HtGihYJ7*4Zk*x_x92^bu&d;Wvb_saJwLf?NV^tK+dr$zg>G6Q z%cri+2&wuhiU)tdNn4!s$IIcW4MH#`2f3VQj^6{=Jj)=8QJ#^_o0TjT)A~vgq;R;17wbv3s7RvhtRL)F zh~lV3Ci;razPbR#nfsLdX_j+WmX8o+p)`xw1jj(_k}5)vj$s5xgxIe2lc9kA_`9}^ z?UC30vZ99JD3>$B55p$BC;O9`k4c{pM}UcCm|}j-T2($3%n<@+!@S`nTojwa+7U_D zb<0R1?_VJxyv{$sf7vcig{Gq`t+`)#HUyx%$B4M1?Wzxff%BelVHShU(ZaW;xCXCZ z(`TFZiHeF+&FJs*W3R4GAMwa1@<00DH!GRT9)XG;zs&qsODrqT{Nc+)T*5ft2a7ML zsj0PpE&k35GoGm#hZ&~!s*X>&K8?Bfw&fo|m%VP_ArCV*mm3q9B3pj1f97<*qdj_9@}gmu~=F z<1qVK?s~>YBW!s?*wg%dp20tW2kht0_sM`#iXY1)ejjSgAWz6vE$$(>=dJzw` z@B_vQlG7tnUrRN!bfr|E`+KJyb7B%uGsK;nJ4cGU0Y2WB5}7I>$K$Y5!0pat<8&xU z!EJ4O(bUoLHX&2aOZTpaq-4*QXWi+JsM!321cs}Dd0o(>3;FmlMl5j0eoL#dcki}N z1_jyw>VyRsYiT&Hy^RK~Fyp9f1tw%8X|Qv(rKFm*DV5P}8L5cB`TID*zGc<_?Wu*% zyTg_mclDvr?5EcS8s#hPj)zH7n_n)pxK7K2zST5xO#cvRD{LvuSquk&a8%)a`@e@@ z(thzJ(avu0>P4$jY8Kgz%bY*tX!LBOflWy8;o%X|?Yf(f8SCRurnp~*#R^Aem1-xj_yHSt4 z4H0vXL~g zz3I!4(~AF`X3%yK&~?NF5>3r^kuq%{c!*IfkiI|`6wPm7r$pXeW>_=;>bBd(d@+N0 zA2WLM3&4onw@lJMbW3sUM zBOT|DeyfJr44GHO47)sLS3@f8BJ9kJqF65}@c>r&g?IT0vPe1YjP-u{v!2tYcR3wj z5)7AV#vK&~+s?{=xB6?l+93<`ZKFNfXiQ8@#@evVT0=eCyA2!D{;y1b1hw@~Whb62 zt!&4k%xgzw&--P~is@KUCOp{#f|I~|!QF<9fNF;h7$CxtKa%V}TVP=&<9y2qll+IV 
zUf}9W2?pcPXdIiU;;j{U^>hPuSXR6KzQ*{D#TMOvVMM!DRrVJ;dbdEzJS%;;jH+rG(oSGKv6M4S?iyboUvOTCO{C7p{dqXhX2_|T|+wImfN6HmS#fQ z*>n%26fN~KY>gkkWYKGM<7(L~nieSCi5h_zvT}3lm-7;&Ft*knQ+6FRI6IqxDRh{Z zr@La%H)!9#GeV^r``!vmzAGdqZn&hTqPowAaWWruOu2aV!4dLyIpS9mL#$sgX9|Qw zm0^CWzG1w~mF{~C=GI?Ib#ZuIJc0b{j@|4&R=AJP56;py<7PqwTPej~`ZYHH>__qD zi>ipf&)Yvc)U=H5M%5(|-W+`PA}@ zi_eeGynfD=8aErA&P6KOx4R=$#77UDRuc-<_n`e^n_uLvU6RjU(fA0?grplOUb)9T z*{sxjCd7o&3?_V6Ezpx#F?UBNyV8NrBkdq( z+-^t72*?{}isehkbQ_rZ$rpSBppg>b7cEj1Fw*3URz(zA^P?R*Gjh#kX|zyZ)V~&a zQvPFw?^Y}(NyoGd%MfT7&2*TR-IddxfC7%^&rXvB+X&NbX~fHlws%GJlNv{H1gHjYX>+1^+3dZq|ZKW4`wk;arQ;f*J&6V|45?a zV#F^PT8}CH_xg--V_*HdBYIEP1=2KieD-RCz}B?-i}XqntMX&pj6OZBdv|GyNc!!{i$T zL0a+wRbxS-ni}rMv)N}CED>ef!gA=mVZu=_VkGfN;l80pe!`5p5I#f4>Kvt8%0inZ8h6H`GG z@$c`=^A{4<$qh8~3L66W5MFwc<{i|I@4odbYE@}{>3j9arNum<(We%>k&$rN0_EQ} zz&(htHYXEVMu$~h{}*;T*}0E;Qz}DhttQE5GB#HBbK(`K+;tEX@ZGSZXnhzQ8MiaL zDY1(%eT7s&MDZ3kDQE=;pNeStRcY2oUe|cyk&%Uh1X`j9pUz&gyAc{;N<4)vp1XC~ z6Sc>V_t4PNU3VMz_8Z)9gg~^gOE5kuPhniYcIg4u($bcgp&_n#n^%Rg4^PHHb4yTU zQ<|Ryn{DqaQ-zclyMQ#uqmeR}9d!b_l@H~^Ge3^l(j9&t#b>8N-1*z#iQ&;pNBN8Y zq5l=05R=bJz#`e(Df`W`|Xo0}>s`bC(Q z3C)JSa0@Q3k#5`-TN>26d%C;4HWe`oZQcC$+Aq@g^2@r%Blk}a+Uxf$!pF`RP6-v~ zn?LFE>S_kK7U?Qy8>Wb64mMeEW_{G{o#q(!feV*oHNA07YF4eitrt88vCwswgwVPtK#a!9^YzUQn0k&*RX;HZaqaomu?D%`~(*TQBBy zA_4qUwCGX+um1((DLL%h@oTZ5H!)ATKaEPhdi7J?+P}|CEyzjn#uI`EqDGUhJyjU= z0~B*wuH`7&(|W0IBV6t07u>JrJVj!Q50^D>{5i~ZI~*CPb_H-?lyr}j<{D0Cr7YK> zeDx}b#%nwZijPxMWYLW;e16`j>w(wUxZs)qX9abRvX+)FjwT{_Vd2@)`ZOU&cK_3%p!D=~v6J_!{i$tq zn`3DuYu_eka!wX}Dp%8-*I!mCferlOLxSuyld_2dKx@d-jna6tz8M>)mx#{+6blFo z!M_xX0XH^p&69(QO}R*J>;p%y8XCrjH2JN`I%@+LLo);S#Msyuc6Mk6qy1oie8B%1 zG{{lw`hG=_^4LyQzh4}+%JfFZ7GD=umsMpfgg*uRMcT-0(sgEog@zNAFE;D#=Fcz8 z*+jItl1uqbYT13=uqL0!n)pfbHyiaWA5R4$HnhTuqgZ3$5fM|rYP zQ7~JAzR6SMr(Y~B>p97}mZX#9y!PVNHB*A{l5iJ+S!)`-yM<#9AG-CxTaN$}DaA4tHf`x$5iw?qDFB zeE0oU&54h>6P^!6@?EG(=*e6tA0ewQcpG`ug1^WHV?K8xysf|c4+lM#=I`yint7dff79w`*+Q!Cz3Xyz7`AXfC 
zbJXsNNWj(Sk>hqBan-GD9)K<5mi7u@;$_k{JG+EmYfX{toq|joK+4EJSv2oY3Vvx| z`*Zazx6!Yyb0T)*==fFcBxYWg0D>N-ZWWdjp|6ivY_Q)cMerx42dzjJSEV;UD|-s* zcdkf%lKSzYmLE!gs9r973|UN-+Uj>`7f}LusWF(Ih7K)A; zUK&SF*e!XRu(3IoFju@~z4&x+0Qr{vZE<{`#%(vC3hL6d0zA~lAQA!O_B5Z6&xV}) z1a&{{K3ADD7+uQKs&nB2EkG35euIP`3_XBV|1Hv?%nugrZVv>^z_Sp0n0BYN<-7(W z`nem2=QLRog(h0>Qzeb~&2N)GDg5MTbR@@RPaj?_{)Y}=*n?kc!~bVGfZ>0n0}iw( z;w~Dl3!a8e-esv7X8nKbC;nFFTC9N!E2Hqe57U2Y0tguCVw%3$ko)RQfw8BcGEeTR zMjb53=(kdWk2sM$yi{Iznv43Xq3L^T*JwXarUAP`(eOTFnQ=d*1QZA>^QZ^$opDvOy3P=N zNIWYot%BK-^~<$g4RV24IM=4|Pr6M_8ai`$z1wro(YrvDPXh@vsAjZ@LDs0LH`q4jXZG}BWw zs7=Q`Ui1QkzqJo|T@+7X>0 z^u$R#ZhOjPd=6S>gZa74am|syK*Pz&X^5O^BVse8s`M%)1`_EedLPQZuUt3{LmYcZ z3?}_LeZ1@P&%w#SfqXAAY2pT8oKAe7?#9^A zdaK8`-TsZQB!Al!^h(4x->1Bc?$liadRzAT_wCC~iTLL>{5=W^B-EXlUw8dpEfFv* z-0T%Yd!o+R-78M4lT;!AI0GE{o}WaU?yoeV?|E|xqi!mK+68jU@W~h}yo#RNPLu5d z{=k)%Uz6dKP7)z|nd|gyXk`00g;~D?P{XmC9l4!d5SdQ2zF&M{yp#0dhakxRLhlq! z0A%qwg?u6e6|i`1V4T{g!5mg>^6s`S|EK|r^3JcESUAkr*uPaTMRSIXT1!3?aafUI zHlnlR2(^bN*gH2~?-Jjqq_kUpGR0r3_03L7?@`2LL6C(U3VQNQXfQZV3X^k)5!9C$&+)F&msqwjJk>R!yZ^0Ivs#k+zfu0hK~i z!yL=X-7^wkiRs=NaDrV%+8G`lN`Mj&Ix)}MI2E3h)BrJ|{ug?d{iP^3v#bBvn*>n+ zJod;$y}Lg-vp=3_+qSYKy5)1t#u#R|eT}_;(w?zLWct0KWJ?z%HtoLZHS5 zEJc}p-fT*M-1P|=fc>j!y9(fA9QOZTf;Qw!xvaw<=s(^IKser+aXSs02= zR^SV;ur!D5KDUE2+2iFHK~dh8lPA}n(ouQ_tPO_{ z6MW1?8Ws5o;X;*V*lDC@Vi-A$bc$&loWrRI zD#kb3vNbK`Oe?C?a3AIeSN6DFId4|@{dj_HNA_#f%7zlk(b~6&kcbpIG(fm72{q-Ftsz8O{s}Mr&>8V9s5t zsPYD3-TxVBxlcdti4Aj#$Frw@_=gSOY+KNe<_VU)W0pY;IA@Dj7avFed*7YKe|qno zSr|-_W;Rm-0G8PBW{ryq(>t54SwIwC#uxXwYNHzByy}#riZC*oC*riH2Q}gCm>q8K zN)&etoDgr0CxAoj0M@mEe9Zzculb*7TN7odU?zdZ}$B_9y(X@KHzc zA*rgY45WfQh?Z&>$=Zw}XVl~YJ0DM)x6B|Nj~SB2IR>j_R40A(yHV}%p}!Q!m1478 zAdv6+8d+U^?_19CXF}T{G~zr4?-E1gV^D1^p6%0sc0kzN9ImN(e;o1k*PbgvTwMB9 zyl2@=b*e$Yaba5LbpDKa6dlU=F9Boy?-}1cO?+d#u<(9aHn0A70=vs;I_h9^b2{(a z-!6!Zkqul5Vunl1_VMYDMF%INo{I;|a^0U|--CL9hnor)u^#2S-B37Aun_=quXDoJ 
ztJzp7nIpr+NI11zif|wKKO5}m@J)>&Q@dSCpbuD4uio;X_2Q57dUj#@xJ)ew}#RaCV*wdF8cA5L;zn=E&SCRg+4}!v`3{+St0k;*Vh`GLUBa-@`i*;_Rfw@9S;)Mk93WX0qpAefFL8q(1HQK`7IBxuwvk_m78ADAb;Ff)!^E=T$%! z3K|;7g41LQRQs8$||E zuqninQEzG$kx7SQ6Oa{^m=4~OiFiL=D6~|UMwM91qsB)y;0_L|JH-FM%Qo3bLIe3u z+)@!_!GOyP*+2Y*n@W%B^)LJOuiW&uTmTh-;_}1anKpFE8LdA%sl@kFr@Memt#k+O z!H{zn2k2T~nYY)mNhuj<>#+B7VgX++bUvYjuY0@t@XS#!>34i-;9a1dWssQHG#^wp z`P}011$6lVK8ysBx6n>5kQ+E)>HJzzF#rx{b>b%np&h2^!UhHgxg#XdZtME`hBS@5 z!F){uFjy_T#_{QbUl!c(s=8~Bu-_URdcA+l-iZmj^;ca)Sep{h6O>K?(x>Eq-|+tr z34nabS(J50;-4ores!+F^= zDVRu75tnD#iV4({xt|w}*ht#Ovv&u$%zvdcSK1+df>@}%^=xOzr+#&lFMhBF&r?7B z-7I-`A;x~sHA9w2iXLg(8K3Cbm@d(#OzFeVsW4o$4|LEGjE5A%R?f!z*roluzJrFZ zfT}ArOv$LP7T$UYZ!)$t@O^RZV{zM-pT|5D3eXL!XYssOqtKm=X-JX(UXR~>atqJm z<$E*&%`--1lh}Fn5nvO}9XBqkjC%4NQWS0XmL4(uyjBbfzJRmY+~! z(%YR&5trB-tJ-Nj0|*hbY$xAF=L3bp=O z^J{Qy<}_1QzAfDp9ovo?@c33R_TT*Cs543k7;V?mn{eyi?l^PUT>UXiDQt5u2%DHX zc2-WXa(rhmxb=XCbM>Rfkm5A|tMJgx4wBUcgE2o zy$GN#X)^HI^|~pg44XA=bbEK+L(l&vBo9z8V6f=;!SV~QMkxNm!ech0spsHNkNbdK zJpl3m*HX&LvGQCdOVau>)EO>*wEXKu9*&>i*ZA+|{!#X}n%9f!R8VXRGvL#|yHU?h za_3G~d;~I6hlk^%se9Edhpo=e&(puaZ`=g`6hFLw|5L{Sf1OAh|1Yc3PH%13``8b~ z62{>!W9$Kb3Es=^bacJ~o0XTg$bUfoXv83j8~^GHx$i2Wjf&~=F!t_e<9T$ao0| zu_6_@zDHT;gR|Rs`V@FeXg5eVRP^;nZOj_R$K5d3L~yB_(>3K#)E?;b`@Nn((hvM1 zk0kGjBSs2qBS?U&VyTU0qU@>R+Nb%wV=j6%J$R!y!+2D)Xn1})K5FGb=&Qjp=&XOsBh$HU%>Q3jKQkM9Q+Gy;iEL*HzB?~(h)_A zgs+HiyzC9?u@SSj%_&oL0*kXiNzCRk?3N%2zl&Tu={43fu z1_24sB%!zdx3N8|5p6Txb^F^V#minfO35EDcgQ0Y634>MZEMzfG^9C@-ZZhKAiArm ziAwun^;S2ubcz|Yvf-#TmnCM?!H~__?bd4>(E3z00~Awb2N-eY0KMW!1c3!={{vQM zWV-#2!BaC)!!+GnLbU8(t;FAh$vH_4EjSdC@5Ig9A9Eav&89LclGp-B0Lyhs;4aQy zyYm9N_elPMf4dx31kr$5@d&S#?wUyO8E8JnlXjBJmckfqr`6eeGLwb9A3B4v ziUMY2z@HKT%nrt9C7jg>#A1J?_JZ~cX2Z;Bk@DT#hgV9C#L?;>B{S_U2pt$`AwdVsq@!Lm6 zkR0PL2lN-1PbUbL$}YQ0I!Zid2>p+)aJ zLKtBIPp@=wt_46wi%ovs<0bPZy4CjjW+{TOd9st8>6a;QF~deefsMIqm5e_rEVol1 zsO!XLno)l|-<@zY|Jq04DJAF%bPEpp zPkzF~;0vJyIZma0-o8jC*$X4BiaR4Z9ICJ--0!_KZ94&0p!#*1lVo6FU)XJ=>6`1qc5$#lJz 
zl^3&rOO%SM5hmdweQC#E{i&e$e^a*E8F-)~ z^uDHJ8Wxnbx_mvpLvhoL?{@Y1Lp3X{7V%8fKqV(u*qYr{^J2U}MvX87Dvrn{y1x5~ z%8^PCTf3I|fXo|yq1AF%wp&pzX05fp zoN>anBG5l6IbyNxWt;pEO>jP4OaN%BPV>vEF5q70)i;DeVgo;XFzH=;pjB?q@RZd9 z*D}vLZD<|(q5-dEx9zsWUX9UO9Sq>0DsVz$0$+Z2SusF>zMj(Iswsz5e?4veNnuM% z<=m*~|F&!k@cus9brS;}rCVTl-=w)gtD};hKwx({++!Rb;=@UO|QU3t)Qz$a-ssETrPUF@=~>o6$W z0pD5t7cO!T)%jX!GhlD;z6B@A(fSY$M^67|F&|*PrKU6I0TvH7kb4BCgK{Od*@@ox z#j|^JA9;`;VDEIS4}E|rvsoW--opr{SNpr?19oQfuX02y-(N~FG$)7wN9(dJLMrwC zdrv57?92F|8aPXfW)b_e>`Z?D;>bw;-bNTJl@c+KU#uOD--Psgp!;(^RVO>Y3lPJT z?MDP}P-}DN-L}?OKP}_s@xSH3>p?D}9PMNDq8ZE;#-;q^U~Pyjekkt?duS$HiU|gy zv`-Q4{hZa9UqlGOH63Drd2E!SNEKn*b(WLS%UpEnBB77Xd@-XI(&;kmAUD zL#3+b8MfLgku$`gW`Ivl9$+LPZ8*?G4O|5on2*0*svEnthJ1$5zh4Ms{`S#bdEID% zHdX;{dBG5sV*+p$aEK%LQ~tT!D--`*?Vj%nY&4G-DZIb;sm27wV`6rag?uArdF_m4 zrwZYHE~z+17X&e>k@Xkm_H>H+^3!;{^MjksJ}2~REEh}J@o$8zyP;KIG+D!rv%%>f zogpnqRSlGBF-S7hQ#|>>s98>3_V(DopM${76vuaeDsQlx>(I>)Trx|<-KU}<239;p zW7+MyDYUwL9H2_E5ld3E2t zhYg;gPK}fSKjFpikv13#^eu_km{o@yJ|kO}M02dR{Y7nRO2WZ7+?1j-D=}`!E{whA z_xn5~7YmIEMKUNtR=>8}8`U(UI~4S96bfq)RmY8C>@GYRp9r6jMc{c>h1@mF9h|!V z0q;+;;%ybUIZD!UwKc4nhr(LH6lEZ%drfvtgM2ciA|egf`zv&IjtxJ?*JGUSzI65b zZw6p&J8rkXuNhb#+dx>OVX4;KE+tjZ&7UpqVV>dHavaJhBXVZuaxaUgf< zwh7E*SdSMlM*(K`2p~!}8tvsXz7hnFpm=^3g!XSO63(ua4FaAhif0}g z79*WZNJyst4+yMkJq1=~dH4e*tHrA%*ohQY9+i#m2!xJc3 zJ>)LL zbXDrLmJYtl{G2CsURAV5L2!4JczMMWxhvgl7MtFKyp-a;;3;_a-1{gcW4VN4H{7M= z{QQlxiif#+J#YR3QN=^^i!x)e@2NLWupTT+n5#qUDUTrGGaqY+I<$~|1JS!OT`zEj zLu!NY>-0=6&ju_GSZq6T_{!FhSYD#_;tTPFBGV#-;Sv{?9bkB%umbJ{^XF% z-MwEtKh=qFVJlHZ$S#9Uq}Ndnt{&89Y?r781Wgmk;Z?AhVH}}UTv7*G3oxfKK{-qG zKCy>=>JwY8lk+Yc8bS!{{Ej z$&{n}7$^7jK{(~Plrq2j@KY+$D75acD+*NHj&RX&SVq$o#>x_U-@h~bSWYd7bs0+$ z+8rHUA~uahJjc?--o5{aqNq=hV3O(L@{&|+kB6|Rth?XyWWqY-AYsKwl(<4J+qKYM znRe5?1#uETLF~Vc0zCETS~L_~T!HJNS$VcG?2*g@8UU9I-96&`x z>^?21>bAqO^R8SmN~PzIJT{_SP4`V!KO5nhZe7C%tWyktAo>9O1tf_QLLY$M4q$nB z04SOU2Gfr6uaW$s z)yH8)t;kbb`5@37Hl$1uTN!Y7<$#iZi81g{jxtb((N3F?!m34k(Ic(S55u&syWF?4 
zX_7odNRCBfOhR;{5j*&PTW;m5SK^hh*K-fng|hVC-Ue)F8m{YKV>nsTlT@xh&9~$l zAhEX7K8m=WFrC7@ckht+E0+~Z-S^uRb@Sw2_uTy|a~EzEXsWn%lUqryL$41E{NYQJ zKu7d&pt6_KTT;Kxrmd5XD5gc_FQ-8ikKwc9rtaK%lhtO1Cr(C8+;)ng>u!esV;ov2 zIeM+Y?}vP@c?SQ(Eg4asKa~?SJA)p@;+jfdlW_p)8Z@n!X%_Q@DDPeMZ{0l)o#xuS zqtwgdR0o_9vtt%_8l_~ERjz2IpL$%s7oVTJ7;7odfmMA#q4iuv?4nU(8cvAEpuVrk zB*{z86j1!l{adQ5gdPodO*?3|Bxp{^i`BQsVcUORS3ARj#4u@EH=UqVdLrViNnY^g zI28H#`hho6OoRHVW28k$>~uvkgqXu(NnU_bonckl2t^LzhQ3FADhsOb+c%8CA>EIA z8YeMHpNb;_2HQu<07AjGe>J4seIy7t#h=5Y;*DCnM*VOsRJPCer`SZc3zgz3#W0jN za?V22)uaj7_D^`+mp3wT3m!ZpOp?ig2b*Gx@lwZDy3AYuakE*Qtg)AM><}sP`#;Tk z<9KzuJTHD)812I|oN+vp{2LRv_sbPOac-%|u#{j#mWwuuP3qFNbPFV0;%{6~4F$mf zlNrJIk(haO#?)?H_*_qf20$j&m7E=7#GOtyYdaJfTAQ}oMF#9FMMU_}2nV}IjIMwh zFM70+Sy@>XWM6xO>q=B;X#okr&SFM zGo>ete46xWic{wxPyN1#c-4re7ng~^DX$^d%9#I@hRA1BCH=I7&as0d)W@6~9>;uk zN~er3QFjx)!kWcIB&@41N4aPGJ&ObyNtra4y(wlHDJb_9N5x&sQ}jgEe$(6Blc&2o zV7gKb+^n5D9upbs--FMuO|tioxDIXAHUpIHrRsROBVQAh3}29DNDJ(d!@8IUc}rfbvZ#Q4ksupw6|TkXWhWDRW+BS5?|!O z0|W%nt|`MJG>yI}ny?hT29M$6GWM=vT`QNPpte1Z8uzNP~(tuDT|#gWBBn`?Q(ZX^!o@V0^yZ zYt{$@3v(F&vU?wujJK~sejR@NDiYG!JC!fJqNt{Z9I`-B6;{;kruoXR4Jz4TK-SI@ z`FXr+2(S*LPQ|0!(pjOzljbhbfH05ltPkjh=f!Z$Yx=6isl*XYW~%n@*^X~j9oX8uV}<`d{=DkY8}SJsjAJ{plpiramCKy6fF>jEUK4N^wut^db?}9rdHY)b z>$^PF8r-DBc&XuvO#R`rF`~O)Q28G#NexI2oKPFdIa-5z)-_OKzucnA>3t^xh zPd3%e1O1$|C|d*@%iD-4nCIx)DNAeUABX9ENKh5(zk1b2c}$(S8*U5p9*^!`($v9X zV(|qU!jtVH46mID@Oskininx2P1|)ZJ)^3A*1Q~h2R210y5gGsO&Q`L{yswK49z0Da?r}{x&kL^jsOpQO1xN0YOhx2c?*GKFWgu4ji%G1WiQ+1txxfnn;D`}?WJVfnT*xRmC3Q= z7#}d1-9Li6|9x(&EEvr{Gr(APR;{WtZ27!w-4lyEiHgd*`ce(OdmFaUaMVBcDz&#Si^S*E_*yyfuyD+ zJwsjuau2e+hb{5y>}5veA2lviUcKwwGWclE*Q!B4cDEg?5)}?tO1eMUv7adunQ{CH z*^9Qhg|G50R(MESur0trXNR#r+Za+fDO%lK#@5|Ulmh!=vs*i*X4Pn7VXi0AaA@neBze1OA7=BVGPPgl)ncg~2Lwmw*M10JD=Fi` zdVrs04Hj_IM0O(+KN>MsHnB+t_Qb`+MDnKkmk=>9=JMq9B5?R$eEn*WpHJ1BT-6N# zRkxV_U!L;~s^PQTBVP!6hpIH`25U~aR?cQ#;@NEe_N6P6le4R)-`(Ob zdlwCUF%fukGrN(uvhh~NI@VC<%azwj6l+MfSswM~A4tN!;o@(C#YMX(C@@SX4m5Cc 
z{6hS;s2`k;k45So>tb|w8%ye&Wp1qk#C-j*m)G_}lV+r~Zs!=x1EXR#Z_a*R4M<=LPDi}CAHQ8P~vh zkGWe*6QHmm98Dg!hIgNOCKwxe+z+X}N1p5gUeHA50b~_3xV>2-=(nPPH+K$<{_6^L^ybuIRtouIR-L`3$@8i4VExhR`o!>T(7>v4V zH5>5Dcu#1eXgYNo^B?b^>Vo==z(y6*;q|C;|K5!Z!NG3%@h<&mQBhg!3@6?M$9Q&Q zqhh+?$#F|UL-uo%BOGSq_o*8oz^SVceW?Q=)-oq{jFZbu+0uKl>cDs}ZCF)I$QQ2$ zMziJ;f*oGNllJrX3N$dVi|dfW9X!v2kPdjVyG9Q&@s!wy*_JT9E0ozYvLLh4AK_?M zIZb5Ru&tJ1w@r-HxM~cPvh9?ucv!WEyYDyHt9^OcqSJhWH5<}A(4;~$r{%o!jdqUx zpB_+Y91cV^NGS=2T^maEz<+5$fm_B9>|yfr>jOhy=IWmhHUP8`wV|P5Ok!ft>grhe z3^%dD+Zh=k% zG5$iBGKne#5`#xrfQYU~9H=C&boC8TD0R@4an?C%Ng`rT`8VKYmu*<$l@GF*S+Bd6oN3hQvVSN}{8xLCY@n z{sGHu3n7M{gUU`M;q_n0+EkF)y&aQo!0SHg3O~l zBDW!Z_jMzZYz1S;CEVebXn~$apxmtlw4Wy1Lg8}B%M(oG5r;_LXos4(tXrtc^sEYA z3M(SG=L#E2!t4DtG%iIc{u(w)fjQkUOr#52O79HL3b3P_*o8k#=yf(CGTh-$U(lne{yO)UE@Uw z)J%9{Vw5Hut<%&0F#E0mhyD0OvI8LC?Jcz-0JYC*i&NhUK6|N{0dU~Ur1DQKc~@cT z+1?QV#xdk`soREh1qu$Bbf#+4o2(BE&4u%Mxn(u>sARp*ooQXO0wZxMKck;Mkl$v1 ziF~^cF_Bv@9a$V5&$DXJb75ZoJ;FrKejH&Al=T_{wNj`QUn(`dDv^t>wKp%lUh6kr z%g*}Eqx<3DPWcx6VS!WDFBEOew7$>ZB$)QSNHOQ;Ke3?obIp1KRcg#pn*nu=g8!7p z(d?*Be<>r5Rx|nzbJr}dy?L{NBDsjD7ugD%Y&&?J?|hSikt2rLhs|_ZF}F%nUq#E_ z^ah(#(MYssD^&6~LNQ#?QCDnPqttkAxP{-F+=*m+(_waNx_bYFTm_)*lS;5?XFCf?>njDwdJ0`D_FEJvn>K#D%Af(-qge zBsIEc`y?kzn9W13WYuS6B7V8Z=8Ey^ ztot&Cn$et=O7l*H_n4xkJXHPsd<b2nNZZRXwo*GQMAzbLDEuFIOsyJPBJ=dSs9ya=ae4hQhpo*mL;d*~}$826Z(1)jP zxo@4yI07${)S+khCI(+;U(FmBu8-$aboe#rqfVqknPS=#){gJ@OK+0sl%?r1nXWNA z{>U5iM$6-S?N)vs)BXf76PD{vb3K2YkU|8_PT7_p?NOf+a8ckdKdNS@n*TNi9#=#} zP|BE*3JVKY9FMs?>U4l8uhZuu>ic9Fe=iU@hW`A-NUL3s5da;a4&0z1m0Xlx0MIKg z7(65s#;_F;DRWo8|#6;q6h)x$&d*cniMf&{naP;A(x!vveqAD!?C-+5FH zA{JlmfE*rt;|-CG26G+Hf&HJj*Vj6OvM4uD3wQVJ?7pn_WC++k-1loC zr!x74nGCmt`IyNtev^(%ZmTqgngct7z1Rcvo(6D{2QpI_mR{m-MD+`P%7MLEyp4ac z=M2iM-OFX+*FB+AY);Z)gN>@yS6sJ>@OL#!BkOXj(fCO6U!%MH-$J~sGfBRw9TULa z#mr-udpjW(H*(I`ol@VPV}oFmt=H_tjVma#M8^&8K%k)!IAS)t=B_Etq10?vUyeVJ zv6k7E-qpNr!Fx{(awlN|93-XQI>6n{Xl=pF&Y4wv@u-KIl=%j*mV;Qfx$Xy^_Rxcu?S2tRnRlsKHQ1lu1o^^FntL#bI?=hLVMOXtbd;+F>_OlAQU`o`_ 
zWyT=BzDUHQVJOA zue#;cOgbU&zv&AWXLxPS;F)+ZV!kSCS0Otq#IHPWxYy0R{q-Cd)@2tcJ0!)*Tj9wY z=!)e`2)9zabU2<$;eO zUtq7Uqe-`A;ewC2)tu1F;3N!KHH_-Dlch^syRFOf(MwHxy&cyuEAclQv3)s~M@Q9{zrv`9#DAa_V_?dJ+_@;-~#BrgHqA4^{xnqjzoV<_8cwIl-%AV#k0F&Ya4x8oIInyJCXn|=hqi2h_Ub+EHlZx?~a3b#_QIy7ZU@T^ScSP8iZ zou0P%{PV6-S6=TF7PjwDkqh5-Gc(jyf;HmLz1>D3AFiQYGMd@+IF}f(I>nG)wqndI zy;oIJC8R0!rKhEuKZkQAoC})&PLm{6WWOd+_4f&z-1+V53zXPvb>%Irtyp!|RztvQ z9A4p?6+!2jrfV8xj%?C17CNb&tSPrY5ss8uk2 zh^2v71XdC6?m;+-pr|MYh*CCYZwHs$N|9ZwpvTo&$Jlrs1;y(}@!ubUF+%c}S$qd3 z2TreZl=JmHahJ0ppK9@H3}QW$UCh8c1FX1Iw(RTMlEX2$P!~4sJ#2~)PJLw7HOB@# zR1^MseVg_edQPt4MtGB?Z#HFLJ5K7Fnsn9>$F8{--`dJN2of8=$C3ZZ0hQcsnl!LC z?cP98LuX7Yg*-*_%?^i#RkwVc=b6hD68wh(c}0ufjVEs`oAfWjCdt!iYcy%SpDhKA zcz(N|u&r_uFGf=gzpzc!dkPPGxtdjlb|W)UKQ3%`X()ShjoV$riN1W%WqRSQ!BP9(yCnbL@Ulnut1Y!*cpWME|5-5JhIbn6K zJQz$i+*@pViA|rX5-e=GdTD*)iCez8YxPM(1s&KJWiosMzkiQ~C{O(i{gkC7HvPLh zuipLm31BUT=qikG0?boJxWVQRHa+wCf+xlSQpeG;kXeN^h-{{Zg^KKSnym69&q|3) zeCv3jwh1FZuJUBjL#VImh3~Qw3g1B_hGN(9r5m@Fw;EhS+|zM>eWPUQX?(9~?Ljju zD+VxWnN!X~1qhMq?==TK-BYn(vc@yJj}m-t6a>lHJP%do>p=JcA&c3dn7n-Xe4Bv1 zenvB=`Nk=i&JyLRnzcTDC0@Io2Ge@_wV2+B-?f zz>E=e9#`8=;8zQYKWq)<16)Uxm46DVSjzQc3DD&lvYXulu z5(kl-UeS>AT5ml?!Q+Ho>yZ|n{AKam*U*spk-$x@F~$T$)T4@C1EZbqD`7{BzW9Qq z0~`TcX2s8AWl?`Kc|6$v2q_~x8t;HUZArIXOG_#a`p?I#Lj_e{Scn9Qk`B|lHzHaR z>lzHYjsBIDqwE#^V~m6$uHHukXEK z#vD@Y%U%udZpI^a)*pdb7?Q&n-JBNqQ zAxwUYh7p83LZ%qnm0P5P8BI?C+RR{n`mK8P5%gfc?#Hby^<*|iHgZCzP1<%~VrH`Z z#X0zeOClm8Wd^U7P2Wb+o1&!&@mp4II8y`0TUO$|=6t0XT2WTvIjReI^;Qoe`3YJa1 zBIiN{UU~g)o`?3YA`OzVQqND`eqMt|BM5tcX}mXGgKAfz9yQEvC+Ol5Pmq@&!20(E znjUen^S9#k@h!?`4McFQPBkgXP(tyzna0cZjon(iQtsHGT>=fF3$yeeU1IV=bR+eM zpF+aux$%qGj!`tvu&oo(B?%@PS0t!!vw#j(7ps*pFB`wy>}s7?Crnps5~fnA#8@_6 zz6~pZO&3-pd=LZigT+1esB;1cS`=bEnCmlq{|NrO<^6rS;0fWpsewT+83lGhVmo4h zUtHdsd0~BR&BtytOVU;a5SOA-j+WswmmsRf;r}o<1q=B-Krx}Gxm%!BlyK^^;s72~ z1mH)j|AibaSEq$&Yf)Bt&NBp~-uJhX-=z%YYtrWcVQO)M4X*2U?OIua(c>tXUzwubSExti6BBO zN`mN~AEZS!Ur%AnYdcD{=7H=F{oh>{(?{Ey-e+gnj45Rg+Dhyl#r;YZuKSjIvsEzn 
zt6rOr^qE_VlWwa4J@H5U%zt2?1~4NE-}msOW`qM;@Ym{7$EqcbQw{}JAy1k2_c^`Q zd@i-sEJNYf8t;POfL)mnujHNGEdRf-Pe;$~N=sZ&jfvjnjX|JJzkJzklStYe$)5n| zrT-bS1Esh89wCpq;hnE|=)>%$x3L;fT7tblt=Ke^kE7MQ=}fM( zQS!XK(<Z2sQ0%?aGTs@82~8~c^pduApw|H)qC#`eI-}Qgx0Yht+rs}K;N|=3!}K_M|y4j zC%MZ|r;-1q?WMN;E|E-;jV~uhv?W-~W@Su-P0l~Y2Abd~to`w}y}1SZ zxxHl2)y;`R#fJz#4GT}$2YXohbPkc-60xl*iK9VARCc18<+ip$39&S<(%-hAMmD=W z0f+{!wWX5dDT^a%WoFafo3EmvtNl1+5)VNou%z;z?;??(dK+PWS9=vV&2>-^xeiBHNu z=%@ovQBXeT_z(P(kwOd>V89HDZZg?67y4K6Jb2lAXC1w=zP=8*G-C%=&AidjGs#DS zd!6s?M;COAKb#brg*Lq*TZaF{M|I7%#*dqT$74&)QZP;V@o9#qOMsmz;|u&0R3uL3 zdM6Bc8bvUBa>dy~zPbvpS<@{M+c)$;H*N6(IBWclWvw#AbqC`CuIE~$WX(64$t{ac zN=)N2HSU8PIg67kT#Y)?KEA?dt_IrU`f#l7S$WSag1p*{-O)?|IYEtU&_4--^tR$+ zTrjsOH9uiAxHSf}$rXFwbd;yV|FtE{EG~kxb+hTeW zN?`}G370v7r19>)gYAr4%{8T=xJI@@bJTYdH(1VdldhgcRQd8Hu8_(lj5#m^+ATDx zn4Bp=o5thpq@Dpg7XF!Xc)<1tg}kt#Fqv!yKY7Ud#y-%TzJ)*8|9ip0(;onsb=K4M z@E~1-jGzCxCj%c~lLv$Jkk0r@MUx*>{|Zx?6htB!{Cmx!+21ZZX1#RGCYwedH}iG- z9x$fbw>Cq7J^#C5#d0_U2OuPsx$C4({QY^z_5R5$`Sa1JbL1*2_|jaHMr!%=i`tQ^i{wbjuCp#o`MO0iPPl{gdkS$nTb@nerwVL1Z;%^V7QpG^+yWG zim$hHnkjUrcI|F4{k-cl|&_qj6LU#B4`&a!FqaY8z5uJL}FV-OkIiK$`bwrZo3x+KlmI~`ab!8)JmBIu% z!~;;XMnp;y-PtcZA{q$c)(U>r?#!wwjEGOe{ql`bkBTJ=6cbq1L_%Pw;9;O)#@n`N zdE_1fyUn-Ub8$;4?g;TVtTs15x%fnu`wm#Vq#S7glrVEb-q-e`=r2Z#2}zd7dw6VN?W5iyHA~$HtavZDI`0UB&wpUk&yPxN7k}gmbz;}AXC@ikQ@7dO=Z*R z5xi(BEoL;@&zZ1f@*KyZ)jcIip!a=x@rI7a3);|VHPeD3i2*1pC}fkux{(tNW6V$I zAIZ{OT&SKcy2)XGUzD1?66gO?1_x^WLU+ULzT;ZivR=+Gcb<*e5W}q)#6oC3^H2DZ z#RB|HIiGWU_f-)HR#ib(w`v{1j!2FGHCaAWb`0&!8(TtGN>=bhs-XY+dO1kO)}}hm z)ixuC+%j*P$I)grOJUnijgiF2?emDasHGI~WB8Vb7}t+fx|sdd4^EXRN0BKoKIoA+ z4#ePfqr}d<2V+Hf=RznDgo4?bOTx$~nf&>mul!%HpLOo!{kQ;6S%|vWb|H5F>1c z9c@ZUL16?8MvvaFmz2h&bTu4+R=n;6e*F4R?eM=+cBwJ*m5CM{(&lrmRnkb!$noG_ zXj%)FOh5-MA`)gWnnk!60X+uIG#<=@s2WaLUV9rBnvG)bTcIDWVV5t5oyhNFlOI?; zK?$L*WNEzI)nE=-zi2nh2USmr_MLPkcm zs~u$UiL$b?TIX-@6U=l^*EPL5IX(sve{_3*8@qmXZF1WC$PvBu06w5S*uhkOgm+kc z@2{M&Nv%d%Bk*|;!?F1Nxn_``6B52jOap0afLEap27n+`n80)>^;OTXh_H7Sis0Wq 
zSdV|>3sZa8YRQn(i#c(wd=jEf6CyvRdKZO+6#`SEN{Ql$$@d8)ZDetpq3JSCK@(?GIhEDk2G2tv_0934?3AH+MNhjx$Pj%{khSa1nR2G( zHXZ|8@tL8!*;+oX(Ny-Hm*Kl?9n3u1vpF_%f#7?i7i>e?{Gh5e)27u444a(W%^Ar!In@(h z>i*TqGDB7HZJn{RyJa@iTj}oo_-qw3WfDzN_jV=@deDfi^j+g}EFeq%Gdx4+5eM8* z^*D!X6m@rp{egk~CYqapou3F`RTTjs>p&cA+4EUt%+)!;!-+}+jD_&Ig^d5(K3hqt z1jXP0V!IN4v|gXZzs!v>X+txRqP@jcHgXl$4GON$>GwA-6OluBe3K6TJb zblkLZnNv3l_&+$;o%ivyjzN-meXrJj3eVg6zz zLLF*m1|=<54Rv9YIY4nZND+%tub&<|ncZP72*P8FRWE$_58y}mxo@?s2}Xr$eonKg z`xc@c>a-R|xMWEC>G3Y|!19zPJzG8P=#uwqcE2X$Z_+N1UJ>5?49VptKpUT$dhVbqcZpV=ZH$mNDd|^m zuY-Yx4siGC-sMcqa2s0!41Zd{c@C?wQZt%LCp^Ml;#us{qYenfIIgRE1-ow(#A zJ}07D?*_ySyP5ZZJ@qK{0@4CUqEq}sdtdr<;%Q1fC8s3~wJBl5d{xg_N(L;Z|%1vpH=_YDnVHQAX2s=5&w(yWriqX3`t zAmmyYqHa(%ZII2cz*`&YTTvpiE~~Ml2G^tm$6YErFfh-f|9gl>R-2{kW8-?t_wE0Gibqn@<9=1$3-o;lufD$vL0A)ZSUOU_ zec?=XG2&*|t*cHd4XEV!x03zufFPZNbcnmQfZKG?w zC&%lPbU;kVWIFE7WQ_&Bj!;zmDKb`86zKZ^=Dz5_dm_&xg(PXVOZOof0s#jMUqb;O zK)uA_wa4MVJR$^7Y zs)b;44B^%CIv@EyHje=hJ9A%4rt~NVg&qN=p!#e_NDz$X)N7!M+?OxgP%hH1;?~Em z8^`}*?dmY+-Ij03yZSa+u>d`%xIn5c?|8IN+Q0IZaYRF;rnO>qhEPhP)tHrs-!~Sj zm&^`7?;}PLoQFQ+Sha;?4+8}?k_dcsJ3t5ZrMEyC^+fdM3?$kw3r0;xyODqzg17vX zNAlf%5VD~s9?=T|nxHFF>0v;?zGJ@5F5h|`CTeBA8=x`bKw*7dJKO z|H=7`EKpHq9$uljn1gThtgeCLp9*!DvT*#O(=r5I6eODlGm9?;;8p5wLLH)LsQPsu z`NJD&j_Z3267STVew{jgQ-X({%^xpw7d838<{637ZqKd5M`kq0XWsP6wPJYHK0k9O z%SL(+tDg_|DnmVA3dLNT-N+46FeWx#?|^!~H}|34Umy5vdLBt6H~kXKDSAF4bRZ}? z`0nN+v$F7sib_25tECOP3@}iRh*-RP_UzgFRG6Ei{_ALm`|n)ICBMbZeqb0qvPXDL zSc{8_L~IvzfnEOPgcQ8%7t@=lvTiGy|J<^ISM%piHduA6@Tl?s^N->t#Ydh2eBCtl z2U+~BW)7L97hhoD5ZKejt3N`kV1*1m$&Vj4eY~}y>2r_MOqaN-vz4|E8rSLV57@rG z;bNkLdr+8UN*SYmc{|hnvRYRYC{94>37S_Fzg54Y>1s_RhzkPbQliSO&yU)wB23P) z+;#=X4w_F6QYwEPEa`e7mbD$L5pb5HpPjidrdS?VJ8Q#C? 
z-pz~%tlk6rNgz{i0=dFudM{gQLHg8Vw(zk^-_nP_Ztfo3_cCDDR=n^?r6#w6-x$`4 za4@1EqhgfA{VR~6@yUl(1%iyA(Yfk1TJAJwP<7jv5X}Uv;A~=}98x6T{(9@t;ZjLZ zKAVf% zR7U{a;S-x~R|?yG!<*eokmn$Pbe*%mR6`iP?g%e885yS*w+4x4PL3(#%l~$m^A1}< zM~A$Yu;SP`=Ftbwn65_Rga05?cln5d=teKgU^3!{X zXV@v3(j9+}z`RU)k7|Lo$qAK;N=oX_(CsmAluH$7q*iRA@Mk5mxSPT!-+sGy?bv6{ z8YYfK{6-Fp)_fJhG8xah%JOA(KODN%7xJtJN?_#S^tn%B0$GfmU;w{t4HA1%C+1}y zeBu6Nk1s2LD~x>4ky*hCX1Ih!m~d zIsA2|)o=dLeJj`>lb=E3At^20(<3L)u@kLE%5ex4j7|)oP533!U_?>eq+~Vm^J-`L zvoM46QfN1y1 zHt}%A5~5dkPr!{BmTx?hCb7#B0aA>We}~7|F97>im%sFRgbQiSOKl2T;!SQHinSbwS2BQfH7ZnNw=F`;x_sR%pXKsU+b$As^ zs~BsVNO;B(3wTJCIs4#b+3fczdKZ~9vAQAMW{ba#DTvnJlV$#odA&zxXDYdky1>CL zoyv$Aj!lm_tU&`nFQ!9D|10Fh>Lp<_fTfA*F8V%F+FEC7F#%mfOvk3A*)ty8R4=}s zO@4p25%lSspr~1JKS^tA)n~@7`$e4UY7UUd-0KfFuBc;UfOkFouA9$E@e|V;ooL`t?Bk&ZBO zRx^M41jw}mQJ%l>ei$ScAZ;%WWh5C7rsbpj>X@}u@80~v#LPlP@bXT0C2!>W+q6Yg z^G`AoxDR{f;l48Q-nbJx@G~6j5;>0G;f%Jk*A=@Dyj1cXhA^z2bbC_lf;~w$^|jiR zw1!R9;kj6IL~L%^%MM}Z*_i?jgyCZHs`5PtuOYJ>PS*suzub=Gi5vmr1SqtCdO=9o zdsIf_mP+2W>*Ech!*}DPL5g2{OF>yma5${y`8e!wAPnqZ;&Azq0jj82#^uaKz3N;9 z?4HGw$pbEL5Cn@Rz8e=NM1v90f^M7z1JxwD!kusoYY*l;hyB||l=cDbq$ppf&m(TG zl_-+~NipwF3qmDR2%>*EFi8@;jI4j&@{EP^Ny`CZZ1U~*(ZFY^ThZ;DewPxs`Y2}~ z{N6J>$wqxD`&3Zfl7!4Nq;J~`6-8wJ6V>a*xp(kbxv~)=vlvLvq@CdqW#!uWoRpX( zo6`d>!lxXKfKLShmU)tthP+6|=u4}r{BlN}IBJU&dHTwG)ipuCpyw=4I^}Jv`d+SS zj^MHAXb}4MZ}`8R@-HhKP_V3ltVFnwU!P7)@8`SJrgeW_AS6QE|4?QLQK06>UX{pW zKJ|1>GtX+P6hV!wLtXS7$*y^}E7D0#C4cXWc|pyxoV$jFq4v>icY=9M?;Qw2U%l+msx)qM~iLEdQGjQ%kaO9jg;aO z{zoFt@r!}nUe>#~{xK%sdE;-d^YjRTwCwRFe`4)j8woD06iX{B5S@yKft4N?Eh7A# z(d^tif5LKap-u}8{NZFxK5d#GLpE*io1j~)oa~2N*3*>OFyAVrPfV|HTk^-=$ z4rIi}+zA^zT^AJW(G(8wEQP{4&g2X5GxTJbB~q#8<5}6<6=fhMq}QTTSM$JiK|?BR zz&aGp@!hrdFH3xBR(v+CHGu6>J{QCvSaa%O?8cY2Rfl3p_j}h;lvwP8x1D$sp=(-^ zcbST}d)L9!th?@e2#=&^biI8_ixu9i|Ai)h`VW+*iTNkH zP+Qx!`y06Y8w6P4ys-gVKd?E@FDm-5FnN2S3{vUp{rWq>$msupq!G>@A^A^w4_yDg ze}3bxkKcI6Pw0FQYG<(APCDJNt3|*amlt6{MA*056FFDH;d+rZafrVfd9>Ienf5Gv 
z5YA#tfI}r=Er+D-TLsfcB8;f=YQamfYhkWNZ>G$PBoodHIMPALF5knYl^4{aZS}`+BecO zPz9%q3l#JCdLDYqWZzbs5%$CaQX~r^uOh9x(IcMa~~qghlm>W4dci!Jw8s&T#)}4Ih@VYv@Up3#!C6Wl z%GL4WsCB3~?H>YKnAeuRAu2ICd9s7rFfcPypP+9c1XAc|0@*F-cFTQC;)%z61Po5v zX~VSWMuX5@4SM(gQnLst8h$3=uwRx3lp&+uQX6-xZb$E0k09?BR$1Ngzxi?}=FMu4 z%FzX?ExX@c1vo68ynsm<0AhTtnMw+&ok>WF9(+l6{ z3X9*4@wG(|9cwroT8&6P7NMkmvUrIJFyA zwrgvKgnh<~OUrM+5;38gaeDq?DYf%>D9^!NhtzRNAd1(LIK91Ng3{q~?a8n36^Tjg zm91bU&f4wj)bp;*Zq6$)MA?1L5T}Mj1<&Ru@n%-?S{mqDK?|7j`JaRWoJVzXV96|X zX5bOw5U=6AUeWSBMSSH&>Z6%8h~hpxkAb58DeSZuRs)ZdWcFzq@RR7fF?cX*S&M)Q zD$sRKm~z^u``mO~>Hj0_Er8k>h86Ay8z0^c}ukfXV$ZSzj2CL7#r>Ujyq7R-qm;a!3jY-3k;Tg+e%MC@@^EsOMCRW5jJ|GXmu#B{W{-LXD@v;2!} zJRV*9WMb!BZ{&u#=lM5!JXZG6bxCQ1@|E>km8N(B;yE#vZ?2t0KNb+1R3$Tb#?SRY zGf5WsUAG~jp~(W|Sg)58X%IfNn(@G}O=4)pmq%=5aJbHn83s5cEv&TE^2s1SqRnGK z_5U`Vg8<$O9N+=gUW*H_sLwpQrUfg1{TWV#k3`Pp`#YXPU0??s!-6^MX+`%L&8H{+ z|JByBFtKKxDE5D`_0Z$}Kihg1o!wxL)4)nGj|D4u{HEDK*L#15t4(@S-R;AJ&PB(Z zin5p~IU^He8bw^wA=t>Wx?a}v8lzGn`Cs-_Pj}(TcAM;k4(b`w9JG({>78CO^`=cB z$b_p8?!+=kIGFsc;zQpvBa&UiV$t_zi2Gl~(PK+KOlK%=A!`)bK&4+@DjriWE}89) zO;exX@8|0eqO)Q~rk-LA1Pu2&aiEHR)r{CLN0+B7JEFI33fgw_%rPal?s8aNa)o&h zp`Ss;dx3{cmoEES^i$@wbP1|3mpmwl*skM4ziJJx@L-$G6SX z!x!v`ZojIYd?1mHi;4=XTH0UQV>kyJC*rpYO}#`}O{Dz1>Ra$FH`^Z@78G1P@R(=Y7S^6mFW3bL zV1E-?o$4gC!dhR)_a&YrZ{|D|rrD`q|J%&x1 z;Cmg*J`N5c4*LL#SMleXA)x0aBZZD z>=9fBF53h4#`0TxjD2mo_a>@b73&;YUF;6TqA^{e;gdcjyP~G=z57)l#*rgvNEcRL zy;hb;Y;?Mx>g4&&`ffQ6hWDLZ_6JU}$%>~SUNZi^zDz_BWBIf3)dl5kPe{G(z}8iY`0n`e&3cWZXfvA28b+& zCXb^aXv*zuIc97h0UQP+#$v9?Kwp4&xln=6n|FhKrNmsgB032VCH@QYz@Gp#q<7pi zhTuAO1iFGg@-fxI=+$q{%vr8vbb!V zVf5XNruTOJasQ3&?Z!se*w^hH%gbwxiZkr8qQD$zrRXZ6=w4a@L4dU?bT6Y7Z( z-?ykVJ6KZxFJ{RXEPMxojGb5#NtF}BaVa$1w-Hq7;!VFcMyRjNmY}_3 zYSsX!7Y||0iUG+Jo!Vx-CZ>&u=t8n0XQ+g@#JvL8t}Bz9Nww_S0U_J50#dT1%a2J_ zgYP{<@T%lt0Iy%cW;X2(TuqAL4GZ6+5DYN>JoL`b({}{z;=nx{9;gkA6`*Dq8z@Hd zy`p@3i}-Zl3gz`VP%jDQ@-l5_m7qF%pY5RqWf zqvz9zSpa(CSb;OtyFQ0;>DGAE|3<7TjWwwV{zCw1iOirbKHRA5S2A4dy^yKkT~o|w 
zk^qotC6c828_9A-PE)yBG@EpuaiPMX=EoCekfM{h5wK#J=<06ZNKM0#apQF+Ux;dw ztkL5K?Ae;{2OIDVZX(B?khC-{>W``OeovS#&92w$-?q|=f=qm0b=$qFAaFSX=ytYl z%4)6_rT}Bhvb|wb?mfUmhw#A2ULZEilk|-sb2ua^A!+|%NUP zN8TL|UgyH@w1mL*8CqmP%(vvdf+396!C@xq?eaHQa~IC$@Mv8*FV%#=JqwFu7Nu^9 zS6bdpD0(6Zswy5%>r%^-weX zL!C-3MCv00wN4H+q{oaBkH9RYaC_IJT$^Bp66}{S=|Bh*wTKNASc!#=`bQ9YHsGTF zX*pZUPG;j&xiWB%lhuDQ_#!nFzegD2F)7m!h^BmnF%0O9u}TDn9asg{w zSDKYr8k4gFBMa>v(A`cmc{~_7I!%1}j z&BruF2_m{C5Ftc$o@2QiY+6#km8fn@qgt~%cNy7!`jVpZQONUbq*o*{#!W0@nS_*Z zH0xVMDY@UvQXX)Lt1md_#_sOne*5kuRZdwx)x1~M<2<#v?{~Iz`}#r{ZnAK`BWr1~ z_j5?L{@Di0%%@9;v)>(finwf{L!V>~e5Y_cB_jTA3Gmx4UixY_$R@du0K7x+2Q4`j zyS8k_Ju%i#^YlF;jW%)gbptQT-KwfENM|=bK23E&+{1dC#hGQUT9lt78sYP1D>*rn zyvnHQt=%CKA?jKc*(&qKEFDtT@Zuc0l^}-;9oXU_&ZTf~d%~6ZZfuuf+~t!paNV=3o3vCpKe!{6W9ft9?C%P!nmiiUaB8bxIuQJL~PQVeeD-(8nT7?f9>I&8(LP_vs2qA>!-v; zlft=xNv?aRL^qUr?^0-}cq)9X0*o)$WCt`IrND0EUSB{$kzeUUx8I~{>i{MMR*O=1VhbaWC%z7{ zPP%Sv9(+Pfh#0DQmrezi0Jv+S>Q+VqUVecs#8VuILfn8hlhO}ty<1SspjZ+)U>o-k z_1oOuzLy@guTaYHTWqZi_MiL4Pw5)MCHKe9n*z~-86@^UCdPT}=Fka;j(T+Q#(yd+ zFi&gj_!W@7den0@sZhedSH4VstDfT)?88H-GReIh?4#u1TQ~h)^!b|l8CKg%F)r8v z`|Zm(_do|F$-T)vV4ut{{s{?F5;O(ahXzn9J_cY70-#s-8d`6(To){!%pB@d_`uO+qZN!Qoe_zz0#hGzc7SJH%8XTrG8=A?E136d=GdfGd=? 
z+iY2arzN|}X+%HF4^hdZW6t02LwujZP+w$mM>})?Q*5q>e-#)+HTW zzXVgv@=>9W4Mw#Gc9E%7Ikv^Z5xbHhWlX(B3q{KvU#eOk_r!AaYb&MqNEdwzTlny8 z6gGVLHcoVcO9^fG;O9<1ieqj=-ON>qVD_NVHy%WXV{grlG+ZVe3b4v$w_(42}?WC zh7bv{o9d6KdDIzco6Z+}Y!yRc>G_LS3W}`}|&*LJ=y`>!M&KcXrK=@6j(#Tmk$P zEK^{cnmv!MbkK&1TRy(np6{2NM|-a}m@kpO?3*G84O59hakqFOGW(JG)b=#%vPt{U+e9Q zr~;%fy)j#na3|y#{@L2#_gul@f&ad6MN6_*^65a)7K&AWu_a*qjzihHf>@?NfhCS) zOf5&`Kp=UFw^8X^LvZhCC^IBf$JyO5ixH%+%<3t4U6~%jhlzC4tc_nL!4ROR4dl}} zExo1iQ&4gf0?+bdW9`QBfcPx17(M?T;mJ1z$&?;FRQZIB=i;J{Og#qLg`$7pKnW!9vH)DVS6%I_4Q%{5&mX3@w6 zDVS?UP!zeyVT2j9h_eDVm@&L}2<1S;*yjdfEuRABaRWrk#g|K00%|Cwlvi87lzY`L zRaz3sTX3O{OtH^LT61W%`W2f$kB|{&<84Hm!3Yq%NI-miKe=t^q4#EGl3E#>mMc4I0|op+-WF@(`v*{y-7#_mvKdL^p`i90)K-`vJRdC@TI zGob7jgbtfe!){w7$O;vGru5J~tYN>IY3_{y!9S#}gcp>` zvcM-X%E$h)-pB=?fh^+X+5sQ+J?VP{h(ch-$E-g{Wvq_sA*G*<_)kXu`1@c&U`yU4 z37$wqw4jS9RZ6XKu}bU-#-h0r=9f|^dTonj+c?QNS*~eA4L!ZMpyQ#<*Lx!1f#U4m z6J9`+W<>%O4Gqr8rxSBh-=3&M2hL)mxJC2c=b0|YZM;4ZLBInDZRBNZaAgJGO+W7p zY#svh0W~@c@$yWKCj%@iyWbKC@j^wT$Tg^Aju0}Ff$MR|mi`V(Gf)~3epQG;q`unp3&u~muLn$<+Z0U!8#D4g6) ztx?pW^ESt~rn_ZoigV~B@E^mri(u^uLB;(^z^vS7xpqF%;;a5Pac)mtiC39XeQHn% z01CnbWRdwYuqY8)>FolNxER8qx~r79&y?yLFkQj;=s?9oJ~U17<4497IDm9_cSOcZ znDhi&()+DgN4Z*PO5CXg^Otf@#*Kd)Rg(ct4%OBBO zd19~bG`B`A;84x?Mt`jI*aPx?IEg8}!1dWmi*pS!QFq`L);RdbER#%5FV+9WBX*spqX?E0Ay4w|#wQTF}gjO1jP zg5}6W(PtiGuWLH5rvsDiyDY56RSodKNu*^ye`G_!cw>N(o`k7>u2W&oPmGzqp9 zCH+RX1NcB7bd=vQk(wiOpl}T4r;RQv)3meXbFuk4Ycuzi6WJAQyaQw6Z8%dpj?Y%E zD8-$pbDS#1PN|JEY#hVejtIQ|*qe{3s(}9El_=saU0oLNX^a932R8?pL+g8g7DtPJ zSGG=dgDw>q$QMae;MjWWNh~RC*E*wkZu%4!sF04oMWnRrDs%lH)UbE%X_Aouv?{~l z_x^*Q|NaCvGJ*)wzNSU*rHwCtWi^T}9@Ox2-$w%7J&|p{qHPWg54ZcpH>m#i0S670 z`24#MI#51YP3Sc?KqHZ{NbdY527_x|;y3zSW)^bQJAt`?7QFb&@4KMWAICy~xTK8S zp%%)MLC=@1Pr=z8HiM92>2B{OTf*Cj=8ewC8JmuHPxE2ri9&N*?8prwpIM}L0qHt0 zJ>yfk;RKbxKgKmHdxN2SgubUgJgxQH&aJm|M;QTG;4^qci(T53YlMS zMkkyD0cmEKANskh+VrOUnG0Rot|gB`nc?uQUE0;D_AW@0{g0#^ACu*=i6G)9lll?1 zg!3!dqAs!}VPsb%2e*97Lptny!zxs>brKIyPQ6}Ikhj7I49rAE7ldwQSLr=ghMUy6 
z5?dnaEwZcRH4`tUA@=$3YU+ur@;7x$&2+CgUaiL}om?O9nI#6OInD` z+=v)D6mIt^uiMP}!(=8Vb`^WHQkE%ede`i^!IpclSYmy_a-K=%kX( zE|`N%UbU473^cKvs^iE~Vvph$5WrYX(My8b-j!AapK_%9#HiiCse)*q`uG$fVY#!`QJVt9STBn0Rgvs0lFZaJ+Je7lZgip@gEW^ zBhG-=GpM-4MA?UKg>>!2OV(NOz+eP`$U^);Y#SLsJNEV6$Xit5PR4NhQM|xRa=wDp zu+iPZIUVj;LAr1@roh+Ws?EJ5*)#gGO5M~dAMqCfBj{GWi45zf8o+`#r-Fq`$&cbl zhL?knFsskC1TeuoTu(_gL+xHvCmUX1NE|uj`Fb$9WtF@`#R^(y zQoa)OOBSi8E18VAbICz|?b}KGy1n=AK|HEO^3}fvHuCKT`Yg15Jbym)CM9eKwaX|$ zJQEmplWf>7?11LidoqQ`B~qgi2UIa56g|t4s>3H@Z6Unw)Azl;Ken_?JXXsolaP-= zboUQn=K6TulgkWhwm=44^aQLcczoR#qk@ zK0*q;ar{%XOrvCS#Z|6oM2#fsQi30MxA-i~*JQcrqr1PzI79TJzhgOj&Zix2hJs9s zk%il?&P&UrLu_Umg}OTpHHb2N7-WCEfKM2(5fS03Kj+;{RLg^yix>nT zH=lQv<<3ZxLI61|(yTJ1&o`PKZA$RZ>ZYl2D$#{Nm^7GP#+Ir4h0fVY)3w zJ}jnl5O}aqR};4=kuN>C>Ll!}_W|o3BZ7;4v3C!c+8 z9mR7(-<-N|?%+ymPR;|kRvjX8gnVeCmxNro~6l1p+*6mZ~|nP*;UPK_Dw(o znv`czP{O_gas6hDoPT+M`WuJH;|?*twP5o%m#gvm9(o4Wv5d&R_Ojzdd;)@yAr1y( zvaV)RIahd0$Y&^s%3?1nGcf*h1Nxqx>pI`;b9mp5gQ;wbgP33Jgv!Z1^^u}kVX$uB zCR*i{FP`{5cb%5v%KHH@F;!j!D}eHpY(rL*hzB8>pQ&4Yp3if48LOmImZhb$FWgnG zQ|Z~N;?w8t9lYW-h0C1^2-2^yE4oyoQU9~x!}MohZa}HUY^*OU**Gg?_r8N(KNW>1 ze4M%|K8Txh|Cw)HIOZ+3ofu(S>G4NnY4UGi0 zUJ!Ocz(cZOcS(}rJ}@IUE$+w!h(X9^oOH>+$UN{nZiYE8S6>&H~iL=?7{% zQh!!sZNX*vW8C3qFxFQp%oSlk+xYN^yB+&}$z3ZzoA!li_&TsE(t`TAo|`*`{D{WI z*=np07=DwSS9uYy%`1{W(YjLtA%vTWtY^)!>nId)HKOO1XN_yckZiAASPzJ@DrN)y zh(qpP;0i84L6&}im-J>Jc^?Mm=T%x46mFrJq!B;v4qUS_F)2sHH!ZR7qh75ABvLqE3T<&C|ly?Y@2*=%Pf+1Elu;U=u> zeo2rHY(hGqiss_6LxC|Lm1@IMCUf5&6v~))Z$?X?sKAEPDE3?Y*g9^M9sS$CteAO! 
z=&NI}!jNXg6Z8f_UTSJA3)+{Agh1JZ0B`9#+4gmnxCG=OTW$q)9`OrJ>u(4;_9@un z8vI*C1SBM1Df~^m@9}_Vb9DHmq$y^-4;KPKxo5fK#>2X5tRTYn^CT z_=S!zEd?O5o!6p*tro>~fC~s|P}0hXn2Y+)VEiSC2a|Ixlh;oVm8SH{S#{OD{y_#~ zO<=<1iAfHbe1nE$1IoaKdOs1h+3AlyxYpo(pzMVt^_yDscV(7^_!lF2yL4OrDij=) zN}wvkD1==eAq7Wy>R!mn=B;unP{}T|BVg3qoUYneWuK9%Cbx2k+e@9{A~h$x(xR9$ z_I)ssf{=`%d^PbVWo<=3ku&fR!(PWRi4T|=N6V1A+VY8ubmJZR;nJTN$v&gs`kUGR zBB!+*vJwm2(5yS)eRuSQ_Von(#k1itF@%fD1!HC9qd8K6kB|o=U`5Rvc16qt#z{oVTPrSF;TAriT;7*}wmNQE3miJ*h5@sBCN`-WU@ zxbcRSxr01^WE?lxJ5}zA`keQ_&PYU$`gwq;=1n|wc)x%4#%KCkC-E9CNc_Bug^Y|E zmWM3Ndh((3KT5Uh2NX`XrKyEhi>br&mBtHkhCjadtr_)QzNxqy&BM-2Om?Lot|TOT zuwO|0Za$WlS#6J)fUxOQNn+3Z(-~_>XX|FgeUoAO{7yMXyBsdFzmcy-z3XX58HS>e z)!#}IahlKCV0o$jj43c~-0NeWk@@XJ7_e_QfJXG>u;Udzi532-OFPPzHXzBBDkqaD zetN58{(ZcrpGpx2_=qPYHd;~O*|F0*aD8d|g^<`+;rbeoD}FL6toQ8)|C0oiaO&nb z>2hihyK>xRoC|W>$yRNDMPLAORoMi+UV1#$7J%*L(N@y?Qf_VsGR3f~ws$@}V>r2x?fVgm_Bn-*I=~y~ij0C{ z4hFjn7hQ9Dkr5F{<}=Ch2U9vFjg|PYJrExAs`JE$D@_d?g1nrk30z?LA2luG449$U zxM#B4pAZ-gB?ML`79E@SOw7?^69O5(@jT5^26BOc4@cO9<){=k-i<9g1``&qx4M{P zl4mO?GMDdkixCA!@VsS+eg2}lfjTB*8&0u{FGd`L&#DYYfa(W`WY`=T=aV7S7UDl) z%V2~;ufTm`J#QjhjP6~Vrb3anY#d2eVw9SB=1PtaFJZqZVL^Ru(%z@>DIF}ZnoV#< zKSpd5-uEA=fY>Xk1Af}In4dWv&3*ox7b%s^s`N$Jyh*Pq;5>BY>B^y!$4*Hpj)IGc z=+`eKHGO^7DOF!tCShZ~zqv@-A48|7KmRSI<)0DV9^Ll7Ns?Ju?)zMGx7LkF(=1ql z<2`gD4j9=QB*cv&z(M8U_~ijeOe8qus7Ex@!ZFkKlIzM=`;yLrkfif%$2*e6 zr{fK8Jss}kTmJc`LCo^DN`1UadMUS zR}O_xLuH>ETOKdcbN+F5bHikrIeyR<(1AfFgn6cw7{_#X=vWOImFJsTX&`IU0Bf~h zz*?>5hfyiAN!dTx!d%$6^FJ`Ci*tSmDp3k|Mn?|b8s~uHAwv=pc=_{Cx%o^|LPWrF z8KkpdU;TwjE+CK)kMNJv-LdZOeJVZ)Q94C+g@tJ&**)Hjg_osVZ^5CdvNB#-qtj$h zVnSf=1@rT>kJW0$-uyMg|0$Pk@$fnP^`?L~r`<G04V=06M{h`<=6?j5+sivKTcz_%a>M70R!m5fSs^It_K8 z35$fWX`vZ3%4JLOb&t;7GTIQ=PHJqJLb^r-EOka)V zpo{r3N8+(lMoX(+C>kARoYp3qL1wX$nt9wh^?8Zzq=-kom&tM=tEJJJsA!>bhxmS- zv)h!Cy@2GMUP|5m)aqo$xx`{@LUU|>^%uos$w)K9W8AHKMVar|^`1#dL!WN1C<&N| zIY0lRLB2x13HE4W?YhcIc`=0)$JsUiU_a8EyRmvC*?5v5#F4oZY@uI-w=|gl#T%`{ 
zaZxSA8%5m%S@Tm#?$KZ_?O;f*9;U}8ayx%psDCeB$_b?TNn38FtzU$<&j{%NY?l;z z6UqM?J-GfM;5RpHAUeJWT%iU%M!QU*?$=LY4RbLGTu^eDUovQh!W^z2>xkcL1+3|R zA)eUjceCQ1%XTK`+Tgx+>PKAl{d(y|_?weGuT{2xd z)`S6N`gxD)B_$BP$is+w5 z$~8`4BWm)Cv>wO{Cgp}@;WeB*1ArNDpHDUj7MABg@^sx;0k+)wm0J5(BO!fK(#Ey4 zkAOPZm_o>*AA7bScOrdM_i)J%aFX!RQqujVT!ps&sMVgh6lMJKy%||sbC6>|Ab&(; z963KXx7XMsz4p%4BJ~WC*QK8yCZgaXVH_imQ=<2d8}#0m=R_SGkU zHS5)#pOI6&5CiKr8HWL-FXz9f)6hC$@8G`%fN7$XTjt7DBg!FzKg{|k&|KmE{ozY&olegq zuy?rxVG0-?!c<3G)&Ygs-t{{6mM*BibD&RjtO0WoO%Djre&Wzj6!cQb^9HTsCdptx zZ#7=69IiiR71j|*&(PvU5B<0@eE~}&juXyWv5)UI+4vQ^KD$ubt8WuZdzHRh#@Vo` zAtpR7|HQaXGu`vwC&5bl-w5<}Ts?M!7S zc!*6oRssVt_N`nhz)cD9{xmiW)(W5YPm~?qhM~K8ZS5lC!Qjv<{QW7$7YKxKC^y#x zpi*a(I5%;!E`jw$RdwxvMIxz`yE}`5!lY&KI31rvk-x8xZ)pioYvI%#OcO|IXf(S4 z|CKE9BRa5zT}}TvZ7|7TlJrx;V0@?4zL#wa4v=xI2-CtC9~t+CU9(1F2dE!w^v;RC zliTp%9Q4T`6>KDaAL?npQbB@cOBg)53m*wofZ>Kk0b8NW<}Gl>4Zbpjmm-5nRH znXCxO^bp2YWX*GY7x@e^%!(8_T6E}c3iXIqib^*-hF(fI=7_8>^w^_vYt)fh*3^Xa zk3Lda!V+R!__9d;XY;en zW0rXP*hceu(st0GRBB<1M<7AUoIvz0wI)pA1?s zHsIDvVR50QeJQAXxRj99bv8;xEPkJ|AFfJLEQr{6N zW5;@qV4eSrz%sm*B=r*3p)%r_0Idg;_Nb!8($&ns%pz;IKf$@|b7^e0pHUi{%lp)i z#hq;#BGh_b%GuF_$Y@q-wd+oh6Y8wryjlsER9KD7z3TAS09Gv2Sgj}~+P;)t%@$%F z;HiD$*W%%l4U^CSjj=EO>>SGOZM;$ywv&EW2vnAjkfT&c2)xr*JCkDDnvsutwsiqf zsVDidzNy>Z7P6s#hX>#Jf09PoN|7b(=f%EgOgTqrEd4&-y@@m(Zzb=TV}#}0#Tu4f zN3#)+=+M>IH<(*qhU*JF332V2)H6*VPPff*2`9D4A`=R2^wt2zFWb~ zfJb-RR{gYeoo%ow%j@k3F#2?BqozO+5v#awub5FUrEbR1D|iDF@C-t`dV{-++Dn{T zP9r`D`uF76B_;j!x2gfZAw*+avds1`R8%MFJETHFg&11e?zF$$eNcJr#@HkAV7OSY zb5mpZ_+iKp4FPVEv6Y@mU+JOK>H3wIJ8&U5nl#dH1OLiwXP=$)`eARi89m>yTJmdF z#Yd$(^08j9T5=X@HgJ*$?-h!Fnm;VI2Y>de@{~)C*Z)D{>Y2jmZE+t+_gfx}kPcB{ zgi*n!!61b+7|$Rne+uBMc{lCb&q}$`lAxG0$&eu1>X4h(B)6MTeeboX9z>dRx&--` znD=7=guGBr;mVbMcBMrMPr$P*s-Kokdhy#Pns~l}8%dTTmwG!RkzHtZ4R(;agZQK5BimjonVb#?DQ<2sccwH7)*<8q`!&Sb68?$u?WLpg{>_hW4c&je< zO0-zWl*%e9mkI;Ztk`cUQUQ$dm@^oBoHd%3{C~dnb%vSFLgyQlk9=FxymC>X_(N=G z==I8}-**w+ofC~m|2b#%^C%GLWvK`f_jlK*C$Pv%O=h{p65wl&?wB73xc*dc zVI@lSXC@FnAkm5z`Eh3{U$u+4 
ztdC9S)~)IYw{pUfF#R;Ua5uNgg9{97BJ2A@7I%h0LZ5rJP*GDjUzK#{PS#taG!hAw z3JeJJv`l1A%YY3P7p&~q;POZ@L*|eJ#f*omA<$68Z@(q7I2w( za32u}I_i_QIl5c6_&mY;S00K7fh0h?8GxZx*9u1l&{~HD+IPpbV7^T+9N7ean3`kH zfEV9D`a??@zjwI6#IH+JE7w;JXB++>$oy|eMM1|R*x1-+x8-&8jEw1Hg#yM#uLQY* z{*a-eP(L3&y3Opf%H2m5zW;QsR(0b#;0sQWKCfNg3(r_vZI8IoP`%eoLvXO!Bb!up zfh>Ezbc$$j=D3cPDFKH-jqR6;{OxqmQlMeRMH<}&)2vt#quTQF@;CaMd?bIHab{1` zEmm!l%oRtH@MUFBvj-MvkoGO4!Cmv+cq=g>|C}cB^z@}x^S_B}pX2rMKX;x=2j+kH zo%pbOcZ=eL>|3~ybgW8cjB+`|-X^6qJ+xEU3eO*E`_hSrXW|f4)cX>1oJg;vv%6ZR ze`Zi9T^&6IZXHS*O1D{Hk`BD-05E$krR){h!i^LZvM*7DV~Td6DWI6(njS`>Z-aSp zZ!~WfO)-;5H&CjeZ?wLAy~M6MDDh2QzUU#&xT5i3pSm3TWgYRTQkI$z5`5GYBAxwV z7z6b7&#*tDsV{f40{nV|YCR{{KDusFac`A8H!M^YEi$5~)j9jO0OZX&OOIQ%Yws2@=9Y|u?>f<7L?K_u_rXWQ^uJQa|&8g`Z(?IfB5`r8wu^jaE|eay3jz5)c?j z7~n877R}4%GlN?AlsYrMsnjioSG&7$ohXRWs)N#J-G%-5}*E zj`a4!W(eOk@@4vf7i%U_s7SZc6b)9bhaRRn#F9H;5f#jz>G)L;9vM5xwAl`Y^ya*L ztP@8mkd1pB-m8D_gou3KVg5>7D|MP-q|trp`E8GkOC&83%n3Vwo>d-9t#O%C`y6bw z7$5(oAF%YelK};lEK&?5K*Z)-P1s3IqExH`IBnkfDeeL~jXA@JxJe;-#|)Kn1`!bw zih)hCZi{x^E96J2`wx#qTTdkW2RKseZz>zSpo@GN z6a0NihfcWYuAXz)6qox}!@wT;;J-|#vrrEEv0G>3e>;l^lM@#MRg`8NBVVP0!fy6~ ze4@O#e_M_g9cgLkrn)V`3Qxp;R{AgOITRHy?u%0(`U=pc8Uh$GJ>M!l&$(10(V5r= zZIOiO*W}?91F}UIs5!9_6`BactINWVgSOaGR8^Efcl7TMF|x_SyiaX%0gn3}>tke_ zy1kK`Y#Kqsj=@gt1xyryn1Kt`T9x2w#snIBa*yAv8KsQZ8|i{))eGeE z027)NC1!L)XbiB2gu@OUo?#n9I^C{nmMpD^b)qcT>=0TV-4Xk>3VYpnhL3)hVVRC9 zAy6}kfZ@w>(rtlPY07Se|1g#dNb_`Y+~_L>X@%p^xO3cS5P2xzH~D780{zN$qS*zP zxcahR@t4Qw=NP%y1m1lwi5fz?JdK%`%Ieur2KdwubC5gv;F5B2;3z30ht9|km&Guq zcyQN#{mp^pxeM>uIvIwib*1Y?{cr17m&e6O%E;8V)9?AEO-Q=PTg!inqs;cm|9Wnf z)+LMk_92mQ0Ur@uvniU>m)s?tRDGPE)ad>qo|r-j#P5sMryHx7u=?Y)$tLXdFai5= zd^Ah9-Y3anexzPK4g{jM3fpwj;Msf%)bP}11qGW-nqFP++5SDr7GA=4^a)gn(l7JQ zgCSy=1PbD&A*}U3Nqjh!S~TL>)*ylJy-r1D+dkJOAF~m+49Z5nBeF0vzgZkc1(~$K zks{vLh^sBivLV<=frpiZYPh+7sWd@^`}`wIbRBPK-LW!#k>QE+m!E~g_apQ#VC^Bo zsnP`IuDa2Qp8oCYOaS*H#(B0vO-PKxU3+LPJ+?a3yz%0uayXJTKJ7ETv`E=+8Hb!V zQ@NRW2NTIl6oNk)5|+^(AFu}vwEnzY3n1SrJ9ijzwp?6e$(9k-PMyoVhP(2+9+rCj 
zP)=3gXnUAg?i2Qd$HMaTaJe}9Jhp5?CZp=umh1b|CRf0h82qfqM@K;kox1{SX3LV2 z){9OqhfGL9z`dTu;fmn4@I6&g^dk>q6}m2;f9(WH-9x%s|CCyp?Gd=)b*r?@>jp7* zoCaQ2d?CK;u~P*e+tX}p5@i~aCBNi-K~f>O6QK(eiBdknQNCMg?6dyTi=KHxCi^{G z#Mi4uh1gHYVoAe+)X=4~sId?|H-5I_A)>yMIPB_6Yw3tI+>9uYYCE^MfVaa&BIQus zkw7hMrk}rn=i2(8S+$2s4`}4A9}u1-uY936nMt!QqX5!ETHr4{E~B&odQ3@ zzy|rJT(d*S9_)DbTZBJ~(eefOCF6aAwp45K$O>hYsTQe63BWGv(DG7HY~?uiMJ#W5 z5m1#3feitJ_6_fd0N)+ig)fPR6DDL&khAX~*>^h9z|uXFnvA;(qNeJ9z^zbTyQnJ^)BRhwbONz zi+L|2@=wJTk);Z@$p3S(M;pl9*{JXp`{*Lv=vo;W6`??Y8!wc}Jh#uLlr`=uXy#2I z`4QxIa^-Y~1NIMPLxji#E|cdHr2@E27JJF&WFw8>PC(-{b5Lk`1#ZxH!MX4i>mT;~ zvx0HCC!S`p)OJd&0dH?nL@YzPFbE3jwDz1@z&dRoVYS)_@gbzt=`+l$Nc1c4_5XiR z_SQjtZBdja5+Jy{26uONhX5hCI|K>t?hxE9xO;%$5L|=126wj~K9`sGdb+D?YP$cS zDpgdtRp;Jw_St)_Z)rtXQU9O~H}kR;67`SO;ij7L;Hc1)yueOTHnYtUT*X1nW4(e| zd`BZ%<5}&xnV%=jbUvzXP7nk$`BQFoX#S@Opjv~R#B$}mjc z7>2;+m>35q1EF13l>12j^r^JhzK=w^@RNv0uFsZ_WEXQ?&)?qepRWr$UYY;ipi+DN zs!RUuUD?CJzC)nlRXwE_Jt*Q|Lh}(3-uS+R0de)-;>piC1YdrC* z*G$xHZ8Blyh}ma|dw<(O22Gc7z~>{=t{yO8&MT>TmxTGbhGKcU0pZ`*(J+;-OHT3cRaJgPB(lb% z9BU^7Ub(qaiSb2I1fuEX!I0olnJ`fnvwrD-scZoL+FQD53pv(;kv{e6{%LCVtc8h4 zN}dNx7By#_K^L4x-kux5-pi-B<|-Qrorq$sdA|oarg@UX=d(u6CvZ8(88?qotU}8+ zzn)Pd5q<8N>ZP3c1|TT0o2Hm3U(re%EQz;`yn6DBf&63-$7$R+uG!Drcnc#_r0Y4>c}cdZuX2%G|!>Lx?ySwE=5QtDARl$5?bcYpBdlkQ@Q$B}!- zpNHBy9R+#a!tGbN5OJZ`>cp3Ipd>~nm>`&8xQmuV9SWnudd_zM1#;0sQcJC1mehPI zMy2qsW`KOvRzv-+I3zMDFqYk0DjQcdPPR*1FG4}g4lO%r{Tb^YNFX0~Lsud5Xd zju}$!7ie2~UGk`^%?@Ua{5>{-pQrEi zl}-QtYZG2#A+5|pj1lNiOK^sKh+DB3AXj`UAd;2PxWP@kTUcV zcn>NT1yPP|V(sAafDfNhCm{Qic#oIoHMs4i0xilQKTuZtAh%NnCEerk z*D6IKpnvR3UGxuP-HtGopCfa%gvMy#aa*_RDPf~N%4S8=`fRM!6bx;F> z#h&{GoX2FIO!fIX(g^}xp~pOc&$vwV>x1Z^Rw{JXXF5*Wg7X*?S3pP**LwvVv)V@Q z#kt@n_M^pryW*iLwA<3v8|4i|gK2&rLkW}}!d6VQh}Eu$`{q?|qG!X4s#`k?eQ#x0rPl^=;XpaTpeuNI{T zms*s5#?&Yw;ED8j`HK5Zzr2};cJNTlg+p2$Qmd6v>psnCy?*;XA|gUbSuc3>8-0m{ z_rGP!TZ7l@IpW{lm@ZKk$^ol5e>X0kE&wy&oqhBN?6LN#pdoNX3EtHRHrT5{DCbV?b-bfmz-&`;c5@YgP8;eDY5j_q^D&WP&EZ 
z$m4m(k)#_5(OSl_pkMAUF))%a`b@Qt++}>%ooU#cH!!SR+xjj-Rm`o6A0&WdYE=i_eK;&KR%TsZq?lq?->`!0Wq z#RO`~W;AT;BQ9|T=7s9Gv4ZaeP6@>FL7Bml(*n&elQVSGD!_MI;xTo@_RtVU@M>Yb zb@#3ZaEu8~nA7!<_4MMAZ*ls#T0TKm~P->-8ajAZzTbXN`vF&NLws-d;8P0q?ZcCT9_p@<6EZ`&BC}#oc%GHM5a)ra+0)1cO9|bQ+h_n;pr;8g{qy$TyN(TfI^ZnHudux zHso@<^G>(&X@{f{rZaJE%+Bj|n`irmg@=a^&K=?65_UZjw*_*oc|koxKIhrBI`{qG zHJ?Le3*#9BV@`bu-tGt?HyJhNcRm*n;24-#b>$@gE+08q+i36K*d%$RL^{u4fxL$+b8_s?R1kj zCWaVB_AX^u%2e_bH>QUWM_B$OZusJwF_LtJu|$Ysnb}gye6VlDI)XQ+w4E+4Of@pjf6vBT!DuF^+u~ivU)75^r2A+>~|cDGj1}ZR0?th&7&oy(_q@THokd(ZqQCE%9^Hdws02+eT5Y^CJ%S1_imn>@#-L!K`Z zxO(4<``Je7rMa$HDI9%2sMF6k!T6|yC3qt#hXP{>wCS>0S&M5cz)6ydQq3}|V?_SQ zMOigeOIy<{AE-cTX&#%cRk+XcS$HbI>3Z%>Dv{*f*48QJl)>wf#ovnoaAKu`fF*e$ z+gQ))hrG5mDzJg*YJGntha-)EY1)O>v%&yu9B}%-J)n|J$_+S_pOu8sB|g&v=6enr z<@72)Wf=L03r<-08&{h5@eQA6WYV+U`)~rS2%23bwWm(fuEXr3)P15w83fhqIWRag zgIh+uy0+}Mo|@S#EGh;7>;*G7J9)4lBH~R+wGfpqG^9Tm;4*YKF2K1S!AJT{$p(yV zC)U;?=GVO>+Fk?N!0joOg!q&GZ@SxVk8~zL%{H1D8y6Qoc6&QmXQ$|6_yHRFfoyyi z3vY&;YMGgr7j3bddDNPD+9f#1(eeNn2bVC~I?YphKSeVkgrl=gsYOLWH;D9N_uD&U zPsQM4SBYdPiH0Wq8-iCV5VQfyrN@2Z|2#*DQisHVk`_gIeRn7Ea|JJ9=v|<6Lc(-t zm5~t)+<|sIC5)-|<=4Yt$pyHILCwtQ7OwQ#^GS!9PG{U_Y@^~6b6dn<8@7z!IK;#; z*5SU=e{Mnx(?Hn@uC}gduz$PU(nh0E^d7;-dq3#mGWRcgTtgr6lBqH-K1N2KGr`nd z>Nqv9M6zR44PcaFUX$>Exn7d@bXLd`?z=+6e}?e!tyC`jCbl z>N$C6FLB$#7L#1C(IsK&)?kk^WQz6F!3bFG4u8djwrpTtWm|m6TJOX0?CM~}Saw=) zmh^{T-2&}(F*cDw7zSKU888oKgsj-Qn$V%gC<&^d;t}IVSvL#(HC8x`5z0MX5I+-7 zg`fF8|IW@YEadrd<3UYuwze9xIe*p}6(e*H=JIH=ox}`~Te@y-_|E(Bt2YO0s|_%p z$LxI1koz)6!_Hp+oJ~%7;PMfzF+|({AGORKIvAF~%cTA~C|W+HKJ$qT=sAwSLOHC#frtpWdpD-u+cN6E z`xo1pGBsw6Nm`A;N7!pD8<1?GpJ+qW-Gcq;j$F-MbIxS1Pb!|nd|qsWUJQ&sSmeto zR654A-QJByubs??M+lZ@x~ZV=;!gRt6l<2}8%d6O-&%TIQFc-y9Q>n1UsR;U0r(fU zChO?4U*>B+?=emZap0HUC<79(C_N}+D-9)q4@;^cX>`*4HU%}+aeMM!tOrQB&g6ax zx;`ND9PjCH1my4yd{}paJl)IYUXb+lja9J9?b#G6^VFm#Vct5nFARjyI|*aj82t#} zQ}MlU!D}vrNY3+9Nd$~T><^4_qbqsXm3fL}!xwH~&>RNrK{@EApTs!*l)Lj#jll_( z(sPB2Bw&ki^gnSMuIl+H 
z8|@a2qH7i!V3qp^Yx35uOtnA~8avXOLZX^8`B%K9UHaIcnNR;KiZ$SQt2=*2#)WZ!g>zoJB5_3`ZAgMp=vu zcis>9oX7KW@LBfB@-yelQLvs{-aZ26(O|PO1W>Rs;D2HO5l6zGGdmx&5c4|~)l|~4@_Ga&1eqwCXM7cTF6Pstvw??f-56Df|co@>^+290qOauwd z>}@9G3rt;%+ao&%bieyliMS&AN4vHXh2kFTxU)n={gL~{FbA?{t=y5}?HS|{&?Yac z^YamROp~N#WhJk6Bt@q{-VIMvcGG4?3G$qY3ej)O*|NyS@S2n; z)dml zt85p$??he7D#N3Q5qg&mGWK8l#+r5UUFWq#Z}rqOK7nP{I#>(59E5}wheN41aTK-C z;tbPqGisOfBk!SObx@islD?!g$*3Dp{Yx8e{D1W8t7ZRf5VOE)E-YU%lq~6a0oIF& zZoue|%I-Y_9xMvmAW8Rg?OAl{S+eCRINjMKt$=E-muUy^q8W(K7PvvQQAiD4F@TlZ@a#OsnL%v5NZaEo2+00?^*`k&mnB z7^kle+nF zccc%SLLeE)3(|9U-H0{OExZUWsrW7|JQ^ta!>6Kg!JrA=-nvXHP`&{R>*BrpqBqo~w))OYY0D+Sbz+VRQtOCt6ert-jh%Ylyk;FfT zyl1qc5N`0kNPo~`24jMqd~&K4ybZDlhkowf#t;{k(*pZ2X zv*As0LC?Se7f**wD#Es2?ud8(LA;kZ=f8bAZ6`{4xv$FhEdqIPaji};Cy97!1+vnk z3|Z!`Mumf9=gAvz+C+*7#NgGRefI*8IR`=N&#b=c>1;KzVI9(LJ6M?<(S zGm>O_kydYgjJ-@(Z|2r#uigIyy2oXD(DA__kBdD$-PL3lB0FH5Rf{R96SSA77FGFW zt~%=Fl|eQ92yxNw$nLRk`$_$2jU+&opbAl1qz!nGsTF1(`!|;?R+!l%~9dT&jpPA563qOQ+%W46rzmQjuJ?_fuRCAkhsGN zWLoYB-fX%twSmwx1-;R18{ru$89=jiOq-8FAeg6fsn;OimzVVoAQ`rAaR00P8vB1@ z+0j*0&+>x5o!rqjsSuEEa`4Jd+NE6pqjW1(d(*WDpQIL*$-ZjC;Y(1@J>^8!gA@37 z(MM>=jq^{V$Ea{qXJ^6ddyzpeQI!Nr!W6@mcmn9Zj&Ae_&aMMVv8U1tWpPC<)8Ra= z>X|Lw?X#OZV?QOYV+T2oMs+-F4622s@kw(zFQWDioKy{BnF;2oiNL)QBj`#(R|M_vsstEey{c z1D94@>Fm(gxgy;6iI?oq8ht+3X_392c2s^wo7iXxW?!6GeWOS_hy2e<6RoXL?_XB0 zb@(l0{b>t=J|@0)gp>4p`J}Q(SdKupXZHrc#Is$j*6Yw5%ApT@#HNv`?1C}w0hPaS z>NxN*lkd`1M_+GLKQ4V`=k^m!LXXZN)-@Kn=7wNJxLXaWL81};N(*uBXX^(?YT;rq zV-Tuu-HCrETn(yn>S)gEh_$#-i&aVmSPRPPm9$A1^Q@y>6oB>!Xtx7_lI=7*CVQd^ zq@U`bC@4%zO7)n3AE{R>7Zetb)U*Hy%fzC&W(rm!#V*zGpL0*7#Z_(-MO53l z>IQD+xgMJYF8L=%dI%iA;mkU^g8H83s~PpNMc*j#;389r@}N>@K^DB5L^Z<_RT9oo zTv+%!*BRFb@6A5V(=q3=b8~c{jjJ-NbzE!ut4wvo21*N35<;-4&+W#g*zD7na~NDG z?$me&!XYa}tozkd@eZ}nT0i37j8w8tA-LgCX&^H7@|2CQj?Nz2R!%DCx2FpvFP?Vr zg4sBjjwFR8)U*XN3kIV<4Q7ZjoGA#9GBoIiKQ(mGLpku|jc!M7@f$SA>~+HY=1km= z8blemiS(G=wBP*Zh$e)0*#Alv{kpUd7R}plKe-TNdqPd%UxPA8^nRWTZEX#a$n<@J 
zEOFcBZ^0OD$p;*dN--52Pd@PVDDJ+!PwwMf`vwu*ogYK>Lmn_Y10`t=%enQdNaZm| za@PR5%H~1fxUS4N!4v0q?_I7M+f_*BGTWGG=7w$mq0G8IiemX||$m{FF;E;4?qja+x#jWa>IP0(yL_j3LvbZO_)mr&A z%05+}5*kI}drGjv6;h>&zn>yau&+~(6f-HmN|`8gN#drn!xOqUFVJ;XbikS{?b)P# z@S4!V5y=1UKP%YFl{ZiPkeeW($$;vMj+F#gm`9-O5NUs5(wpWWnkY8U!I-XQc8STPEvXBLSu|)P9CoNL=hdC_ovRjd0yQ0ve7epq$Viq@f>VW5GDN}+J7|DT+WzT*q`CS)k_j`OvX%;T zbA(Os3dMym&5ODmr}V7Zr9T)H^_DOF?m zOqn{GJXov{(`A72Xwy54`K2bO2n!cilXVmPzb5#o(>W&Dzb}B9QdwfnZffEAne6*a zls~SIlRgD7-p*nZCG0G&lYBUEzqW2jP|l@(GS>!Rm`nv&ehYt1%=7v*xV{Gar#(`T z9eJA6Qra3O{u-`=M%CE0AsIm)XSYKH0$+>WJXJ&Fx69j-@9#g6@<<7&fLt-5==1pZ zON=p|8`tFmex<>O=J`7js+O8$XVLd(ZzoC`dUK`Ps3fYE%!(La8hMAqndxpT%o|XK z>R+sB1>y?6EBty^U<{G6s%ZbsguL8N=sN}Kyx({vye;34zIAA<{(dM`z0;zC+ea@P zRL3w1Xu7#D2azgIW20kPr+a=C`a?A5f=*+IWO_0J8QyP^55l>QDL9oi;Xj%NExIVb zz+pv7l}YHaytE%XZzNLNk*n%j$^`IX^nw_6X(_YNe*VC~u%Qv1Ot%KbHyFL5u)`J7 z_cW4 z{YGUD%Afj{TnrltX>?&}@6V%y%y+|a0LPI^=vnY(=h2sHTCUTB2mrOHAh-ceGy)`; z_zE?aXrw;?$)`8Q=W<;A_IZ06*`V=b-j5%6g@u>^co~(c{RzV7W%J|3)3|TY_ccbq z+z|wTnyGCa$&5dnB4RP=g-q6>E)XVMrVmyEyru+*u8N9dx(&Z->yZK*t9>V4)kJl} z=|JuC^PjqI7So)7h&kfKqxoNfeGg!4V+yV$t>#4v@iL6zi@`gf+1)NwHU|h5nwKMp@<0tNl*|b z?%O5Q2hA^R+|zFuY!66KJN=F z*hh_GXTl_CvfD-0^PIkr>1f!6Kp&0i6>hY{0f)NJ1Z40Zi%iy88CW!9vvI{u!;zuP zIEZ8vQ7$S$5zH1;U4_;MG0s-a6#B%HF>tF~iwe^H)OK*-08>*V zIT;f}5W_4Sf2(}x&FlwqO~_OfiOzgO@o%R7$}At#gV5*{)Q;uGgtNy3W2htxjQ`9P zsMfpd?4P$xQtVO@e?yi5KXk~)47U6=H)wAsFXtfPj&`G9Ih$*sz)3&Cw$W%|TC%dV3D6|1(io#`dD$$mBpT)obuUo>WJ(D!F+we^^o<*jIBU7MXH_4c4@3(!uo;}T zBF>)?uODh+8F?0=Dt8SBKN1gAWqIz@-W{*SJU=4kl+TRI=6)Lu&P5C8_aWQBvY11_$j<;q=Tmx6zal{(_0AhLPGjM_gOqfxeq z2n9c6nG!6KaBkxmV6BraqKP{x$5jq^CN=t*ir75gp`Euig^74Ab#OIeER2rb*xnmxQ2vi6Mta*WrY? 
zJmZb^=Nw+m$aW~TZM-wR_n(pq`iSQXHXD1rD970yH}rpg4*!9MEIX`2O?#cA`qchY zC;S8xuc8`^6)p>Bf_GAAv)5vPn-am}-BrNCr_zD%R)i5FE5cnUaB?dpG;k#C3*g>e zIG1vSB^6GQTOLYEB+=T(eJu{b{HIYe3D&|}1}J-ir=rX1t6%YY?~APrF&tS}ZFGhb zd4+Ti@|DaKg^Q_Rzn8S;m8taFW^?(uB%x-@mrQQBics8U(QuN*CZ2}S_CW>T&N2Fc zU*Ll4gTa-3`nAz8_pP1{4NMM1%g(pW-W3to~OH)`T+ZnErHjj zHaBS7QKSAV&2`=x9wA}yz?O)fEqF!pSGPZHZ4f3VCZg5o-WgromFF=*o=0>1ew&+U zxI|5j)lzFXE`0k7c$I2Ic86NwDy3-4PE#i_ypx4;>Jhhv82W_w5JKKBFmA7bt|rpP zm2SlC+5gqYpb)8>x8aC@ArQ^^{X65C5g@R*FPea|GX8;Nihos&ej5z&;hG~l=j4c} zL{&EKbj^FqN5n`dW?8{En2VVOxtS7F02;?e-xQ)fsGrDFE1eJ^O57AGW_qW!oLgcB zR7pftGe*vWOs8xJY{)24d2{p+?zl@y;p&oF1NNtdau%;G*#5X-H-i`Bs3csSgCXzH z@q{>2KN+f(n)=E>WG|k!GFf>wl@NwXp$cbrhwF2E{4+l`WrS z1$0mmD$gw)0e9I!{|8+dYaYf-uA4vVCtK$j(n|{IoZOz32P)g8p+SgumtA?>Ykw(7Nf$zO(>y- z%Mo(dNN!Mic=r$=nlqZ&JFEhAOp^~%No<%8k-SI{A$%#)jC_5BujUsle0^w!5yETS zJdZ_NO`!sWw)hM;`Cr4&V)F)U#7>Gbk_N}&ex+9|^-WkCLWypYv|nKU_Okr`_31m<)25@Ayo*=qO%C6n9HG3E<_{AdfVMQb zpb>NXQw`*0nB=Pa%;R;jvj>E1#GKD9R}T*WVjSz`7CGMAhI-E2dk6Hgva(v6Wku`d zALucso&><`q_I7&R@H0na))kmdO0wmINH#Gm7e{nKa4ZPR*U_AVWzRO>%}64u=$U9 z(n>(`%GO8sjWV!e^Dv=9Xi}L*SQ~66m$o6;ugbrt#%>;$r{@+*d-NxzYm8N=1-33D zlot5m zK;y*C%p2d^OYxf(QvcUEp0akT(J9?b|0kRuLqkJ@Sd>J$FB8_B#$I0hKPX<5%P^@I zQ(=-+f`NvU#7{~=2U`<#$Nac^9 zQX<@&t=Bd>=wh)Ojm|r{;QUk-ke3)zvQQmVP!VPtrpp>LP=dDSU3F3aB~6WRCRlk=>FtUb{Gz!LjkM_?BL7Y`EmB^NpwP zVtN!#gY@j+%s%I0lzL_UNl)HskfxTjZZIGhiTGhjsyfWeEI_7OT)7q(uMV2ww~PEY z57O!tqT#!#B@+{~{Tz&~DR3#2+b?Tc&HRXgSqUYtP3TqjOl7V~IXS=Hdv1{+k{U`+ zMX)-0cHt)#Uc|z+Jdp#2*>67;er>(?<*HPUn)zV@Ko>+GCy7Af%~~}qFt6setjE{{ zpbWV{i?ATt_PB5I0>f6;=aI-3RrrC7nlNG!;Lnv?U!z)TH=CgVxwrqCu|U(c43%Vy z9q|t*xOR|Y-)GJO6WoILwT<_4-nxroP}Hb5qk_kh)3rVbdTmR|DvwsoINQI4Y~6dX z(5saMBWTe32PDa~ZINM*j}=>;JUi7@tZ6`}uD-~Nn$4g^cp-7`=VtAnumJaZ{qgs@ z1kj^+Yd?iK?v3ZQwV@r&SqocQHWT>X-QA6_xT&dezR_gu53gbE+sgC!S`)N;+RCb`wCiGyZ7mOMmjnR%(j?oN3UCef%fr!}F2vxDoJeH^K>EmL zqb@CPz@GLey^vQnGGiBk3`;-{sZs`#qFFY~J!asvr|)ig(!N4pkDBU(3cYXwYs~oX zggS02>HA`m0fD5pCZ2hvb?(#4o&!Xq&;HPsloW~e8Z;3C;gFOo!rs0Hb59fpuDK{` 
zNCR0+C{w|Pa7;TlW7hr|Y1$=G#qjrR&UXez=nn9OEmDTtXiz|S6YE*J}cSg7rrED?x+XdaPl(_f5{s2?U}I;YqWLVa)Tek$Shf!O+j zE{I_2EhRB3#sVrQ;DQ&hq&bHRgDIBsh{8A*3 zK2u8hj%=Lf!!(4c3!3UZ=lx2y(+$U`WZ1dmqjlrEAm%9^n$@w?{Y?jGX4ZX4VfISF z>enhPC?`%x*127gG_!u`M#AqyX*YGd2#`})rlJWc^jGTHsAE^9+vsGoRaZSYM>TTT z0%$XqtfLbtMIss5-weudeabgv)0#i4EMMcCVePP^h^u^ulHoCP_cBf=>QUI*=Uqj< za|310%J9<5&XW#Q_cnjP=3Sk><4DIK8~to~$rp3eih^;HCR!Z7x9cAXjIT!gB+E0$EWm-|jSZNnAjPLlM;kyu9xT5a*xcA5^cs`CtJ=#0FI#gqt` zgsM@1TPCYcjbu6Mj%N7F%UVY7=N(hb66*5qon%TUXXnAbf#G2*fMBz+St!|TV~~Tq zDzJ(}a2)FGfX2hqtkGee#QiGM?A_u{5ROpP>&t#xAb#~8EoZt}-lnyZY>YWW=!Rvzy3=7SqRAcoAQv-JJ%&5$zLrvwOGlp0LqPw~GF;hr#%K z{e!r}FG4@hZnA+9`RiOSaw^)GWk+?_vt>92fXp!9Hct-C6xF?#o;D$0`B}t#R=Bje zp^hhfaWLEtuEnycd-+)F zX**J}ZCR*&BwccJi(0&Ot|Hj87x7aOs>j7$81=vy3v#_=6h<$!M10JAn5SK+>=PER zGLuEePOT?un^;!PeSyl1@b9#>e2X@XnEDxc{<~w>orW>uXHa$_%K5MSkAwGa%|UFTALfIiH@# zJ(Q+kWTb4L;1-ZaB;umRsYb7lm;pL2ofF`rgW1`^+&?@_%=Hzz-|oq%nvUIC2ZS&i zGY4C4k6H5@7fpTU#6Y*L+?0DgUkX{SSctbVB*X&&5!v8MY`NBM+?d;YH)vtc^x%Oj zW919qNH0-h%G5>7@?wyG@9(J#COBv`K#qZWFa5mtPX967Q#$D5{~T|XcCDm$xuoLN z6a5xVF!~L6Wrfl~-ZwYc+)uL~j=kI(S2))RURCiZoXlFj5WJ`OV-)!vtN7h^8|Mw7 z@jc(iEPVSD9yTs?fSPp~U{?kniIg}LOm=WfMH2eWkx9SXVxRzFfHqju5h3yg4RFy8 z%Q~cyqW;klb@s3EgskxlC>d5F6Vjy7ePxdeUi?0Uf(LCjunL$T(Xii!{UyI;xd()T z(eU!MTArcW*w91CAwIzuUCmxp7jWZ7CD_(0cj&W)G;V7#vvB&am@uq;$@X&$3COh& zC8a=9n{@Mkx#jg@%{*%eCn%Gd5LeREI|8|u->sx(_Shom67|$bSFw!OKk8(7*|x~( z`j9&k=KzsE9Dk5kW>+5t)xpFR@&PDw>8~gfa4OijqLHIgr~LD%;n|WBY_Z?QUlCf0 zyvL)$hiSt%sdcp(i+}3FCox@3$>StAHGOq|m$fw=$+Pb{)$G~d{-Y{&>U#4IGBcm( zKv8<2(A&aC@bgDAh!eSP8KI;b+%_f>L9w}6{M*7eS75IJC*fwf*jz=&;z`W; zqxaz8;I9BE_`)82D=Xf)N^jwoeL?{joZ@kUf!>1sP6zm!uN!E%JcT5kU`KNTBoECb zf_&F3yu8abH7>9~f`Dd0adCd>eLTjUu0=@?fC;jvjW1cj!b<*gzA=8WA$4ZxwYdxB z>+n=wrRA5Hh=HSEruzAFnNQh9gYkliHp;WBs?%L_%{;PX4e zg-Gz#78hTDu%JDCn*bG5EaLDGu?|l4pkATouPEo!9*_ybK@yo8=b}o~d+f*6qEGo( zX-=Vi-uQCtIaVU(C7)@8zx>E<1lePM8M;FN0&TE?KpW_A>ZknM7*Wo#MwZM>{NF%x zrA8!pR}f|e32j7z6V4h+O{a+++&CA{oG;Ue0PB&Ad3fYYu&*m*c){rD%vP`16J 
zQCZ=PbeA^z_2}I5CM5~!)_{3#T46yNKoGkwtEUC^a?g?7#sp_vP%~+QQgB9p0vU-^ z#ELA)ybjkjPDia5+|Q8S0NQBTf?ckQIBEc>Y=NxRSDgI({3&NUi!cJ=CzV*K&CZt> zpEssqW;Gw#y4<|^ew~Mh$HdeUf2B5FgM<@eUSR_)WZ?63vN75VCKg71aU%>~gJhdj zC_LicDX{(v z`l&f*vd{5J2=)yJ4}EyEl$gm^0MMo6&X*D2_?Rn-87AFcy$4ka+D2^u)7^O-)qO#> zu}f4b`e}k^N5XP`+{Whz7^LFP9aD8w6RI%qV^|#YA^x<<^l8TAbRu-njk;8UshqVMRBk@rv7imO{DK%3y;>}1eRbmW5 z0NO-aUz3JdZk4x6DldBCRG}7X)iev(4n^pL0)iRe%$fY;!8M?03Ro4S>2sP6@2H$3 zZ`Kf^9DDYur@=UMU7pKwyQ)+--Phwqeps7k5S?n!NJ!m^ii!z;&UB<~+4?8ka9gP} z9I8Q`{AEk2^8y+8uL*#pp*Mil+h5_HVFH?-Y{o#zai(Rq2?E?_Uy^Yj$Sa-9Kb3!e ze=k~7ppo+TJZrg1%~LbpeHL{aeePsADniJ1-}!+c8Qy{IctZDT z`V`_c(NRCUvvrn4B%*JVxDS^EdBHY@WQJZ<&+lE=@3Pb0cco`3YRTVBKdOK99CM2H z`G{hCa@+c%U@PzX__$6p*}N~mWEtbWY9>8B)6ok}{<&r$pq4Nz|dyciLE7 z+xyj=l7R|G-FsFY=gyxp(QV@fHl9!`9->1yWdD((M`2tcEx}ZmJRRiu3?4=a{soRB zO#D}fS|!*)TBr4mA++*%2P)teH~%I{W@TmdR*G3K)Sv=gWHO;@uopjKGO`|)*04T+ z{N+!;73Ps59SyMOtseL4w$>}V-6;38|KMG9$J_N;>}~4?D`wGAGo8&|#Do)46EB9- z9*fe5ovZ~r5@SAU&_4b{mc$t?+FD;lLD}+Yne%A17mAwAJAif|+Xks?sYFMn-~l>o zN})5go7f;tr#h_Q+pLk6;r|2p*k~%?Su=;!zBLfMZ*K7U8*%u;sFp^k837wZFv}MV&Q>mBX%^ z@}?-IU3#n@FR^~iiOjI#fc4p{SqW#wMnRXX9uM5%k$vhTBBNFLoM!{y>GhM#@AcUR z=J4UvE)6&mm&HsA0eWbRkc(2tX$#Ak1GbM^E6_=8>}7U;k|N{rc>xn z_U*)@C4^<}-eiqgjR|pO%D+2SGO6BoY}}KnvfrNKLNAI|SU;%kT!rB9<|-8BwHuUb zY0d7XBz+u&{qpm_RW_GplTHzZyZ1cd`IxnJug;WvuvOD^YU`7L-@Ls3syNYH(#P}B zbrgS`Z7G{ju3N|oT_vKzZGXKgaXll4Kl9jiGvU<-p-P#F+8SxLp*dhzPcU0*t#2H? 
z_C8P+7wKLu#!-SABWV0gDx@oBU8``wRsDb@-8^g;adk`N={gbS)lEF{_oKz*KsT_& zAAS&doxc^H$Vk4<@2lG{epvGnFVx6bFGrylM{gDCw}Zv(J#c1Pe|1c0Gd=1IkeZ^ zX%+5Pi#yg$vdJ=uB`YzW6te2t*D1!Eft1+uh##&eo~IP6CuhD?`#QGH|Gf>lKC{pq z!<^V8?fja%_#3YGla~@)EsD1?lc=yJyKw1laevPG;V*!>Ytbr@D)eH-m@{7Z@uw$b zj`VJZEC;g{hEhbC(bQx!xPJYEVQ0)(Il=Heqyjt{ildzKdDZ0Br(iAW&*35R|N@j_3N= z1Ax>fl(uxfYK_?gu|mqeBkMwQTqwl7;4cv!f<9r<-Boc|KXHRd0E5j$nD2 zI7gwR>}TQR4Vat5y12u!{;A4;0h%sB+XAwLQzqtIaGRU&r@ZY9YLElPATR5HR}__x z$VYsw#QtCkN`C1CKw8rPUAxJtC9i4crQim+uLiH^ms;ZxLFITaPi7)H$FqS11TIy07>k`yV0e#n$Luz*-aE)o0|*ybaz7`>&= z^hy9^N=nBS3!w;0nJ-s_H345T&RYQqkgl;_R>dc@(pBVG176q^jL?Iq= z4cRbtUc$dcqAC`n!+*ihE=RNi@>Et$g-}y9qHPukPA(}-H`9(tC9$4|B zjG*BWy6uKoWT6KR!RKQ#0Dhqo^1eu^!~0oa|Mav?=37EW1{G9TmjM_fOLESok7-7# zaZ|5d(X5q{mI~u|^^ZI}C@2Y~a`q{a*xige#Q(o=OhZr1H$_b|%uDs_Wijh-E{`#U zJb=OG^!;!^d_4;a^`ck2Q_QC^cej>q-M(#^XnNiKaHUFMPkbarJWy2DoJl^EXBK4< z0@MzE4e&{WsO_BM!YM@@VRUP*hu|MZpMnS!5AtqwLfqA;6Q`WNPJk1f6kNTxQ>ViO z@JvR=;n2J{I&%s4QL=?WishL1%he)Dn()YpV>Jf->0&$a;~ilv7scca_FpX%5bcaFp=N97 zr7Hk9Lei*!@o?V%7?6ee`@amKxnp9H34;6e~s|%8HBb!{_O(bm5XJIhki)MUcj@9he8q3K+YxdmEPNp9_bFr;iA_YT3 z;e*M&p;(XKy)Lz-6=nOcn4YA0?@%RzDcl}P_qMZ|gkFa3074FFTZ>ftcRkn? 
zFwB8nBxen{om~o#YX3si5+E1TXOAubaBQ5dJ9vws)M(_O<=*~ekK1dHD+@n=kPxU{ zNJP_;=jFlfm4KmAec^s|o+rvi+jaAIBM{G%uRpX; z3%JRm0HxzbWutx%kS*Vf9wQX!{Dm{@$`|J5X0NX~DX%6+y3Iu+1mpY3gNQNlA(MbD za%)5%Ph!fp4k6mk)bCRXSa98uoFnAPXo9{`!N2&YFn3Dy^@$td$Uf43k^d1azn920O@}C%m8Bn(^u&Gn`Iz{QcdQb&`3Km02`tA!);7CS9t9(`UEo2hXY;r=l6iF>yXcf%R7IWhYH5Qhtefi_pb@S#3p2+ z1Kz6thq1E^imUCqbb5|AB+KHEMTEw z6WIR|72gUah<>(G;`skwX$|}*nd`qf-o3$FxpRf0qdAx<4lLlg(l;v$+r-XWd+6f; z#Trhx5{>#flW|~we0P_BETcT(RH^N`46@ba-YgT2nTsG_k9}M2N0U-)XduErOY;4s z+d{hE>e3)|;q2sE<+i%fmh0164(#7m-9v{iCK8}#UI=qTms4FUukLhRS;=g8C3j5n z1?Pj>E6@CV!>FgfSkS2{j?+z6|0uu?AbH&tph8qVe^}IxkJYW>#gRE$k4gEpwHwIY zR~(6z^0dc3Q8Co5(B%2!4hQ4nvGwX=H199h;cpe(tHqgPLHq620uUXG0ld%$&*R@G zn^Q$^0RGtS+2s#QRUMs>fZP6;P@s1;&s0X!3AjagAwqr|eL5d-fd}XuJ;}hmQoIv= zJE+_p4@ls9uHFJSm216IMZ(=aS?vJu*S~eaYHoM#pP#&fKh8?cWk!Aq07y6_ z7?KC#YX05YEYU1S<$rbp2eyPm1i{r$jwykj;R=`oXAb`d^=gY3py2uWp3`LSRTTX1 z`>dLx)@TfZawh{0rdSBi2GD@SPNwPnlSbt0;YQS9t+>uo2~`0uoCGCrVHiS@JCj9y zoqtD*YzsU>j`wDfnTE65`QGQ|*XYOM9hyh1eH$VaxZYC-63Sj_R_cexjVWabdfXSk z#w zQS+13ke;Yz94c37|CH$JL6Z*z>6e`KrRB=KRsuYDF>ln(P7aaQDx_Gz8(XsdedXa& z1s$sUI2~8lJ=^b8XYdQV3&V4rcvD0M9t4d-a^a%ZBg>hl!mAcf<6uHMAl+M2V z%{PQ=FQ-w!JX&q|8*lhE>N*eC`tKt`8Clv_B}yn6=yPQLU#pex=i>sfcepK`_(&g< z&Fe@KJmBKJ9FJ#d^q(uR2pe&2V!7&W{%oDKEM$jf;I?T?U74_e%QTefwrd!+`;HlK zo_FoBW)k7EWt9=D=kHEW{Et8n1B$AE5^#*d42Qdo_i1GjAu&6fb|@UP(@&7c?SSXt zU|i8>w|MqzOp$yl#Qg_mE1OL4R#g>R7LOZBY%E@}pYmVyRc|1Rb}hgJA>Wk(_A)_0 zi3v>5fYuR^iC#nB{mq20-@k3PKZ5;>9G(H-=*q%RAf+@c0OA;&&40aKPGuxx*$IRn z`yTF}y_{hpX%BHg33s10ptLM1+b&B+ zR-wa>m6;ALlQXlz@B;Fa|Kd5ijyeC*C%1s!{g}8p7T_5+9LGqzMRLfBx0L>mVt1vQWf&-M5ZpHr z8ANof*7+%L^i02E9rk|VL|Dwe7dOUOA>O!J(W7H+ysel@Le|jwI85X=G{jJA10_U> zN0F!a^1Tv+~&OmCC`+DAIm#>+uvbCs3M!LE|Gbx+^R(N zIm1zI0lJ`vSuXvdC7n%vV@7M}BE?tR^Q++7@Tw;jz440?;KfEFoyWg8l4Sc|I~_3r zC~pdg3#4tCGPA!>kg2PyL$^A=|J@7$SWbV_7$q`Wai0m2+)Di#P>MA|e3e2rBZUwM zSM3wua$ql7RGr=lASQ;s`ne3F+od(NneTjUw^1~sk`)N^dRym9>1bgB$)*k)7)cq? 
zV2eVZIOf=d=Ix6GTBA8>ymZ(L>X^!&X!N-I*%Dv&`v7Ime3u=`rdwL)wSE!g?R2duz$zC)^bOPiu8i~HVQEI z!6FM+%pN%AZwM?d>Ge6ROG`^<3b15*NLkRFJ6}MZ1~N{2BMV$M7sqIPQV->2pK4{oqp{*xUGJZL*cch3v6`3xuZDG% z?*E=ty)g9z2RFKaN2_5mF7k~17IvC!7-+kqV`zNko*Ov}H{|H+P2GVLZ!!7$3L zt~RwzMFIppG`oZ&R1gE34}(P9`<+13LdRMT_^vvST}pm#oyl2t>ml9C zNXQsR49@yhN&Fqb|MMe3hITdlSL*>XKEsS5qxxa@AE@_hfFKfl^~>{Cu}7D9AE#rj zwCyz)oSn2|aQDi}1&WIx1%YAE!GM0g80b!xLBq1N@GuwU=1?=wlqqm$V~=_DpHKTw zppHqaR{8ouE|zU4^Q-rrm-4(dh)Qq`MDaT17GHwlr-W4?polgiiG_u z-2D~5K!!wsS{Et86n1a^7X)&ONDk9;mS%j4}fbOFGk=tB{?$X zGBq*56zp8t!|qR)`$$g9k5^zyrFDIBcnIX8I-ntGX$7DG zJ2Wo(9Ww*mAO5|@Mg;zW+x-tIXynU39mZK0a4M$fPtideZQ_51?~J#x&9NiJ>N3JB z)+>^Kw+|TU1O=;InhxOHBMH*K#BZwm@#&HnqZ z1B|^vt&WfKvb8|o5=v|u;a}hJ=Juc|K0dL?QLyCHRdv-d(+pwb@~)aX)vE8qhb+)T z!h55m;7~PeZKD8EK$u(q(>e`QPb~_gql$p$8YWlt>u*$7+!z-e%b_-Z3 zsj$=o^IY4Xp9aGi2|Cxv&%WuVav63+l15r1WdZ6-IZ9Trr%e|14-%5 zWXRK!6Fg-Ng$gXxZCOgSE6jN4!$S<)O*1JPw|&p^aYdy9VK?+?Cq-MB9)vHGp-(`< zpVQdKfnml3yu7`hz8M&5(YML;oA391RC(S<|7Pk+yNHW^ec3eZ5+`&4T{@5?@BKLmIl1~K}1a-dz<#JX~y9r=PdIw6e)$&iiFU7Pyhz*unF=PqyTcCkWoD}N z?+o5bWJ>y>T_rc$geOVzE8i)0V?5E!U;A*2`B9N1$7M4wnt$tc{bPF(F&2OC^=z*rpNXpL`p4v zWRNu4x>k7UEUT8odtzuDpP`*>xzx17JIbUP3mS_)k387c;LZ1z+%VXGKRv}^_8@=f zS#FAvax-J3{vdIr(qe}g|5BXiA90V(S4FHBF`mD+_scy!Bc~PgtlH(_-Y8Eic<`1U z{cZ1Ue9v86T-@fLR&`(w*JeTH<4HED$z<4}f62bZ<8n>?hdZV|mDwN?m52+HUrTD_ z_Y{i1Wk8}n>FG5)ofB>yeO z#BaZn0rl8x+<%za@5BDBc0tedDahI!F{LYdZ;JT-9N?kAv9H?o_Zc;d70nHdXWH!r z^D@J+jdbSa1wdG-oF+bt;-$b$wtQZ>5}Jx1N70&Js3t-5X<*2oRQ(7X*!)mg!x>}2rQIHuHh(g4v zW))Sxkh~*u0LHDrnhC6YT1CR&jDgk@x!vTa$~_j#&_QN^I~m37&^GkfB7 z23p&KME_!w8v-fzNs;9D;71d$$zWXceG_2=x?;&2K*Zkyc-^(0XKH{lE1Q-uIk};C zjpM3XR|i4X>%05x>sbFND3c)dc2`_He=_rC`_%3<%zGpu;KJGIu**B? 
zl1QlpRDn-V-roW=#-e*qj@pSo2aW`T5)Y%$NhlRfK>2b)36Sb4f^VA-YhF zPD3sF@VTnNN2{sckAVQgorUx%c$)ET3kJ_MDnD+)+Oh)t%^B>FSqPv`HvC|V}{KOvP zQltU0Pf91$Ik&N#d!S9$k5tslnb8SvR*}I&Ly9im<{O@z(*hhc(NAm#;|syJ=7#aF zA6)T0H*Uukn~f4m9TJ>1#k60elNlHs#qyhQ-%pi# zB8|*0E*Hj@Yy9RM^rw0*vQ7*};9%UULjs!IQ*V#4g{dP{jwh3N*4x`M5I<;r7(@IL z>08hy^K{b_DqobBAmpQQNus`Kf*E3mxkYz?p31C85n9bxsTAf?B=n_K(1qf$;6|-o zWDB36d1gnTW`iQb!N~WH0N4;V`(5-{;8z-3B=qnDflHKi=mIa?I(O%Ah+#!rYzu>s z$oF#6XTJ3bgBr%L!HQi$sva0PYscQhGccbY@j8B1sly_D4g@LK>)Ch!;s!lff&|Jzdo zL||a1{OO-{;rFK6!2DgIA>-{vu5P)QlpFH7mWA;U(daHx-jgHu8$#(mOY3d;qTgZ1 z!6g9H4kq#hTScr)Q}C z*`W{T_`3&6(cSYVhpE|Wa8nc7w5$vXD!rd;*Ejp#BqH#AjzGlU69mLBbN>klp8;E* zq%bD0!Q1Tjk@45ZUJ6;J8J^170?PYQnjb^Do`o*2uOel*2dND^mtCLDDpA8EAyW z+m1^=Xl?I<43jM0rO?A8R;$%xuc@;9+4sA)`YFLMe`!q4Y{0pC{(-vhUBmcoFB~A` z%KRXJQt@`HuCDRqlsLI1eISS@^Hh7bcf%VS7N9&MKthnko}HYeIoxk+MBn`WOxt&| z#wwf(7N)ME2rp~9zu=zTsL88FT=P2i;TWf%A&x}f_Z{Gk6Pr&AjPTFxM;CY}$#}JS zp{J);m^^KhSp5?s&=h%*!MJym?|h@zednV#0RWR#TL%j|I7@ulJxvvZ3#AT`s&75GxItJ~@^vKZPVxb5(- zJ?CJr4Sly+&pPn@sm*l!4YkP1(|`~?6XaZRQ@3rP&3jgt{@&Ks zA@QhOvmnu~Ut@*(Owm{rNq6z9nV#&OsCkCOx6V$3I)MqHaiI$$wurUtAqNkSlF!Yh zB!%M?s@IKcnTtEwkk~u{a_$m7Rgbbi%ZSWhkI6SqcI+U&uEVCP{z>T(U|kJXnJkIT zni4c2#_T@AK3?}|UutPFXQjhGpMNgw(FJoyumT zOX;+GcMjZ0L+=c*uV$gn!|pwl_i@9_iuEj9lfLG=`K*JX>1sZtP7~ledxl?lOokseT_La+~2ghihOzXaIC zm#*ZWNw`5_CX3BwOKSBZA1q-pSrgao0E*?gk71juD)PJin|EgnS2vspMeOfdb5#A? 
zB(s6$HObE?(`pIGVNQBdPPO7LvnyN+UYxG0t4T-(Q_mGt z4K*5#)-2!**ZJjut)G-h6PfECS&P7XG8Kyphg7U@UxH2Y{PLUs?RTE4ayv41y(D-* zNwZM;v!F!m$fLMOZKp1${@m4M{m}fRCX+G6_I~j-Z7mi*@E$!2?2CZh`t~WgR6E&J ztyI4pSDT|he~w3rT3t?j*4{hc;%ohyYOJXJROzzIa{ts&%hUEHyn{A?C*WRBdc_ZO zcWI}}DB_kxl_FXgWH_1jX>G1j?{Zb~P8@&WD*gNB@=Vz2I2o?+gr?JpREu)^u;8rG z9(XT>EA;`{uX8k$yY>0xnZt{}<^^?I^bL?&Tb-jxIRi`$!?QLE*{;kIi%g|LAW^?nq^$} zvrsM?CiP=jbI(5|>9R#_*qbQT8IV7q6?}*FJnJ{0jJvThFzmNn+MbYhS&gx?^W_ko zzzscTiHcItep6~CtZfjV?W(TnmsZTzAK(B$;F2n0-vIIgJ}6cqzCd~kt*bR;KjSh| zY>;|1bX@A|2)mS+k&REjMuejbqG1(Y6V#toC|Pb1LqeOHt$)qh-3NgQyr=lV!Na1u z8BOWd%#X$UQH9(^DK}vR@-|n1eZy#0gf0XY$KF?Av-5s2nt^U}gUm@Jakv@EmnE;U}*FL4|vAJ*NM1zCx%#8B+ za#0C5#Hz3E@N!i}QT@6}SG?u4wZVW$$Zh;6fnz0bv5dtMn>sj{+^?bEN6l-}B7*kW z`GsnP2J7}=`g%xRS$GGhokZ;n(HI%1+AB<683|SCHtN&rAmz$i{mLNgC})xO#WT*v zp!mGf_1j>W5NCs&&--q6*Zevw-hAo0a;imD9rZ4}^z|-XRuT~EyB*tX_=UCWRzVUj zV#<+(GW+^?X-iNK9WijR*_IWnI_F@@Gh5(l#I(-hh{4SvUxB8V_S-AeAE=usa9-nr zFNM!SVuf>66&qvHUcDdDRN<9u?!3Gh$#Gecd5^&;o-#KM*d2;%wMbf*<{y>IN)C~|Fx#z) zcwyK1b4Z2TTBr=(YfLm_L@RoaPVZ}gHW^Hv^<>`(W${piKVPGL-dHTXV`#`9*|chL zp5?$H)pr?fJm|XqYHPXrvQC2Y(zoe4*WTS`Fr_p*6KL^Kbm09_+rBsHxWJ83TGQK) z6@iF2PP2p_Rfwj8tVTjfxHEeCTEdT3#Zkm90hBUg3bBruLHKvmU4E7xBPc5i@oZ>` z9GlFGYHKDK2b7x}#Fb?}2xvvtq?$8IPE0*E#{~^RDd_qLNaacoor8@*U@J|_$>UBa z5QdcGVcAi3_2q$eER<)70z@6dU{bS?&V!>*rTJB0kZR9oA8NTB4exTrb4=@SAOZ z%Z(#pa45xe`AU%Yby5mY>hFv_DD>?tUw{TSoueHc_opPMYD7)l1ohiKCysc&*xj6% zRPK@(Jmo7EA6-U%N5oKA=^KoNC+5+04ycm3?LS%Hl=dG>zpL`)v@Krk25nGINN7aJ zkcM}YR#g^u`rLGKUkNxu_x=p64BITl^b4Mf^udc9V~xF3=+n z!}$lKBXQ~Z95rc=R?hXsmH_fs0b7+?+tGI}244I{Zd^iLU8&=X*p74XHhQ<=pW^({ zC=HL9!H-bGsPtKGYoF~?3cCQ_uygq(sqA9A4gBS2$3L-=v4H#KPNZ)ID(S#snEE;Y*Ou`>_ec$h+q#njHH6%Xh9@4$YzM z?y(ml_}f4x4rO4Wf%F!&m1+yWV_@h2>>i`w&cepe1_C<*K zN*W|syvQ0Xjc5gjn|0%OV;Y=MV-(?=PQ2O$gnaOS<+A>U_52L7pTr7QGrJoqI@m#Q za;iOeB{{Z99sCn@lcQHUVlq(H{>`S-qsYJrcyl**T+<1cqRSD(ggmmwD@$DxqVNXw zjs%WbugPw1tFErjEf~`Y6)RXVlT(#~SgiAizsZVD${&kjWYy1Sb~fe)YxB_7YE#d? 
zOMJ1>wpwf;m3e8e@OFpU*CUSnUVJZ%yuy0A_<$>dF41MV$u1gLQWJChVM0lUKm|wH zLiYwU3oB#bT?b(>f6seLLOK3~Vw2>V8EhQw`;eLtq<|{R&|%tKd5X;tuFA#z?soU1 zDOd5w+l`7Fd3{U+E(U#W7{v2W8xXJaAzM~t6U77X&k7a8<@kQW?ZJcgZvvf|#n_@8 zlLz(QfMji#A&z+piq?CJQIt3yPUsSdy}KYSDQU^FVGQGfjFCQ_^JCVP660I~Z1IXf z&sYw`EXE#O{}O|CcfP^-Dm(tjsF)PD!?{w=FS4HxejKl~f2;_OC~Ep7Mqq5J=^mED zyC;Lpnq`4VP95pi5xyu6ZxA8h^{$p1cl0M|>wHG5wJpIck#thph%dBE}oOVD;X$yUG4W)5`;+cosyYe9?|VX{WuK7y4M(S9^{;m z6M@5P31=Bg_+irdduND%w2DH;*5aI6Ue6&)Y3W(4y5FGx0ZPv za7iZPq$I#T@z)cr>UcOm?3S87!n1bug@Dr(W$p9K32AwWi7{*_n8dNgh3Zsk2c&ab z!rV!@)s6!YZygKy=S|3oRWY76Hp7UhjyTOhKa`DC^V?|~ro6$KlsCFPK5DrO;d`ka zgClYt-MJ;fM90adQbt8buZ)-P{HFqIwiLFV1RK^`wA7?pXq}eNw)#JIge1gpWtf6K zIOX=bRX2%%lvgKrT#jpOchTIa6kX>dtY*oM1tV-Q@~YNa##U(Etutt=9lsSRk$^eI)0#Rhee+(5ZWpCRDl< zaZyIQJzSc&rN1KX+AY82bZ@qtZ0+OFdB<|}v#7Aps@1$Fd~2`1)+v`Z_iL;3klVZV zIypaanLQVacO9I;-PBrX>goc?0>6sjpY(}^F><%grMsqBKTRq+t3^fCc*UE3ZRrx~ zUl)<5GXT#iEIOe)(Ti-4b_QMNoInIp!`CEUEtMLZ+Lga{GE`Ekn#*0UkI4nM_~dnw z{n^)dewkm&!0X*{=za)0H0q!qHVAlaygf6>Ez{iv+mNe+;hPm|&v1FMBqlz#tOaGB zu{+MfnAN=PvU9U^@!F@y`k=z{xt)`1-r{4+S`%NR6Z*y--HhFT9?1jpi`rQ{jNWTa zZB1Us86T!{>newqpmD$;@}K#AO!OO;fUC^;TTu=1k$ zq2+8W#58|E@TU$UP9MX5w2we2e}4)8)TD8sf4Cn-SoFp0>F zKex89-m$Zb_Dv~(AJ)^i`s3xSWJ;0IKql)_P0mmbUbpkYs!y{gW5XZ0Daj(%l-e$+ zJ25daHEO_!Z#SaQ<9_(jQ>UCDR-^}slK0}z*b-m(l5>dA;Sghg+oU2xyl~_slYRV3a5^!AroM;H-5_#LKN-AJ6 z8fC32{VFsIXujZj7CmePlJA6&)7Zv_an*R-ivg98bLu=a8AA-Y zm#L-92~D5um=jzso&s4r96MX6HBCx%(uJ2nymk6SsX=7INk$x=7v`EbGDlbyx%~)& z*%*T6Yb&)pO0wx0vWa&W`rPC&Tx-|blof}gA7E`fxF0Q61*3iML!BqBN*m`0Qp%y! 
zK5OOP@DC+_6NGkk-e&8d!M+OVyZDHr{VdeuK_zy9*K}(&kr#w^i%!a^Kx6b?vA89c ztq~kyaf*-DcBo&o{+3xQV#Wy;hzBw0ddOa%5R@Y+rG^p6mP z#tyYy1EHgeZIgdE;V- zIn!*e-%wg4&5(y0Bbewl=Us>ksS3^g70KSYdvX+)W1j$C#6vnt|74>3C_*r|h^wfN z7y)%G2H)K|aUualiPLRCwxIK6Dc^wITCHMtv^N1EX0h9{Fv^mu7B@!i84%F1s9W`A zVpb@QL>O}Px|?VRrvMIl@p59q(QfMro|VDS)tvoT-UY+d=ih3aD(WG%q__$TexKzIO}Ig zlHU<=ReYrQQ1j_kJ_9t^TZzBHdlVV9+<$w(xUzjlzURQ%!I%v6j6X?sYyoDMCjZ7r zI|c)NwL#yFp>Zs-h(SE5+i1hQ)o<`loyLfkd(|--*&YKA_Ac1Uvky2F(YA$isb1>p zP9*Z zYTf>Rc6xg+l#kRS+)oVP8h~SzrXgjoq=}4Bovj z&obs%r|3_qMf*(_-D<-u{@UN#2qx#)$|Jaa^%=nF!Ook0UYgNZ`sw(8{7x-&Mo?HmYM0-A`2{K4-5(c}v+x?-h`Qy$GM_wjF=I+sadL11 zqA|Srm{BG;1apk)IQjnT1+4_Meq6W=O%Bls5o7xOMbrGKRJ^!&w4-5AE<%K>E-*ZZtPLJNtF&OhR`sEdFmCV7VHqF!9 z4|Yztl)j9c>c^2N{*tX7A~7PR>ODUwF$)7J=_t^L(c>_t2n4zT8}Ip((5(&};m5np1YQ>*<~x-6@Fh~EuPtLA;p`2$4FjU($C>VKr_tpk z^o7*a)r$hUgQkk)fguwyCla>~4(QDmWxv-tAJ_-|Sgb%gJ}u}e+)7VE`|RcM#^HRj z|D-$wUrFf4Woz&wawDvVl+m-wF~*-HC4Wh&z7f(%?cd{X2#4_P!b%M21o0NMSwu%{SNMqH4nTKVdAdSR=O4S5 zvRQYfmK2v>KbJ(A5{qqIWWV$U+Cu%e_l3Va`-_U^++=m|ceox+iJzE*U;R>?rCk+A zku^+|vbOdacO2_Aw(9{$656LWp&)$h4Qy%Mn8PI@DW~WX4Bl}mfi*|Rt303R+JtD! 
z)gqhmy-hQ@Y%RP|ULjemaiQwTC2jyh6KHB6Myx)K$t12TxB~Tt$5?mC2w`Dez}_th0Gl zM%0xRY-xN;MUuzm%XeW$5F+J01nolTh9aJ)^^$yu)vc53o?i+o!a34|DCR}gl*{S# zZQ1o_<+0&Z@70lxUR{%a1z3r(o`R5s6} ziF__c`#wEyL?EluMJqxH2308p+V?h0)gZm1Vjz*891dRI+c=1r;WWx&ITeISa|B^P zn>Ewd2kDYqQR(AgEeZ9A{rs2*+aUb?Ap!$2wB1x%r&$qQYdt5hB+&brz5TF)%|2q zN9q&UDrlmGM~nh4{qC9nU}z7$TeNgzS6>M880U4*Laml;uAyJ2bcz^%?+sPpDf&_* z78xa%E&@3FYI%+4$!W}?>#6Ope0hFg%UXJf^RVQ>^M1SX!5wE25JOR`~GhYm>cH3OZid4oWUN}@6;dzD8CY>5omn1_N&pzlYVN=9J_y8afW zK`UkUvepctKKIoyl?00->ktp5fD#Alt;9x8uOr^Nc^)C3Gk%1w+i(gV>ua3s_Jdz$ zrB%#=^!sw%sJn@a4M=8_a9FkQ#bSKLF8QF}A;Yh%m!8*CcutN|z$~%6OZF9m9X}T; zxF`~*PLXTG`+Yy|e&mBz%5lTWvU&?E+#Pdaq{~$j7jdGSyL<5t(c014?v8Kk{ONMc@P-y=t@ZJHW2FXIN6x;A*0)Xh^L~ggbp`9L!u|CL zx6C-+Jg=CK@Wb-f0oQLp6=jYGe_}!JVU;M$3Rf;fYH_h9ZS^gDf>eZk`(DLa-I})P zzB|~z=+h*+F2zf!?6;xNYN;med1Ukp^hl9cY=7VW^qL~XiSuXVMSAXOysz!-w@PG( zt%KQQFQbt}w?i%4&L5(&p=dK+`q)nBTdNX6lhyg7H`(gu1LLeS5BtAH51|F|mfu3Q zM>0-IKfUMW{rF%B&94@JQC@;glNNH+{b@Jvfrcqd9nGd9YwhAYLRX2$Nb+Y7 zu#a3KJH0cnsuW33(h-Z`zIgI5Kbs1z^6f1zSZ^ZHgJyfIX-wv^9F)E~)_RzpYUq|r zKQkZ38)ll<`p%x(bx9pJCbD0nIU&D53drF-4f1K6$ zQmB}ixRhs#CaQs+Hxbx)xbnXenSS*$W@U9PVB@5@<*>!P4H{Iy>4NjDi;JX%gV~o1 zt(#Sz03Ba*Bci5xQ+&zY=CpZ3vlK_hU%W{NXCy+}*ZWIOUq86bWgP_H@^p1PD@}{` zwDSBaksOTz5%v0r6?HtC?p1$Wttm}QH`QO2R$VmOL5$?=Ees`1zl)3WjI7!IR3G1d zznO1+@#rN_0BCBRG}sX?O=LZ*^$iWGQ==%h)UCXJuVdr|QZR_0nzm%mtf?FSCqi;k z4qH72F#Et->9;h!JqF+3uSmy{CWRcgJJC^$`mT|XVpMH%O2l*xPKg8|Zhij&%VHp@ zM#RY^SW?3#C=K^oh?a5+XO|NE>e)UZ(6i7yTC*yVZmK3(2*gIt&|_UBYD48V|MO^^ zdqdefF+Q8mO*J2B1JTJtbWdcJ9G@8-ipWh!yyEEbdMxvT3WeRC1#I+dKMGXhb90m$ zFYIL`u2yAOFE@+?Z7?8;7cUfUc}*(htqsrUi^~%)RTYD|isAIeMAnVdwObZnfGkA^ zObm9p4DG1I(YJ8+pGizzy`=IZY;_(4w*}DI#1ZhC#F(|aTzfpgN(ci&jX6@N8DKl;Bys``#%P-+gM1LHq;J1w~BvE;*o&hESg1 zsjAnzN}WthbW@z~3b}|VIrN46V8n=jIr{hf#-Z&vS3$G-X>CL={oR8~NeoUldnvcc z=%-lixY6?H$rgu4Pmzv4L4R8``n;TA-jg`~n~2^dgL~K;tX{gnC9<0aKoav@d6w&A z(dX{jgw;2dD$}zuF@eP33+eWK%546eawF`iZ17YXuR6)@O(v-lG2pP5u)z>hLdt2g zAR{i0)iW(T!vHcP;B)px_Yf@?~Lb` 
z_q-muPEr`Jg{`e~yP|&V&7q&m-2X*gL_sFq{P-R8 zkOu_w^&a=|-eFnY_lWlf>)^wa@S>(z__W3J+%5PrMSKih6yI&Oh4Xi(Wl6{Nui^8x z{uc^mfjd^W8Nb7$`3B+jL)r-h-<6dYCj}P4osaV+3e*0W=6*kVyv`q^l?Q*t9?C89 z<|%(Si2;s*5bp8lAg=k`Ar zZ;FmRwC?6kTsscWjUHdgpWkA=IIrE1J+11HbyvgK7#VQ>yOmOKY|`^Dtz!!U!o*;b zWj>U(GTVpKRTZr@BfaJk!1&5J0m_+Tvq_>ngXc#j*C1&&$HpDH&SIj#&x7X-HR6XO z77_oB4Ne-9QLTh~``R}nVVzLKj(C@OFw*>ho4P~F^%1{kuTSLFM^8yQon9@d@!FWd z(#Mu4ek8Fd@GTrFb2OXD?%0tx8ZZamziM-TDun6-X zz_E-uMQ|tSeCVS2TmO^;qju)t3K8F=hbwh;Z|`76|ox@JAzv> zXXhgH&aQd6%sjI$2(eygdWWHq-ykxf_Lq4PCzzz6kAM|~q}Nhm85{Tw3Nmoc*G3tGWX}&C^y1r7e-c z@Jz|!VYLXozEko)%V0OHn&NA4Te4`jbGGwJzsVsex*=J#&5EN(05je|-@*7EAjjo> zf$mL;d%sAl$mD{3;IE?VqLvt8M7q)riPw)Kw_$J0hnVeNK|>DL4MhzTZxDMxcce~s zN5I%xU&v$PvFK$k-_P>s1Vp0=fBEYjsyPY;8Ctt#y93qwcKXOjJ()U=6<({+7gR=n zdq;R_?Y%Xvv+(PbE*7!*dJ-VZ<)6M$YK4lr$*||=YBx~L#mFPlI4R{apuRZfgV1)> zbRbeg{+;9bHM)~h zr!#s@>CmtEVD1>qWkFPK&Rl*qkyG3aNHB~wh?K>2GwJd}`}-(lGj*ln&!fERdmnKe zKNzd6?6x_xq{E@QEd3T!Hb7b*`*lTSbD{9amBdxKBvWnpG*f9gW|_O!>+mxy*6gy> zNer=yH#eX{NlM5H oN@=ZQD1n2u5QaFzo{N`m)>Hp&DJ)qfq|Nik$ON**qRke5R z8ns7lYSmsLrBqRS)reH7y|-Gm_uhgOwQEybY!$>7EBtTI=lgwr&-tI@I1=aNp zy58^CdRN%bdmtZ8*V*9rdsL?Wde4~t_UBMqV?x6ElkKs#MmN;t6l7Y%J%W=r{q-+7 zaG9S4gdFRY(^ikXWKLkNQdA#Xt@L;(vh^CY*XX{zpa&(Iron!JtB*?(M2|zVa5_;i zxGW>*-brT8h{zz$D%uL2$@Z$<3qPdQP!F7>**hZ}hzte&!phU!`Lid;9Yrl!c0a_y)cehjzP%p( zpL))(1OCX?^J-P$^nh!I2wsM{re3B%+l8Xy7UETqQpzUfKD^Yc)6qLHk)}Sqds=n! 
zcz+R=wF~H4K8#!_e9Dcnq9-w99Dm>yb%mwC5>#ntwf)V*wBB5bXf2w+=WHt}Xb4|qs47;0}$~P_-lZ$kif}-QN zO@wX!YFq}s{510-mH$G_-BnH^bTFHjPEM|CDm6mTu-YXRK%#Vo0~6{RaeZD{HrR~} zpm~dvPuxk?$9N_bUHx#IKEPZ8V?urV2YzdJ(CxjV{mM#HSS^ ziY>C$(}ast9w~_g`b}E@vJY9pg$!Um94D!BXs3JIJq6&Fk6goj01;ynQ{&XN>e!3?xIyGkQ9Sy4bn3;a$<0muo>bEJUIT)CY zT4+yxvb_kdNEOuEPt7A*qr)xhQ@6JP^lj(ttt+PiNx7&+XSTV^{8RBR93-klt!pyM zLl07-5B{5SygLD_4C`_h$z43ct28qFq8wd^h7QZe%r)-+GyODk5>3MIqRC__fYu&( zyy|wz-tNN+d4{CxbO8Gy(JjX^F^wPgY~820p(7_a{@~yC#);)negffPHZ69Xd?o(}H;&|xvGdq*U$VDu zS}3v|d(~>r1HpIVpVrb6yE^AJKuTr*%HGsZ>g#0^U?q2wdo7h1W4w+NBED*cc-2ER0F3gMt3A9ims|&M_3!r@#i{p9n+&nK%OIf zNFf;>-t9vD20BYnixXnK`R7p$N-itIIqN%eP{=j}UuuLR(<>AC?yk&@PV6_L4?>X*Nmpa_eK8hcq;Zqg9D!I@lqPK0}_I@3bN7G3<2hnd+B8bHv(S{EUxGO^O9OQ+6GHTB0CGU~u9mQOEan-vM37t=f5eD!)R8;Qq_R9x#(KM|wg`3`0U z31IEuLWr}=+_vSGe9nSPU#w0Rlerj9vhx<}U`U17={p7fj7b$HVBS0mnG=uwYJWr7U_35;6Q zGbLfB&%LN`&mY@wbp%|QUXmyEEBOU@uaQ_M$bv;B7`5Rz=(bw*2wEqE zN(++&)4HSgCA(3pWp+bWyGuf*v+=i;JYHF*0i27oZEAzw6A66go+(M^@wI0NAZ>;mTI zBq%Aj*Yo#>OBthDY6AOGwe zV8g5rA7Lk8)=*-uF14>FxiRD@SNx~L(@yhE-}%o1o*hjVuOW+6s5XgKVZ#})zZe%x z(5uUgz+tP-010vL-KsxWV85LB9C)|BnWe0_s^sR4JVrbj_uF;e@2b~W!NX*1f2{%c z>8CdD)051f`#OXqhH-Q!<;1rW1rvp$zUNg9WG7f3#Yt|C+beH_bJ;cmgu>Q7I+FOY zvP_|qqP;jg9@pVE@{oR90hcC~TkX!23i^sLo)hkgH8+STcID}K+DJPG&Lo575S!VyxGtS1G_#fIvN&XhB&0+|$d6 z{n72$=R!7GKDk&x4c%j8u5G)IT3j-q(5QXN9f;ShdQVJPt%>j1hi8h*MW0}uKVG}M zDG6#Q;pxck7%#GmdyLWN#UntATy0ow4hB(*TXy+~yVWH4AH41ijD_|IcNI~7bt8=l zRYW=Ze+?eEZl&I)CSiiwjH!GNQu=#yy~=nYfeG}57|asa=zZ%So%#EWygZ#K&_>Il zbm*;XGU?-lcvcaG3^6ZIw^t$qpfO{^aVGc13?-NDc?AE&G9V4~Y}(R&IQRaSpxVz2 zH%x8VaB2rsUXHK28riNpd#2mHUctpzrGTi+!DB!Yl5K1Fqt8nf#XjRS3&?CX4#qp>2)#^{TZ!KGfpgA zBr)y5R0Vt6^pw?@1g@+9Gau5L_k^5*;f=`r*2n0u@USVo@`eY{jqLJ}%H$u}vLx+# zXLI|lYn;gOvtK&5`fL@i9}#@KC7y~y)v;KMTVsz`{1~wDv`=jFJa4BmH}vr6MyO}7 zsR_d)`yQL?+IRgTw^v9Kr<=^x4#N9G3^85M^FU1$20Rf;TnAG8HXRSV4#$(O-k-fjVELYqiYXYF> z21+6F(31uec;0X%Ajr_l=HrPG=^ggHr6K$JG<7314Cfs?3@JH+{YhI@@! 
z+Ye81UbRX))NWJKc6dg?ZvCp?RqQU-&QGn?Y$+Apu*W-&O7#w#iZk;xWQzS}JcI0= zChLCk6@6_I8pAKlHvLP%o%$#ah#N(<>&C8abgg5c;w`l1195>+&p z$JaU7U@zVz#Gw3?$N0y_4>9@%M#+*(uZ0z-8!-dpV_KA+9ye3^-~}kOPpSP%yA83r zq&sEJDE}c)VX4k5H%O#{Cy&8KXUV;+WmK~iS$}?XWwuvsnkhop!a*M z)WDHEmD{A9d6{e?-Og%`^r)@`O^EPJ_GCE;_AMkR;4Yvxd*oEPn*JhOPqgrZx{0#w z8#(fh1W&BinHb)6BJ`kZz4JPgdlARhrYcbTgl-4tHhyO-@U34Q_zb zl!W7oMt^4vLqJd|XBlm+Wj0U5&kj;ghbZ%FtAiieF6VvbUMt;&dSz3F=!OBCg3w z0J+ezQ9HMOrl?N#b_HyiB8W`lVy7XU!Vn$RKC&Dvgt4G?+aKND#|uE{Wf%%Z43Iu~ zQif%FO{Hw8M~576D-HR$O#;Xjdg$A#UME?m$K_f(KQDgFdW|n}G67hu${7&%x!>^F z5(avv36)PfW$KadQ6{slGnvQp%AZ(IN|owY#sIFNl8=K$FybWl!?*`=Ne$9Ja-`Q* ze;!k6eB?CXZVVyr5yi_ph~5&D9h}Hw5`1M*0K}wkPvtg-UHvHsb6%jD*#J(oj}K>D zqL&%=-up~HmOk4PZ)_fZOOK97sAT+Jpq^G75`9IC`m@o?O1wruC-kNpf=@Bb>73rP z9q%+#s99+DZ0BeMF9e^>R8j$&e5}wX{BfDM`NOJi&^D>LtK5qNv*;^!h zLN?>_OkD@!{!bBi2i@JNM#p*$^L+`NcL8{}JalTn=X`g--4_?Q8&(4RfJm0zp2R<4 zXIRN!9O~?3>=#Mj^Zw!QeyOt{mq_q}iXw8=0uBF1GwJW`166&k?A^hUSmMgRG22W_ z;Y5dlGeZ9X(7Rr8;F$YZkEG@g`*Im-lw>huJ$xM3SM+hV0TMo+?B2r{c!@+nt7vN> zz4{s05m8ajBUUn(7bHxIgOmN!gZ7QSDiz~aX4=X`z62juI- zpL?G&3kv>*H9lz%RxspRw&b6!<^8o6?sWB;^>Lh8+oj6jQ+vw}K(D@gtoiYyedh>M zJ13O(Ulc6Jd{VN^!13QJo)qaY?;uTkZpwe~+xG11?)}C4dtnbna=ndxi&p-F-R*hX`|bG! 
z?Sn<8f)w1my!dq*$jwFbNh4n`qfhR~HD5q=KARigHVU2jaBXMuz72hO4t&X9^9qDw z+F+hOU{jGHG@u_r(J*QGTw5_Ul3`>qUdz zoB5qH%<%!O*YUaeq5(~cTUcaNtXwdmH#$8dCFNPDi;4H8%|7ja*yWcI-x)YK9_@Wk z7xU_b0eG{vFcat6SJ~*JG%TFQi7I4;1F=lO>i~-#V?W=BocT%=ML7x$p$!<_Uqmk@ zl+QOLi1uVjwOtL;G%j+_dpg7$m_gcqi`eg*Vb+J|_nos4Tz6aFmHJ5S!cTv>^`taZ z%qu&Cf%*M*cEIH`hLNghM?R+%KC{K_DTGrLVKG#E1t_tEJC_`*42(sxCL``bemOW9)Fl9!dr8iiKd zFSM5p1j$WOz-x?a1BUhzWw4T8Pt-q{%k=NBol#IyGoF4OaSQ)fo_P0rm>`^iWLy22 zw3bGu2+#h|=i|p?Sz9T7-jBcj0&EqgiYLM6Ro2l<<5lKNcm#N3kV1{DgoOnV9zH?h zQ7kclPBH@+Vq8K(844<;9k0V8$fc{%gy1v6GxSM$3c5sX*v|3jbxBJripzK7U=e+x=)t{gBanqLrQVlF#?%p-?4kH&Ra zZGV7fJZwHyKuVn8_U^W8?Q%2dw~cxyce-{aUYqv;M_pJRO^dOaS#rJKdtQw4bTS7&Mqd9lDLz(vXNwBRx;;< zZ&Tns$7fps_x2B}LIhZycvx|boojX}$(b}8b~(nDaAxAaT^t=%2ORzFFuiQ~xBU5c zvq`@156n7|$Wua_xnSzHQF>LrVpnprP0npW3*2raMwFD4VimJ(CYqA0FJ8!h7)fQ> z5iZly%A$?W15s!_^DqNv{akwDZY~Ii;j+t6GL{TN$YSH_fqkDD2%2dW_Tc;81@# zU-FY&4|-l;E?rR{a7L3cGBQd;K~q!jR_udVQS`_JD${b(o_JvG+!ML3vQL{^(FqZd zVvso=r@loBCFiVxl|Quw2Dd?}fX^tIn2)d8rR8CH+O`^qgXojk3u4Or6}HD+xYUXE z28I~m3Yli7%5rgk#2I(19^Z>>^^t#6^T=|3>x%iNl}B-fpTS>U|O-wL{F&s8h%@ zo~NawqB_sU-cPVzK;@W7xF_CUy{2?}eH|_8^bQU%o2qt=mG6Ux;|$^Lb@mf7Ku`LD zO*1nA7*{5L2HU3;`EsD8J z8j3otYd!4eRSTm`^V+W>|E3sj&s4`>sc{xl&{gi!IZuxQMJ6TRr#fafHq~6k$b7qG zl=twU0aq-Icw^R-%7I0PfWWq~;oFa6lt__vtD-+6tG0OdfZUqkn1=dO&cfh7AQPJ% z*W>&1JyfqWV+K{_n-lkND3f_In-7%-2M50t6@Ba<7$~c5|CAe4eCepI9d~0w^KX52 zx9#Kx20m2Lwa9ytEp^m+zu!kn>f6E)*pPZ*(7e+|tgf+u9 z+_bjoeGHvqegzvAf&X{=FCl*ZoMFDvF|ql0voBlDMpmSuF9dfHSYxRW1BUJ>nt975 zimX8Z^BJV>vgGOAU8tA)Fzl<8#AWgVz_G^joXKWe>}&6aSJv1ZEA|f##$%ys2a~vy zBz-(f-;yZ7Q}^MV^9#-ZJv7+{R!sGBE$&6$5B9sqjzjo&`CFT4ehXbQRg$ zCr1i1@_@uNZMpY)8)HVDqOpuNtEn%p$E;jz9fRZ*6?fWS%UBDuiBB;7=8p)+F6Hf# z>tbORtTHbZeb^-SqzMzwuQ4DgKE1tze0TTy|51`Z!L=+xgv(gS zLhuOj^p)TP>0~REWTa#)Y;2k}3+}`n%b$pmo(nUj8^F;xIocNT&qZX|g3`0_NavBy zA-J`*wYEd{(az@hr#@akh83P7(KVg27&9ruBE%pL@Z?<$4m_p>g=PB<9sWnZUZnN% z1RoDyU$};JA>c8Vzdymjjz6LfhkY$aj*`eplxdPT3VNZ??UF+eR;c*0AjCLBd-}gs 
zArH1ZS-EjT5^$ruk(K=#^i%%HM46H6_GERf8*;MV(r&S(yqA1bRarSi&|yLM&QbvQ z=;A-9r>TDa9Ee^FWB6ov{Iq=0v0?0GyNXIbDYCtge|>YaI1Mq24FT?~QR1$^#|^}= z|J=*Y7sR9{yesDW2;hkx7I`(bvUDl;fh%OEIeABZjkc-*km0DJq0!(8OtJORvoSkEzJelD*3d1GD ziz8e!euIFuC?mwhRHLcHmC-hPZaW_KCJ^3c(;Q2h!KAXa2Jb77lOsMJ;UB$}d%1>r zMMZCO797zbQSrmE)hG#xEf;2K@RFpv;iFP^#m9t%u?JO78BfIMR#sL>Hyc`8LI1Zz z@-M=cxLVgA*WwZq(wEyZD{A6?^-2%m%sAdFYR1q?dJh6dIe>0vBBh{WBdh$*C>Io{wX|QItY^&m z`R8qfW7XAf?Cq_aJ!HFpD7z6q|HOvd<@TeDTSg{E7GYt7273e}P&t*7eU-{I@CpKS zR)Y5|Xo6cGPhA@M_KSRaPVvy5;=Z-(TG`c0#eR%{Fz*Pgx2TlZ>Y?!Pz*#_qD#ge` z(+=2`p&AjBqjEQY|8bBH`6V2c_@^uF&6}%B zm(LS?{UZ$>3t{wa#DiMU`zl(_vGPyC-9p0>Gx?m6|KRq5Qg8oG^7V6w`4|?tIOMmj;?&ita~iLcxZ0R3BZq*D{m@=YB}a~89bTK_pI~-`a(?&UH1k7+ z+{aY>+AgM-G@>qju*L)))0V+{or>hA+{Va#Phc9IA7&6ydfXS6kKd*=^*8{BARZ`h zGo8zCbcGDK&fvtO!>Sp-zOI|`4>yqpx3C(_=<)dReyVKd%T{AsCXB|=3Lx!VT5X(<&JR)ICp zSfVpc^)%p7Gyp2Q%4)M%QhpT~X)W^m5;;q3z>A0X`D8|m@|8>a^$8n14%oPj;Mtei z2u_P%2V+NxF!&~Nu3oR56-<*SvA@(-`@1cd-~O%o+l-UP%Fl1=D#s>0z@N)y0y2R)A{=by8`|n@CWcRM&x%8;t}n(szyjN1d#a!2T00g4 zYJh@nNxRalb=M7^q1DlLR^{Vo2sbak@h~MBJKX)$2_M9w5$0b(Sbh6%r~M9i4hLB3 zz+;>{I3vhh_xXQ#H~sTuW|i`_#eiai^W<1w@y)&D>#{kQ>)LzDbWPU;EpV7y>O#+k zUXka1bM|7C9WgieL*b+eo76bJgYl*;kTG_13)ybNK)#LoPeF*5dGTPpP%IxH%FHV>wAI45;`kmw7 zRUwuiEL%qEk^q@)pFfsOjVFRuvY^HnbK^Mp!(Wc2*stdI#V*d3rky z9M8l*AACV)gk#+=E7qx)cCxjbE>9w3i}FDnzO-4b=kQ%g#{xzVrmREG3*m`?2wWwp z)S#H1z!Est1Q+!yj5j5byYxwyO99eJG66xhX%e1G)cfL@f;XGf@KW?e`ogDVCHoRn z+L4EVdC=kU+u6ieyJ}0}UfqIi7?e!T^ zo}YFckVc}KT(PM4cMuzLQRBrE^q4jq{w;)9Yb&!2*0%ro^$*3iT|g7}h?uzcPB`Ly z_Lphi^V3AjTb%_cLU=t25Obr1y0v4#Z#V2(wp7t}H1?BK)j*EtYmE#kBVdXvYmtWc z!$-F#3Nh1w*qu_$GO*03_RR#+H@$o)pWtGOmp0ZPE2t=jnV&=ZII_Xz=4-3S&gYZI<$Gl_u8Zi!%T!>g+hVy|sI_Rgiaa=YX}|BxvOC%y zFX*_WEi2c_EWoLA_0m76y$o3QM187cH>PI)%dy;{Q3t;x(+>7smbIVUB-Wq)rwfhv z{*%g~4tsisi-(H`#`}75RASTW-nR6?lEb*I@&u_?fwaOmg}CHY-Up)OL^F!rN=+l4 z$q8qnOOip+KYmz{uRb6hl1VNMJRTaa(#-dIa~6a%zh>qh5*isBa`4g%m^W^9$*uFI zf$nBqW}k#{TD?t$ZufS}S~_N5;^fuE1uryD_7Qa+ag}ajJewxc5;Q{F*5)Ho=Qyop 
zEa;fc!ze|pv*5a4epCpTSY32oS@61&k(zI}C->21UfWLK}6OG^@k~;Zvy$+=t4+W367bt5=b4y?Ld#;-xnnyMY zL%m*@K&}X5tOlRjnfdK>Tx}3V-(*dOhsT%pXpX;k>itzsurehKhg5D}fp0E-LV-jv zrG}EPU;M>~mSwuyeBNY(1e~+dOvFA*0QP~6GnhjR5RdZ*w$<5@?|42s#&12`>w=g@ zjoW#jXx?riP3n>kkH;$aUz1lRQOxP(6O^!*e9!d%B9wJ(RP`>ifOiAAf9tFJ4FTw9 zj)03Ap4!yNbjuL5SpeN!HcBJWei$ln5?b2Vpv8SBTO<5cHxv9e(f6W#-g0P85BNrb z`Z|7%Eou;lZaBen31SN9$&D51P2qlVfx${me$MHSsb4BnDBa0e6#n3TARZQU{3H7f zaxzAYb#wtV&qV_C8cSnZV@r<5U#hAAq%O|m9h};UDu*hQq~X(IUN{;kG~J}$b5<*e|c^yx6R*NU-xWs225Arb##<06*rB#H8i^z z;KFz=!sc0ob9V5(Sz3%yZnL@xbEfFbT4`Jbg53u zzYFdol;@&s&GO5qMhg->%1;j4g8dkyuQxT3lc$?@`-Ah2!*S9#H*~TyN_0bM4GBM% ziA5oob)K3=x)IYkiFYO>Im-LDJfM?rf_8Jd8G}{Rz$V$n0c*MQ>-sGe&|0QY0Zu@mU5!;K^`UpJ+Q+eiXERU z4t@a--k9CW;u0t}BH2=nFBd`9Cm>$}-+01|E11Y_;t_7YKx z0Z`0Y{lFDYmys zBg34y0O@^ueh8X1lB)D?bhmUP;*7=pCNex+r&p%)c--$;vi%A#d(Df6oSZ4!+$!rm z>FV0lz0HIjfpeGujkKSdi|aA-QvDRUg68XwAy;C1RgT##!(Yu&)RGP-GUKHN((Zv~ zfFceyEtQR-7{;2Gl==@V@Jw;B$NTi(CbCDNP&q8WsLF7$|9<$@;;|Y%n4MX$lx?r4 zF=jbtT92h`%$0*p0O&dTMT%ui69Foag+%*)mtaW^^K-UuytXR4H5nyu`mnDWoLI z${#~Cu%oC&HSQph>up|myiTEJ-wBl?r!z6oIrD~|#^)8oP+_klbW@#bo!Udj zkRc<}@jPcJ3#a|&M+Bv$*~C3`&jWn;sLFy};Zo5)T=>{N8Ls4}O%aPYopA8j`lBP2 zAvu^BFvL;S6pm^!105~?G8`029`%vYG6XbYtT8k*-q&St@>XP41a$Q90`H{d< zTmAtAAkq`Nf{C{6qIy@p9k}gGS5865WlTR#v_w9_cK*wlun;+o$T5FS=X1S(=Ls2i z@z~+^OvLp*f1J(rgMWjNfq`t9&QDOz?<|~dpk!)reU5#C!_A`XapeLn=WQE_3Wd#b$WcK(<&y7PK$sS?<5$raiadt*V zZ8S+qgL7`ra|_3b%^GuyiSaYlPoQ)=)#3dU!_4}fKgzzoUt3PkmmSX-GLM6ILPC48 zCnoeqKM2~7gSy;qcch_Xqk+Zj#YY=H?G;;5Zv|LH^`7Sm#axDx2S1g$uJXftCuLyD z0xZb71P>rm!NTKFqhA5HYe^99Ss{o0sHx`yr>W`uYbqHNnY6=vad_EEUFjRW@pz%V z_DYE#gl{KP3$wfy>WjJ8mw2Ur?2{p>4GOB?S7=*zk#s5T1Rc1JMUa<=x=8AgGr@we7JIlQqSS!T`L_&$lW?nt@@1t$z zk46Ke<0O4NY%j?)KF^HCdM~~f&D~ozdyskMpw#Ahy)uRv;PUiKkU1i;t=SU>kl`mj zXq1QjDRrteG*fIB$bU-$W^CG;%(R=bO-Dx?Xk}QR=blg!u+3gyOWc~W$hVx;Fq(rk z94cUrn=rtItK|NIXZ%}AHDC2zwgh=990Q<*&Jz#&OD%Dg<-fI;vo4$1B4H783a(>E zM>Vf^@*I|E6f?S$le_hWdGY%cVAf`4hJMc}u>PMV`@tB 
z@OMV%(j18u}t`2)o{0W;Wki)?KYA9k!keE1B(VP0Bu#}l`frDcxDDt_GOem1B=_YN?dMz$0PEsIJqZpuT!9= zBU~=L9d9imqyp4tPQTkMfriD2}c8eg3eA)gpUpa(PYyWq>Djf zc_!l?3_sXDiL8{)i!#jeJ(39}E@UL8WK8l#FAA&v(zjkKyglZ0XM~+5@%97Z9UDNj z*IBy`AK=%Fwa)U9RWJytP9ZjHcPaM0?hiH=7P2?vRROjym9-B`nh6`e8&SlbA~eI9 z$JGTp(l@8pLpsq^MpHkx9Y2=}uyt|D)ds$6t{u5p9KV+N1+&1$&Dl54oNtDVymaZ3=$RSsIsGS3k_ei;C-Z&W=fg~44aGZ5_tn7= zCNA!cs005ktd4sX-s=<>K`(}qdwp@NQfkyvrRboT3FssZ=@^$evGs4?`K#enH{?t= zjj&8oxglfLNXFixi6kI5<_M0a{&T6cL|=qCy!EU6SHR8`n;npmK)DMJeb-pBjlV$T(6XX$uL^Yga{EgH#c$HWV-6CN zmlB~E47-Y&S_1rbO>4Fmy|9uf?P)LB`E;)5l_=g~f!`hO&h7ghmjrgE(mRx1i}lU5Tfen&Bm=*MWX?D4{OAQh+Lmc^ z(g*<!g0QrhZw%qFU8I<{*sidx@;F{8T&E;p7?3d~2n=2Wo*wR8%yM zc%Ni{S;+0!etJL?dLws%eh-ZY-=1ypU)4tEzYpeN$!QG(1{=qYjv~QC7DZ)cY5)}b zqn%dJDJAICxZb) zo6|+SKp9!YPaqel$~Wd(GT&CFIJ$;2#C3}lp6P%tMH=(gEy+J61|#0dp3Wn#+pkwV zq09t8y&Ct0a|2{Q|livtGC8x zV>szbv$d9Lo%Qw}6WPd}TLRdF`i$tB9DTfbwa3n=`$ zErL#5i;#wgz{He1r<11)i^No6h@Na8=IMxTKN8_wbqWfgb?{3F25huS-(T4mGrS@B zX$nwTMsU9jG?{fU-Zip;e4BE+2GHfY*D^0~++WcL!VxX~%={|QMYRD`vCvGWz7n3M z0@6&1X$kBSGy+YqFK(6nc*zOF9S4r#t!%%{@pL@RCS>A(T=pk#K%qyR$%L5o7_i7m z$=j(D3CLyMqf5W59iFXQK5iiGFjbx%d4Frj~yOqp8ySzLS8D^J| zSBqx=1BreAGD)%peZ+-(q(Scz{c|X~e?e^;MI-K~Yhm#oK%{EI{?dy!`=3Q>6lRS- zz`mgt^;QNT`6A#aUsW>GL7-r0`Sekw^3U+8_Td zo;$#Ad%)e_c8^;Lnf)oK%)ABve&>pHURg5Wp$iMM8Z7h~XgSXa&NxO77GtdBZiIHf zGxM?M$CE04>3BmXQ+`F(3-9E2uP}TR#tRvQw+#}$>50WpTR_yl9J(^tc=3Ykt$zkl zrwK{aF{tb=<#;Y-p=&nUjZG~{`jl*^RMk-5W7b>S$mw;bO0j*~8UK%4dh)Wv7BL=u z{~l=wuCvMu1L|@ou`W{%AWwu%flAmmhb*7m|HXQ+G?40aab|aUSpwK^!M7(VYVbpS zh$)RzTX<+>q95jA$q!QNO5V-npwW}VHM$#+-*s+qzH1?m%lUy!D!SX+WmVqD;PuZ9 z>?U|(R#`dM9ZDZT14#tR<)G`WYPb(t|2N==tuPmG?qsByO*!wzEAVN|d=9iqx3z=WX$+JN+(?=>V=ETu|cgq9=m)_NG_I+e~i*qwZCMwmhA$^ZaAL4G&v@ zkZL7JH-RpXR|iK1rr}=Et(TXo$dvP;{e?m+pdXcGO~-uXe1X1UZn}JHXt+C`}lkL;rZnJB2I`xSs08*SGSRG|0!r#n(|Y0hx9%z0yDA zew!eP!%6r?WAwG`gHL;N4axhyJ6A_r(l@2dz+x%>di8ODlqdNhgv8N-=suR7y(`Hi11R5b)n!1^`QXQR%wPXai#iOj zjwsR`7Fz}ZaYMnLBM=9}^K5%GyY>tiJ1)6<`Pi$a372^Kl^cd08`9=;mNb*{Y_`R{ 
z*3$CBn~8!U%jKU89A}EmZ2*9+qGD6TFt#%l?uia~I$o?}FzvDRH|OcEWpWv4wN>~Z zj{94wfqn*Vx^)0Q+XBtKgZMe#AX>fR0iSyKk`Fjr@YASabaBY_{b*eA!3;L4aGY_? zERU^iFNnEGlFtVhlW+U8`i%`-{l9b44gJgrsZK$+9`@=N=uf34R9>k=h@@n9KPKM# zjH!l()%TxDRnto2d3Xi(+Ko6(^q#)_qwp^;0N_ZVC)cl(WV&TIgx26+T)&Cm@Lepl zItK6)Mi}+FTHYQV2j`x!2{UP)alMh#(wB9^DBFy(_g^6g9{St-P$&p_1e%H?Mt6~P zbRbK%1K^-U*gAV3drb5#Le}VZ$zxW?S~Iahz1E|@49pe7WKz0WARkf=m@mD+p>BX% z+~WNj^9U!h`KrPP*bT1-`7IK9BA=@o8a9P-ob4k3f;l4ra0w(EanIzd67~P?8^xD3VGi@c&?TDYM|`%3&r070QO+|~SqiQcHZ&BYdIXcc zmL>=ocy%7Y^e4nT_f;J?!#t{WZBWmQ+k0=4%5q?R$7;K#`>vBaLP?6Bt|CvpkoP3`5U;E>mgqtRC7-FXdU-V~^NPSGe_BzNw^z0ESIetht<`tx1#93Wdw{ISHEUrAz_H zH6!c?NkFAht!zdxK1e!`(dlw$E=_CSubvYK+tHn5wohW#m@>&pw(d4Tit0Wg9AO;UZbJppE05qZcK z4a^SLC&58I!|hMvFn7_ z{I)H(?ByTwBI9WeP2I3U7kc9VA%^bYT^0x9E!8X{>F*4U2=_KMxYe8fGoUZDDe40@ zR@aLzI%qCJhc^A-=D5;&y`@-q+YZ|+5HZy9#VIbYN?|^ABjX}6lY0nP6 ztLSVbrS0o#jTP2PCjd8m$>*?8gLXPpD~9$CDz=>LbH2O`miDSr6RQEz^mPEqB&#QM zlz$*jK^;;PgwrcVNqP>#qTjwy7xe3B(DMvWYu|>eCIYOT19^?}84D#}va8 zExM*3Ih|bBV7RjeWV*VD!GLh{@J;Rw6bv<8qw1Br;DMUd8x-0DUz`OUEN=`R0?|xc zb4|G%Icu1Kugm8n6%`nOV`nd}8Lw)a%IDus-5cPxSkR!6h)SFkfTG_F^m9+_ zPZCF0wx4|sx=>fQ$wN~CW&HS4`USb=E0m98lE`^A!^yX!ZIf9cVF3ZTZ=Gnw)t3S$ zt24r6Ph13EqlpP9q5<&|&@|pfs_@**)LJKtt(3c>KhJHu@95jn@$)gL=$9EY>^DPf zaL!b^_iJ_R&e!?(Y`zvd6k?NWrNCDi!)85SY+0w8|k_CSv;#wBMJ?yvQ= z*w25kL7^H?(#{uwfPIApXeD=jI+1I&@! 
zy>dI_@*FWK8Ik$75(Ly6_W$R23&Y3A`^li7*&RrYrynbv7PFWeB9cSA^UeK8!f2Mf zJFjwV)0eUsQa3tldW1u#@bGe;zA?9me492RiD|h$ z9f?+P0Se@1--ku?t*a_-j-i<9g)u7H=qc#Qk(1u(;MVbezvY@VF6Jo%VGvZmS|Klb%?EYoDPuV zghjB1pQw_BJXreOXgT;t-KBTWb1=eFDiM5(B)V+Vls=~0Cz%RR(ZJ>f!kgj~dDI63 zLU!5!#kZ|(Nf^ouvMp%?go(kD#yv>2OyhfoHijC_^l-#kz42VV7wd(K57<~)0a_Ec zj zhvfF0K?F4U<|&BJ$5waO95V%gFk0CMIV{I*KD-fONFT4JhFhsa-q6b6kUdZ1r2=)B zk!fU;evyNZ4yEsDy^O6~dFAy@5y|`Y+~^O$ohf}vtLM%UpXu0k+(IqsAy0dWims28 zJqdl+)tk^iq5D{LXAP~F5dNMM@#@25AgLDWVm!IwhuoFMoc?kJ(vEGW{o<$RX$rD{ z7tK9~x`zqwi@$|v4Sq@daHj+_6vcMYg~>Hu5ZT?|nNFtx-eOI^ru_Ad~< z4g?V)MKJ3+x6WHfL~URiX=gR)#D8h<7WKDXtlVYRz}s}j>*TopKcfB6IMsXDjA~6; zBF+o2-j@-t-_Ck`dl}!)UMz+SqG4R?%HK(7822{R!3!sxf=~w6uCF1ulG5Kdlaf%< zIPuY%yH!ly+vABhQ61T)X{(e!s>Qduk=7r$(F<<#Dd}0p@qSY(6AAodoXBc`4Q|G0 z)LZ$<`l&2?@V6k$`*wmR>0jr=a9;o2(?RWwNl?%~1tYryES z(PFpu{r@BEEr6o@|Mp)|DN(wmQvqq|4(VMo)G6ni&lW%&^^)6hf2xJkYHStNI<&Bp(9B2x==@#XTs^*q z{iE~$9!T6=w}n?v?(sCwPD)yuk!NCFVo%77ow_+FPqns#r^Uq{MZg<=<>bM6r(VC)Q5L^4ljxwSRj0O8bPkf6 z!pc~sfk_Z7M|@2bTDn5z~r(-d{HLR?d`}#LD?Q{Op%jcQoVV z4G|@w(dJ70&k(7n5(3N^RDAaOe!8&77pKT(8HY$Qj4hwiC}hFicP~GrQ>FLN=u*;T zb=>1Qrdj19+rF1Z?>3U!7}WO6MUT_^+|t}?4*fU64+}q+S4n-1rcrt3^xZ?H#K+^IffA{93y;=WKlFNDTB%-Z{ zEoxN0M-i?Qag7R^H__6BWZ^DWT1M&&f@N-u0`qfIY@WHQFl%k8P^7WZQ%#W|yj7xC zGRg)TCX%YbgqFHNQZ(W`rqMBqr<}YZGOJHBh)7JL9@pKvE!8G0 zst);|_Y%0x;EBn}m9*TUWkx4{d27CNMI& z+}bTtxz71{w_SU3dM7#N)2%UL2c=)g-A3Rau>|;h-__*h%O8_-%Je3*E1sOVTER~k zUC*}h$oSktA5-(aUR_&jmjhOy{~U|iKW3)2Hc%M#gNj;SMyBK%yi1bhZm%PMUoTVI zPPBYB5;d!XJp&gY&abdv$vz^Dv2X*BtRgd=Fp0V?`<>?1BkX#DImXA10inU4pG$Q~ zF|crrgNBsxS-PhkvBW`Q8<+{y345k&#DuQd+GaakV7+fx>dUK?R+1 z>*aqm#>G&J{~QeHV+~$6zM}^OtmN~(>3$&C0&sZheyC_`i8W-A{B0LFIdU3iN#P+c z2079wQNw7iV@{$`9&l%oG+D24?U1_DdZqMHc3zF2zjvcp86> zsb{|*&;WsG2>ULTkY$N-$L8y1&s|>InWD7N@Vw`Q|AV1(G}CeiC#l!RcC08YG&Hn7 zJZ`U~;EOuU%Ccl=vAwl4&^(z}-k}@;4&I-Tm{`H_ZNgz8D=%7F^=^B6a_JAbR^C_;Q17H}7%jT0eG1%RL|Nk%ftSvw}Jhga70WkBV1c#SFQ&I86@_#7+>qjfni+Vb1ckuvXLl 
z!L*@vKUv3SxN4T`@OLn@K(_o23}V^+@r8qyRub$_Ru(KsduPfOtiJ=lT{VrGszMsj zIQhV|bGJj~cLMyZZMjDGFm~D3v|oq+zCIS2Mti7aDU)g)^qz+7+czHg(6_Zm=GPE3 z1x19+A!`!7p=y7%Df?MRrO*p>{`6qe9bGykD+^nCsakD@gWJXS9&6GYy-$Er#4tIP z`+nt-N1$0piG1RG{+vNNDO%?4R;@@<{@_U+5u$g1PWFFA03joA8-cncXxf? z>ZV(lX;@?8V5NaIk%AiJ2+Fm~Li_V4J_*U)=<4L|k7vQ)A-It`q!d$5=zVf}QqN&X zfcxaQd{1s`THUlk55m6KS()(9ZirojopEL9$@CK29VA`^xh}r3rY3d*C4$f1EG3&s z%k?`^!%M4;8)sV^+1xEx%`Mt%+nVCvN%?=s5dqa1u!u&3(BaQ#yL*GU@++n?MosQ{ zldpwL78JX$rLKM|6Xk5YC6w2;tcPNeLM2)FIm)_F4u*1F#3`RuQ11GfM&+mur6vTm ztGlY|)Y?}Z9WFxg@i)0}IDWIH$|lsN8Dou~ou|n=J4c3wt##lEFw3B!pPZhS_{f)E zf6-vwp?=E2^^TGQFT!cS zIv-MVB*@Ojt4-2?>LZPU2o3rr8>ZO@E)BH+ES zox~}2lnM-wxRZgt=ga03VWE>!waz@MWiN)AdU3?Z#iVJK+0TEkA=np^JO}XQi857V z=b*uIA;FoMe-Gn6q?F-$Z_fI?KTKFW6%}i1VCfdlyg))p`2y$mjK8Lvj9L70%xgqn zWfvzz*2-Be2@^94{L|P|^=+ZTms5CQ5|3`Xilj@2zq}nsG{lo{W}qsQp?|Z4nx=!b z1Q|XaUMbG!;7}q_IOzq52Ax0}!vvk!mdO95$MN~pqWVmF!l^UVu>_2@Dp z#;xwtE`Q^mP7jS)L|C1b9|K3_IrMaB{lLpqZ@N>WC_d;bS8{fWU00_NwhvjwIl)T4 zAkr9HX8DqYoFywuMulsa`}cs$E!auj*wFNrqICID+^YKI2FA6-2q766Y)n3`nJ#Ps zwJxe>!xVUfmZyLzpDK{*(~A0CB&~G6+I`TSIxwvSEE4XC&v{PgtHMvP9u3+*Zu^?H zmBtQ{f$7zWRJT2Wjh)BrlAPu z!>USx=`YnWb}oTQ>sCfHA?G6X-^@7@o5&e|$^OD8qhO2cqSHP@Z?18q9E7yfjQ#f= z1ZpE&m;JwZ22Dp;xo;?dqc@A&=c96~W6LWYg9iI3zuR(HKaeO(mnd!3a05@I`KxZ;E` zFSjOS=Z}*2Uqs_wWs8YNKv3m^w0pE4!qHQ7a~ZW( zXSMeh;MA*tD@YkQR7*n?!5qmip@7t&Vyafo9h(cbwtFpd{UOl6+Ji{OQY=qi6zFbkvx7(!cUjU81X;Mx;CmZ91N?dP-j@MPR;QoZtAIP;96+81I8pQF4q6iIz`L&TQsPp?{Z!-96v5DF0JjlO^8x< z#h&xiS3elLL964PFjklN#g!Bo40@@eEJdX-pOH4)6_D3qr*7ZnCR&wjNKacwzz;fj?O$k-*o2)Cu3+7qk)ZU&e)^ZdJfd2Nrs zx3xWvjE??;^inyudiqlN9H|rV`yX>%IibkD!%~MVF^53`tAZYwkOGxcO~d=%9u&}( z01_6%471XnkgD7HT#r(s9sj%TV~FT$1!w2wU*X?Xt_s-(giLgMW(u^5LiDV*&{>i5aMHIMY>Y>@&7!qgS?* z0XAFBdZZrIv$Hw&F;Lg$#FSj)10RoW0g`{8XO$1{OQ1!vuLqk5SEd=2+5wGaDd=NW z4~7$0@7F#XI2W- zD>p=|$eVcZ!pp#!R2qT|JvbT9dCGx9iayfV*2eawjhCJ2-;eta)$cuT>`?xPc`3n$ zQ*AO0O?mxDhKBO8AQGizbe{*5@#mUmhvOmOMy6(wrk5%O)-xUa>X+L&?&11JXFrt% 
zp(mG$va&%xgNbfXKF)L%Nku)uPQp$a!)M8JlTgowLTe-bZ;j`F8=-KQ8G`?}XHmz^ zJo+;9;lS_TXC2&+*Az>DRk6nV&Z?NJ#V2B)_MAo6tLcw)Hh#$DUMy2`b`mrCG=uSK!F_v@l4 zyB}7JjmB@0yw6s-x z#pC}^m)zhdm8T_Iee5TDrbrVoeSD#m&VELv!*jMfCD-8()4MA%6#>_Q;e|tfj)1|} zDr*p-1=fGIS5EL3Twgo(|9({w^4wmpIb;ITnKxOm=I14pe-GhBF%m{bqRoM`?mJt7 zdw-AVU9ZlvU}7is<2@COTjcV<3G~9W8DV#Y0592$+|~x2*(ce4XXn!kY=F+3`~ZDo zaB{4?{8=-wq`CGvcnw{C)l;&!&#_->Mbb-#si3cZlML1R`DJz-21GG$qcjVk0#+)G+RvDU*5$uQ2A?&yofZs z!QU}o4y*+xRl5272Nr8qGDgXv-a)64tV@SEA`IY3dmR`AjFGsVH)qVZYM`Gj2q>FO zMRT1BwJS04Zvz1H`td&P^xCCc2Qpsga)v3jbiw?9)dj;xQdJuCeTtwo_CFeX44h_h zkHHpJRu3oVaX( zXX*Go=aKNzRdDoH`8T(%;o}as5?Css|k|gy1K#^u(WmTdprqfYZ%=>N1+1T3?l=_QMZ&6`FIs(V{ zf6n^<-s8QcU8T>izo~cr(<77i{wmbJD-yeJu70d=uyARbYZYm?m7p6F-pS3?K>^Re z&o6g!>Z)_5r>|eoRl%BckbjUS{WRXk)F(0)Cj;yO=qZ`&E7iXc17=Tg(4^YyC&x{E zS2XrV;aS3y?Z=AYMKasIgSvqx9qBK7oKS0Wrb@cg#eYG?L#xlFq)Gy0VhOJ>4b)nf zjI#p*3-eqYYqEV)qRDR0GUV0`aTYmDn{PddPM?tR8qL<*>g`xmb`7L3>l93Z3h9sv z=U~nwm3Y{&|La<*2g%PVQP^T-r?J)@T&Ak_BO92UkK6ei6{4>*-y#w%t*wW^XbGIc zyZm#;38yU0x7o+Yxm}}1e>lD<6JJB#BYJ9J9~h+QvwrZiLW{XHLP=|$?1na%fM8e2 z_X`_Jx%mi}-g6qQf&IJ-74rRD;MPo>XkmZ*u0&xxCp2tXB2I$JQq$dqC^C zc{1I3qtOZ5Te_@kyIpA=|7?ik!C(EH3Vu_&J_K9v)CkM|_gwtX@2@ugnY02X>VG&P z-sPs~=G;<)eA}t(c=63oV9jf-szW>Vk3Fud1S5ER0%ciR=yEtdu=Rvhw>0b1Rk!r6 zykIVIfoVrYV!@y;a7tpfIKL8kP|{3IaWp%8sHdD#Zc(%2Sorikr3sD8N+!tWrPqjB ziiZo0XjFi#D)2fb28owM>L<;lZk<-Z`H_hGGu zZguOwEYM35m)a!mA6$Cr_}87qM`f^UxTG!4%J%l>wNVVzO5GqArNxsfQpzfZW?>x& zi{_2=))LdD;5uuFs-W+FIWXE!wgx~DD{-CXSB3e=C1o8R}B%K~X zRuax$;{i-!VkX>1zO^RpU5NPpm%s+)Fjt=;jXnWbOxl#U0)0!!tAt(L$jeF~VmcWz z6`^}R@cd*LohrF@{!}N6_xSa5cPr=9&A2PkCOjl0A&3@_J^zoBei`Cgvbxa$p_>IZ zDVCNcdmXXA-R}S-NGPz%>~^>XH$iKJJLy`DUyH%dEIu+#D}Stn`hI)cZu*>Gc-fEY z`VY>!@Dj3P-g7U*^13MsJkV`tzS~8>B2NjZ^J(biVw2hto=U~xaZ^MIqxE8r9Xr@j zRV2&?Sq8rCVtt)17PaZOH^&?sd98;TJN%0*?Z$#LZC1tMk9u#8w#0AZ9---dHmT_Y zE|sNpuvT*rbYCynpKH{+TX4hw@OLop?CcQbIo(_->;?*WEtaw8dhUo#3-u@d?6wM; z6$2v7EU55P*doHiHgQ~CEMM4gw!wh|@Pss0FTIXNQ4H$rldAQbwI`@P@;R_C7{UM6 
z`?QB&zeuRvueVv$?6?^|9Lj*U=v~36pGDwMr{`a9n|V1bC1*RfsQ|ryDrf`tV5@xI zOYJyKy!sVg1JS$MjWwc1f`(e-fy>qXu!W#Q{`JirFZo)kcBS#Vz75nv{+Vm0Q;|_o zDM0wM{h?&Ngz$Mk#0&DyXEsC0wKlt1%)D~Xt*oRZea5@Sg6pZp^B-RqTF(#jJwT^dTchUEMdvrQGi3$oO~=5;2nH zW@g09pW((n+}+tt)1l)`(f2^?t#^4TC-WXB`%rqiF2n+~ z4eprt)y=ieuKlqrnEZlGDerJLujdvKKHJQUkz3)W+XR!!x&P3XReO`bfEQe<;{il2 zSJ=Y^LO%c{d!}`oxh79=@|>Md)pg-3Ti*H4t_4e2?99lGC!maaIR5Ham6A+J8B$8ubiS=rcRw$wK|%t;R6ucbBnG&61);>A`>R*`UgHvB#%>lTF>^}6QS zHzZJsHM`St%qam-Jwv}SI@T_3 zzl!P~p^qskRd5`(Wgr7;wWIIeSg#k7wkTbc4l`Yz?Bocg1zr1et!_c%ly@ z9;)<-P!b3{%{x_`JT-Tr?nCVI!WRIPclb~nHcz@b{uG^IiN{u!v-ii#?@n^a#?-wF+&#uzjnwxXAphW94=s zmPa#ww%Olls`7hJ^Ht74yx`K!in%mgRZ%hh&gYG#mu_J(-`wh%?Duqfs%OKAAR~X{ zG`U4-sV9R!)ozd{hGRzVqXv8VR{i%>l6dhs0oI*vuKK-)>4JuNA+@3m9EPo~n&Mmv z6LXzUHb`f6$Fu8XZY*Pk=-1}g+Wg}W_x^tU_6k}+_4e(jjOJ;#$5whlohpVI#Ls=( zGZYy}M{1*#0M< zg{AY>r3;m(;<0!*rmWC&@FY3uup{k-mp@sdR;#=t*1oeLIK;+CzOMFHFbtR!jYQo$ z)FJ7*IN|IGN$DGd^aO6viC0NTBlvcc=**I=y4fN^BjlAeQzxB>IL^s`RSJxf5H~%{ zC}}1BG_cw?-E?*i@=jdwsj)%;+HZZNhQ6vWdb z0KE4|YO=xErNHeVJ{||@aoRE-9Tp?Z%O~h7OHl-U z9uj#!;T)SmHMUW2pddFuga60GPMOCj+6pxR3dSVZ+3y|lI*!YL7zXCykKa=b^8IP@ z{jUlE4np_pf~;Uw=Aqfm)e{hofDYI|M(%ID-waLxES=I-bp6nsfPyv*FXOYm4|1=0 zFavrr2*#(Xwc8FSC3YS(C7ntnGFB-yd9J$0g$d{#7X^2b2PXb-gDkF72g;t_ zCr*IX<%>-F$$Twsl&&wXfc_%DEHz8HZ7g?hb2!^wB3F_;H0W;Vaup;Xmize4wTFbskN%PEiK$%wEi@ozJP$O z`dO=gkvQqgchy3;uGq-co)hQ5k|jTO71Eq7qwBDsbxNOYJL}BU=JUH8}DY;VG}H#Um~sI?}(>NG%&{aTP-Y6p$qNW(gq&URN1$O=kF#3oWL_*Ia`Dy&)b#aBj#b>d|lS94o_5$-+TnX7j+b$#6RDY}3|@zxFS zB8?!sXDaf6D9{GuOijW=H0>BqD? 
zUy-f4f7;GwzS4@v;r!Vj28e>^t$1V39eIZ}lp2pLyi0ht#Jq90rj$rtCjB{U&#I>8 z@%_-^m$6woH*fv1DNqc8Mn#)cE^h|fRu&q+luv~@Cwn2ZkJVkuLDz_JJ9rka*!KN)>LJ)`m$3iXn=8wIv~PocCh}l^-ju4Rw1Zp3 z|MJ=J(}$+9%)k%avtj)K=rh0=Ri14VWXb*(l$35c(1xGL?dHMBb zALUICJSf5&4dl7)(_a7sPL`U)6P1C>`Eh$0_RzgL32wdrf%8UX)+wFrCuZKdGOn$H6f3 zI;inJX5H$Aai&X??N5Dcbl)G9-Lg)g5-zeZT4yw$(ST+iTQDoCc_r+vjx+|_Atn~u z)BIdzj}{`S`&!OO*Jfvds?^54F(4IiRiKnn?<0i*I?4C;cC*>xS^lGq=Y|PTQJrT^LmOpI`$>Mkffm!4=m)z= zoy1|Qo=Rr9M|s)anzKk39Net7t$I|%bK?oS%d>Ehu=h^5nsU~a^R1xGC@xS~Bv-)# z>R)_fp9t4fV~WjZ=#GdVzet{zl>FuU!g>#J6wn=!FzGu#cb?ls!riddYRc^x>7CWK z7RV!y5EETEf0@FjN0I0x}EfXDcl90yyWI-6A}X zoH}+qr=#3+FD@vR_|WWdZvh3g!}3co_WFX?VioI4am2Njf{HDT*W$4iWmDlq{|}96 z_J!{c>fE+GZyx6w+*)MLUntL$3|&xe3R#h+74n4!i41}JHn`YFd;WH( zP(#p2Q-l3^eqLPN9v<4sr$6Y8I`QVP8M-LK$Y|K^_XKWPeLzBnJ!1tk#uDf&k zMbmR)^9f7A)wnUNV(upx&1q+Pby!rgTw4^p*=racZ`Bj0B$a&==KXFxa*TU#4X1u- zg8-SG3ClzjV2CcoW*L9S7t2T8_#gBkw4SJQyyP9 zEk*Eh+#-6!A-*^k$3xSd7Y>5H>tF7y`MVOPJ}db+XqdMrWw&>6eeO$oB|H7!wI5np zZ!fQ$95Yi7?m2Z7S&^*J5fI&8!Aw1Zu?gIz!(&GW?wx$Vd^R2`CUU4zmU-!Yrk2p( za&Bi?Q6&xHfTkti4%#6uDBU@IkCWji7s9ne*wap1na!tfGfJ}m865XEbFP?fNc@nX z_RiV!1IeONh^3!DCBZ6-8tk83Zgv)g6e1Q76(~{ zB!|dcLl@iq-2D^l_-t1RMe|VoFeq?ilQJ+qmC&wg_UJ{@`XC>Bczml;LSSTg zm9Va{=7>X$jacJtV$EodgnZW{fgb&JpB?A~m8fC%;ha{@ekQ*Z0zvlr_^@c-DMefN z2AP!Lrb|ETrc-&sTA$ivK-*%6Ma(DqA`&WDpFEgXOOXm~zWfAuR`0rb##S+laA=j` zInj#9)YmVHVvX0RkJvouC+IQXC{!u5r_C%hN?H;y@__!GNlQq1y(Q=bBDmzrj05HY zo_5m>=Gg9@^DaUdw#zQkuAVtv^jIy+KjhE%ICdP{IZ^_1_E;|T*~b*?kBkjBtFlG* z){5j7YrryL{UwaA=VQr>n4@w#jJLG_r!44m(iESSNF5nSK!9&q%nCzJQy%WVSglQ> z?|ZihDu!06vY=jyr+oI$rd69wWQ}s%K33s0eH-Vaa5;ELM;~etaP?SSE@+T+jXagI zw`2DgW0U_9!v>`|+;Q`wCfB$4(Ta@)xMH>jCFqInoh+<#K?C(n1L7W&@k){CK>v2A z98Sej(zvd5;M)jz5&uzbg}!RuZpCV(j zHyRxIjM3e|AgpsK*q^4f=}G3azBuRuuGUJV*akPqK?-X@MGa%|tyaPH3)&bm)tmUZ zF+^f#T-+(q+SmSo#C&n>3J;c2hFX*Gd(8WEyx0D*98!3~xI^>SHO~4P^t2XCLlzohMuu9?Is?UCix6hwE7}jH0$RAm1Bf zHz=JQ?yZW?zU(c*OU-wP9Y6XfZ0d3GxVCPAmI1r=rCcBN3T1kyKr@?By@bPd7Dms2 
z&H2Z)U8G!Jd_2xv%|ffkxFE!X0pML*Wn@AZ_;EN{b=jsM$NafTK=dHbFZ3I<0@LWP z=e@iOGWUg8y62q&c=sN7Oc%N-%Vk9je7Bpnk?Y}%b<>*hEM*D{`q6D_9^;EY&H0MD z67Pl!!{^Pg#(-#US=H9abAMMHvb>`Tj87-$W??#Aq2$jzDee3)DujS6v+m|wK(Yt7 z9RA1Nif5Sr*Z-@fw)~ z-26&Ded015eJ|$A6O3$9n^$LizLWY{o(t}|Z-fF@%T@Wf`!#1QSYp&J`A^B?oJAstj^r74#o65;KDwlohgIDKvIwkVn zG&Oh;>Hlnfo`uy(3mP=L`X~qM6&-mYa1|Q<-c`dd=-{*8vC_ zQo7CLGHCd0*eDNdquT2HhcS>d?_Q(rA+DJ?XXoQ_*%q4Y0lQvX>$cklF+ZC3D`fZ3 z$*vYEDk_pwbLe7-Ia2RZgqPGhxK>p2Ni^^RoPsa}qv@YGYB$+Hkoy+7%1$7UQOJUp zs;k2Jtlx#j+iyvfC*X2SnTW5aRgCq%Xpe(!P3MrM03C6t`ZYF0#zM}t)WQs$n{fZD zod_0?7F|%N>Rm?pckk+3*%bWo81~ln&j_9~vgjyhigsrXDO|UXF!h&DAmrWK!i|os z`6GFyw()jC@rrCx#gK$&mIt#sj6MTPkxdx2F#`4*l^ZN=Ypu>f>ocw!gI4UHyR{{L zeaP{$ZmWP0#@=%Ej_A*g7-P7uYLf;p?{?!Xr7+e!PW%+P?frH&>uXy%E3MdA?kNV5 zXTbWn>n7r`zri6}bmiHcmp0cV(Au5}MxIJaoNOA_?jCLttt&9YV!Qt?jpQ2YNJwAN z=dI3T1VnY?L*9y*a`P_}7~*}v(=hc^O&78j4 zY;5eg?Z3%T^GAmyR_WDh(-L)oY&2|IP03NEg;L6(^Y0|M7qTh*8 zAGy46{B%PWV$h;);=3WJ@{}0#DczUjt{F~7`9TW|`M4U~COzMHvD96brEDV9ZIc?Q zu($cu`!)uoXO9A)f>aq?IkV0iFw7*7gwB=V?m+L6;&O4oe#457gv->`@6@9}Ixp+< zsJAA8k|rSkBJ^~H>ch9Kp<0o>!7Vn2#-}gsi##xn+3C1v-_8d1@?na`No0Qg3Pk_? 
zU`tow<`?Oh)!OZnjKHRoe%KjnEpqeQ;v;*+f|mz$+a>BOEkxo29tsvwZJu+SQo_rV z$%%w28f+J04@_%;p4asu9c@MjHA!_UCe)(7GGdp@qC|;q;xa#k9Ore|sT*uX-}-a> znxZ6*7-!NzS5Vp|jA2V?f;@n$cq8p<+Z9N1!dt{}&YS2~sObztgZUS{s}}ukPnUwq zo8q_>d*&lA!Q6I|U*<6N1@YEM9*@#twm{%2c9Z9Uq9Ak-KR(|#VPt5+HFVo-@I5)L zapVoolE*=x5<8{d7||Hl^Kq*RI~eZwtCQ5Yt05`-*~J-X-X2!*kZ4$)+R}S2+1}?n z8P~9=pzW_lce5b(>zrR39gwkJ`goAXRNGQNe!*Q6{l4@1FB9yo(QCYQe92ELbDk|2 z!3|Vp7Db_>#JHs)Cvt|F@;27f9d2sH$P3rbwWtv<8N?5^@=c07-zzRy>#0$oR{o0{74eiC>tg*KxIkIKN*r7~2XNVaTP;*P>>f1Uava)UD_;9T`9A zbq5)kOPk^qMMcAjBAwdKD}P`0xINC@0(6_b9|TpZkGq@P2OzAe7Q(8{WmJdQa{4mm zlTMU}uQ3xYD?~&d_=E&zXb%rZIZ_Up4}u!_t7tMk^!wL`GI_?;d$0 z4ODF^7_-tT=oE1UbE5LO%<)lf;?c$BkvCM&tc?d`JMJ&Pmj!nYajG^9-t*jHIyr(6 zA?aRHAydR9rw}USUF?03$13_(_!4qc4SLnNC3@#~xD#w*;`S_8H~2wRi3mA8KO|zn zM2WrVKE*_0z^D#TT!Ms{|&zw&TherGDE-v;m%qN=hj}z)@L*_mDCV)lPEP zrWB$DH4Q|Mu+DMGOnfCz_>`n-sSkD~?gn4Zxy-#mz%a2VM4r-fOB9fh;Z+>+J1t7d zs-(Um1FI#pUEz; z+Pb4fEgon@^2s^x%#`fgCRuJ`1RKjE-t#@?Js;W{3ly9H{~pHJ47YfU0{jJ@U~%Ou zp3RZ2r#-A@DF_vK zXvSpz0T=f8!)Sy{%n1W?S><+ua|z#%2dNgDn} zAK3AG7vfJQE3H_8m}t1=NBozb^S!e2ANDY4?k*D=5Y06mUPKbQ=+X^e20X+PsXeU@ z=L}!!LsbeAAK`(h8y-E*Nlh)Gqcrs%4T%_^RCw}J7H++${zQ6KHpU@iij! 
zu0?EgySC#c72z{y5xlQ53HDD?P?I9v-q$U)8MvuLwAYLri-P2OFeKK$#k3d=+i4vs}1}Ji~Qr9Bcrakz@%c+jnmwab8Wx6OV#yfyPQ;oXO^({ zIX2aNZI@JFt5ucmm0xh^ace_pF|Y&xpq1Un#F6^j`4neQnX_G-e6v`z6r$dbcF>YqwvF=5A?YFCcTdjuB6g&^G)@)MAWKLy&g|TeQtjjmYM#bwS@zPqmoG}<3W*j0bTNLZQ zTElix0~1LU28lS4;Ax&z4s8-4%r$>x}!3|vHUxg==Au9 zgqFu^yHH$W_VmUU^#pnk!~FPQb9yFzV|#U50o^}~Oa~tv&|}$Srl^P~oLbJOyD>dJ z22eI2gIM@f*p>a#p>S2b=jJRR)3o@u=Z5~zIVxV>D_Pk}8if0m0;SicU%y>XtRU26 z6a?wL;CZ#s{f|oeRq31W=;rMnd!oc^5`D?&j%YUIP3Sy9`Z`e)k8(S~qr_-@;678` z&gdJ~$JO2-pN3hfCs-u*%Xn}E<ju-+l;wL&f(+ABItQau%FvaYj3EyVFBD*-3F#}9O7^F$j=KRFq`VEtCfRBthw=FB3d|M$neGDrZl zbqQYRzhx*sidn^+k4#|G7tquD!Nb|?*xjN?twniBMWI6KZ{Zp?9ehu6-YMv&Je=Yr z0!0!{(j`pbsh;sm1!F>I)}tqhC{@9?u)fYa<>Yj0-veyIGc1Lw=r^NrY5SeX1IR{= z->0e=nh;1NR@H;8oQ^-awoOmif|s$5Q*kJY zNbRDc;xt@7;9QlS1v5UA&;58!;=u1PYlPUqC4aL2v9U2% zprg3fmHU#>(3H1>t26jW5t%jR7K&M(2vDK-A($~*Z;gN~;z zF+r19Qs{eLeLe3?`9^yHO%0--WkALfej4n}K~Jw0mV@EwUyjhraAa{@GYAV)>-g%O zH`H(UFfH{-Pl3S4HFg;!Enj#0qp<aD{*tQJ4(paM;mTKa*scJqi6SJgG&EU>fp}r zCM7&4fw35oSJ(lQw|5NNUWavTiaq> z-U+G{^uBhtH}y&Y7OL}e@q|c88#jR6CIu5q`DVk*e-zs_r~o-iyyh%tDAjU`NX*Zu zEutU9==<`F8jocwicY(6`cuO`$xQ#@=335wi=kv|?W_=6tEbyf zfq~nxl4_86h^GsG^Em2M^XE6cHtcn>A}zW!+K1gUW9U2Yi^zvB z_@->n^nT(LX4A6O?O!n@Hd)01auf~D$0Y2g^IoW^c0QKKu*4}b(geDI$g>Ym9W^y8 z5jX1XjfeuuS3SQwBeU&NET}Q{Y^~gW!o_Hdd4_{q zV=nJD_6E%ayO8_J>c=uKeC|El{UGD9K}$x=C`2T(0-(AFy>OSc8Eh64siT1buCZa zht~MIBwq%9!_*&%kbV`ifdFknv`qBe(A{?zTZZ5Oa zUO9^66e7FKx|`+;+fIKNYpXBgp|r&P6hKZ5kn4Jd%PBM=rsaIif~z4yLGp`AL^+Rc zy`9Uh4?7%mW|2!sN*&@G5Ucbd+|}x&*rSS%+%Z(r`HJmF&pp<%&XbMWo%g#hQ$7t& z5inIeprln|@l*DAQxNrY4CO)i_IozgNV^YJOh)u-qR8ZV_wE;qqE@tRxDON?7yUEB z9j&B3{FKx!uN$X8$CY^<KS4q}P4TPvQ$T6A!d@|1>m6PP;ZWLU!Xa(; z)h&QB6Z-~w(OQ$S)2RG>CNF5^c`hzum|i?X{Q8*xO7QBp(b5e}tYD|#a-S0FkNcf_ z*~pNFDPZ3e(%jamR@=9A_Ol0rHR|7ps@RVx=r5T*`g}U0mNRP$ljYy&(`|8wuDIaO zHLHLTR3fS~8NEMCela9(ZQ(GaV^e3XcFZ~9?3>r&Y=yZ5b!0s*{~2^j@0Z-nOsFI) zp%h_ygd6q~Ot-CG?#A&`Eg0Ff-exLZDNQA=hY|~FA!gr>BzBhq{@Y|E4}?xRIwCAD zizSW+0>RmsU{j|NsJLt+ecJz``7L!537yFsJkk62b8haL#XRbn 
z?+le5IU?vm!Bz*;Qr(rM21>&-pL9EzLm zW4ikvAroooOF;f2OQ><@B0bt!A-jXaY{gNAwbr1os)?%YmCpT>(5)L^7tGg7k&j zFo=X>Ud>~EM9twsv_FJRXWn+ixiKx=6hm?Ef9$8P@0_%ivZrri)8bq3i<~K!`yuEL+~Y0k zt{_a8g}AtVwKe9ZBm2w3M=JluRt{Ri^Z5sgo!M(-1V?Ip5?z98Y*jm}Uc*M>4#qAn zGj+XRLt|vwn!E?3p1`rLM4?xiSm$ARun1n@pGvv25Qq{J5N-;RQA z%{}mhW(sPI4V(M`_)`LcgOhrD%FSuqD`KUCKWa4jBB`HAI>h3|W3}R+3WFSxP(thD z_5|!2>TZN#gYV|&n?f;=;;Z=zoBH#8k9kzv2mRE&^6-5UtW7{eR>Re&ybPERC@!|J z8nMpr2MMIKb6(Iy0M@pRG@G^GT*O=&4u6x@P)s*QWfgppqC`s}Y<_z(Eo*G8T5}y( z40V1xat9xHg}VLd{Aon&>ZpBhj|Quf)8$GR6VHxtgjHH*0?49JcxRAM zrbQ%vo^LRcrHO})2Fytl$)-NLi zeg7YWXIpuJ#>lI!KKJkYryOEBbZCB1II{=t;cnHZepZeSdRQPFg|Zsn*NH4Syt4yg zpE+SjW3lSKn82*~`2Oc+&y|8uxjvr2y*n61diB1t&;JQ+bk^pqCJ{RQMUwu8MCAV> z?mgq0>bmwpZWU3H;;n#E6|6{6=^aIiC`BQ3h=_DT4?PJM5K+KFQ96ip2)&c22q;J| z0YUj9_VJ1&8DG;z=_dpN;JV?T3a6~FG>J>wVJy# zLJRw#N`O3+1YBzIzdr0X(2Bq4eYkW{>WJf3e@5+g+lR9e)ZL;U8QvTJPSln={vC90 zRcd@wcJ!~zasF59Q7aeX4LL=bd;0GI*1)Gn zVrOU)-N#PF$?*3rE4B7<4IKMZS|IQ#htIYC9+NX8FkHyDBCMh(?`< zzA@&BhKbo%Zb&8_<`I?zV%(B}lwdE^@6F|bJJyXl^fik<>)S@jwCkU`tUn;#pN0H& zj_2GfolGT*?nA{iZlQtD8EZ3r^M$!^(LAC2f%Wy_8&9t~w>O0eh!0%^n576d0X82H zYRA%^?;H|1hAG_PyLoEK=m)4Lg|A2Rx9H;ss6Yd8m*}Ae*}U`s`qc#9n_3ss1o^=A8ooO_j}e>srszrDxo>yUptTFM^-$uRe9&{&Ou|Hxe%u4m0^%*xkc)Tb zQLUqz=O-2}^t;6IoC2b2FN**fwwS0~-@u%VU`qY!I&EA7`O^XbyWbCsb=-a>BF`Pz+W|QkAbVS%uAkt) z6n#+ikzUUOw^(;Z^BS3}+-wn|tqo>fvG2fxVrB3FWjd5&7Pg?O+}v+4j_Z!PV=o8H^3KY(;mTbLBlD zFY)2-_0jHO7frbz*X?2%@4gl5?WgepzSmRtb^;sC;%Ltox1T_-XK`3f?4DNij|+J& zB$JL59AQauzvvPSRg{d`kiOWE!{eB2L@4G1YdgA$m#bZqnS;!ml~KSQvZ z%ku@IJp(CqZ4PSb;1Fn2K*;H)$Yna;k5Du%nlalw^Hu{D{AnLGA0MhU=fNU}qLb}? 
zGo*DGKc59+v{Zg}pcyY?AshEUx=8mninoE`6hck}_@!x-1Hc%-B0Qt1@$#?W0B)?7 zZ^Hk?CfXD3A%6mQhHCFE8E!-MukGx7Q9(-rd`_SKMP-FJ+Sl{_#I={-L*bMst#>bq zv3AMmAKUgtUPgKGv$auNGy3XpoeD%&LY4QAIy$~;?+junn!c@D5j;g}I&)##Jw+!N zBtG;zPZiVTB)7h8(v=Kq+(Gn4MP$9hdMgc12?^!pTc)kU{Mwo6)7N0{H0g}@5tRIfm3sh?$+CGAsyY+MyfaQBwNJaBgbruK+>h(p_jl&kO-u~Gd8w_+ zi>U#e-z}<#{S77Cj|RPC}v)J}HG8W60dZ}j}7zJozr zM+H{77a(n*d_1}VpvH^ex_(mmP2kfHT>Rh9I^CS;qMe=a8Z9n~=K+8&O1Yo=*VqGW z*P^ObW}HPo@3I@cNSfzADQD1csGDr z|6I>0N7UplY9Z)OaK*_87erKk)I4-sdUQa!V#><~1PJ*JD?ao%-UjBw2<-^<8g`s>V8}xgRuGrhw1O+-|1m4oWh>9~A)!U4Y zGMKi`zGp3=^xVs)iIt6XkeV&9-%sZ^=oW_p zESUn$N=6i!olh0{%=~f-t2+mEJ)V_Grmc{vTt{l-0;8|~hV$H6{p+nc0Cx45F;RBm z;zd)!#qirK`ev2h3sY%d2(gFjx0K|Ov%e%GzDBd2O^2rOcqUv+swyAVc>Yj#O6v5z zd-sMJE+_tMoHVTOyM!xIiRQ?d8C9;${2}7ckerw}tSlU=wz>V~W|D4w zUaK1zwRPsUU#7%4&x1U^1uyu-0x^PiHAbmHdb&ShQX?sbt4C)gRs6s3%O;68qKB_F zN|U@-j{3C+xqB8nZE^=(;&$6*@o7hx#WdB~GI%A+vb7?bH%}HEAbMYPB24|slLJ5c zY(fZW@lU3HmFUqq=M(Voj}!6kT~U0P;~Q|UYrLAhWajLLp3|qFey{ho`uRL! z8*^XB`Y;VQjr8=2nrnHoKK{qv%B{h`37ghnP5)ciPQ8s95Dp zC_DiNdvOD{wtW2x^%%~7L_{g@b=gR>dlPf$@X5D{HGQJ>Nag|^h@8HJF#{4eumF(1 zJ|@vu?!@65n-6UUMt8%{)rdjVppYK`Kknzdp#nVCf>U?*&eqkVf^x<3Dn3UwpB#p< z3YsIY&^{WF;H&7|r<*rThu}k950>T$7n=663upo!vpwe)4xV!Xbv;bD%k^(;;*>{Z z6fUCH4^!C=gk7(_ z=E~1^0QyMbpS)Ac&rJXxy~@Bx3*1*tZB43unRfLGg)Tm)yFQyZ`@UTRLvN3_x0|GD zp>K}2w>pNvtHC!CZ9eF96dzmBkEuD!EYOVn6U`tFitlzjaRd@?0JhcT%-Z?amHqVo-D*@6bd=Sj$MP2k?cOsnw@2H*_vi6R`@7)M zN<>Lk+P|Yw7rxG*0!FzJB>HYNvji?!|gB zg|D!`v2_zaW74aB+4&&jr97>llAbp3;d!0r}UUsuK?k6K!ia{ z{20I1hVAXhrZk1W5WbGQPgmTK)WE=~=`vqkMRBsGd>j^u>&F{)bempal`H+5+Wwkf zdrs<%IT5fA4GKDC==6@I8gcZRuKR4o!V2f)~ZDtk*%TR(v-GsuCJU2F5hYk z2@Zd0RFiPmyI-jKwCSVSAo_e8Fb4MO7TB( zK7QYR@FHny;Y8>@jcV`pJk@639_<5V^OZ8GEzEkI4x$<7k4iDUFL!X;DR(#H*86XF zNFuDtZ6pD@)v-NY@wrn!CwhK&a&J!x5AmU&ne(`tPaRzBdHSrkCHgJt$Z#>)(Xshf zt|1&6v~=&h?BYr>`gwfk*J|fa2I=miBl+P$EBN{s4c$jFceE;xJ`tvf-2Z#7;A8E6 zh9>D#|KHk~HDAf%Mp$|J$gpLU=@0uUArjwo2EYcoFf|MFy-J8UePk1-=7&S+bHV|p z?a|Gj?=vP6T-jkihj%KoJQZSGS&Fjo=D}lwqPwNQM`+K%^Yz4 
zm;%QO?Vgw`ISxXQ!}(!{%BJ~V@Q9TxG*uPbG_+`HuaIPMEmWW}A6JJyr7eyC!Algt z&p`@8KjTPgx!Zg}-gAWi0%_vDG`yVW$W?LCQ<4A^^;nBu_<0%m-x`}78|O27iAi)v zFJnC-c;~pEN7C*up30nqLFf+^5Sege=6S6XNB5&|2q%8KPd~4R#~va)J>GNdcWX}# zq}82KZb-|vv$oZ}iR;LGQS-B1AmB1gVhXYOdoWM0Qk(c-<01{bxPIkemLvE{B>#f& z+kKYP`nBU{G#&JNMy)d+T?x8t%=ecg1+grL|(&vB+TogGTA_jc%1u=l^AfNha)&rpWs0cUDu(6V5d z(h_$c+bx^>^rVZufI7Fp*IYC^R&MC=)kS z@2yhd(0nUrfbpwu>({RkxnNb3%m*vkmS&qV5PrXc?9>9j?6!2`dY}{iqWOB2!QoaJ6bjhCW(*&|t znF=QqbMBPaCzP=+*=^*199+o+foI zEYqvA7SMS<8F84w4;g^R(fx8hb&Au!Z>`dZX z-*`Cpce^j^_I~ZPZ?bv7o;Pv4m1z5>_?!lZ$p}v=rOD=n+BVldX?_wwNgwlNt?ezS zx{kk0_G-g%zBL^i&Y0c3o%eigu3;6@SEZ+2u=5QNY=1in4&w?#o|}ZEE%s-%&q(-- zdkcQ3_OcgDuBHvvxZWje1^dHlpaeG>F0~>HE35ut6gUnODeABz^QM`_&s`U z9hH$Xx$ZEJbBf&IlY3gh%XfU6tIpl7c$yUx=cDRXmhAS|ZLIi2lpoQ=oA43np4Dvl z)JJu~uPw_z6+wck4Xd9T`QZ2a8*q_B582{7;DTU17qXf_{_@tLq3 zv3i-~Zd*XUj)EgU<`%et*M5TVI>4O}jBL+u8}6F{Qs`Syb>ZGK;dY!L0(uHnN37$l z-J_hzwZ${wD+^21yoRT6-mXy$IN+-` z-KS=uz&go^R6F)<9FS~!%BN`drvu=W2w;wMj8|G+^gCLCM_q28hQ?!v` zt7K}w2+81=kXTx@W}^<`8hT=>jjn%jxnVoL4E7iUmTOZ&( zPQ<*|irfz1>T8hF30^cBBmFJC(0x3D(}z$s9hSe|M6Zl`qu6EFZTCS4XW$o zGOG%Q&zH*jRpz%=o(zJFa)<3~JwJ-LK|8uAaz7sQ-f|M`s*=xG>dTQ5S?HXY2}6`t z*sC<_J^_{29J>`#?dhX>ll#DAkj>dV+bv7lCy0n#{{bn%-?j3kw9Z)Gx_Q$wW0wJ! zJOTRV)VFxS-0M_imQ;f?K}OLn1u8SWzOv2d$SKL~KcOZ+_PohBTrj$J_o3dTZP>-q zmpq_}VVo97$aWF`@x&~~C%Te? 
zr+18<0r<1^=OZ%Pa*A<^E9t0?=Bs?J^VOjLnE*!ByZuU{Wm87Ti78S~wXKKw0_ ziYIP0%F&}5DB}&2z1A{V0?V#}H3d^1keS95LSwN8b`#^NUwe9v(ON3fDuEp$ zbU=ZiS42A09_}+)#Zd=?BW)Y)B;Y&Q+zC@#b|8Z?ULP!%y{}J(Z*Lu>s64PBCW^S1 zhF|)1DF;5=@VN`02D((A5C>9km>ZS#@tCt}1Vayy^dzM1St54p^5-j{iyM>PoE)NW zh{=yDv?4B4jwmS5TIa#wkXg@#*N_+TY8N`iWXdTazlqk7Ps9puQwZ1rqUMCWNO?u2 zcV&9<(50MGE=16bFZ%Zx_2B1?QXgk$raHB+rODc_nz~H{VUdDs7+tFONsXV6HO^YW zr+VxUsy6NUTCWvE)Go?8GzW5Ax~OqSKrqW`SEQL>oyK+NqE@)`@wWGTmFp$48 zzCC>!p+l{9%R8Xq=QPiB|EqaV4x`LiwRJc3cdiL7Y7d{7;kMB~&n-8>kSkr{BNN;5 z+ugXOY}W(9W0NvJ9?o%>y>?ytdo@;&?|TE6pknO`O(?%O%Y6(V+p##gQOX;7R&cMz zJOe~7B{!equCZ}#t}c2VbWW7pLzFjzCV1-QtE5n7P}Q}buM;iaFweQlsW-pA+HmxJ z3m9ipRU)lQ%3SsCUp)UklIghmO@ZenC&I_Jj+T@0-jNK6<+&0G?Au}3?kFu9vZ$Tt z$b4a={Xnp*J}69iMpmW)O?;VkvMxH8^Dx9_X|I@QfpxOUjQ}S>db70}VcNEr`x|IH zPhtd#P^`LV-uRxcJFz`eof@2=?)jRvF+C#pn&Mb9V9R~XJ~s?8rY>9VnRlHVVVm2a zJ*_NxaSEGw(L0iAC1ijVqWjbvH&dG^xL8D*4}Rj!Py0r@i6(f!<&2_G)&M)A7hcWU z`9T)`g(`?qBpT9`WkO-Jm9^Tzz3yXs6Y#O@84RZ8++4ecS>qlZdqV3pcIBS&J*#G# zvz_JbX6IRRe#Sv_@FDj8)KWZjh>gb)n8vU(HHz_5fh&=J;O|M>}g8;tw+4ga~Ad*a*<*MEKj-*lyJt@HhlE71nef%^YP zNjL)NM*pJ-pLn{jns7vSSXs=A*~A8V90h*OE(2_&buJ0QWR44pioejD`~PBS=3!Gz zH!V1K^p1Dd~QRbr6CT+LbM{owf*3=;`eNIXV&>4*)fN0pP)p z-hY45qzpW)p5jS+2*i}N3|ylda+L$yzIK%=x6-Y=4X z_f<4E)YwUT^QL&`I}j_Hyd%qDDtb=tyTzNdB7lt?P5km8T?zEQ*drw{UsQF@=FI@g zp%>qP!}?vO0<~yr-nw30O?suaUx%ulal6aAy;zo)e)spwH*WO&S3gv?l-kAU@0ONm zM_ktcx{v>RV#xZyZ3F$5RV@xvO@S1FN?}Z%$MAVOXXoQHM>FGkdV4MJ-%NS9|Iqys z?eYX@>7i;~UoU;kac1n|EbSFi0t^Ky9T+SM6>#;5986$Wl=dJ#mnNTDj* z%~9aOUnzE#${V6;KJ|7U+F|QDOpR2@w5vI<@d%_8<>HO{@$I3)}{kLX50wbqg z*yHejR{(sf9%AlM*O6k{(!knHEQx=&>6j*cMY5&jhJhoJjtG6#LLU*Wxno9X_f*)% z_wucrLa$8FZNTzZ-;~zWM|~AV?>mP6yUey40*l%Ixa`R2QI}vhqATOf-ik%g;~18t zdS(SV>*R;h0Gr}VerkF}2|Cs~u*$bqj}HC!&A{i)b24)0JZlhQ9D~u{ySlFE$Pb{$ ze$+`6^_m)67u3`*OIXyGX*)G5%R3Y(yU1I1?_7>KHGuKXfozf$Oq6z>W(1G$Au*jx znMDD5Emto{-z~FV3`cn0fOdVCSbp7)8uC@rC~3MwKfUZs6osU@UoWAWf-xPM2* z9!pWl;tbcSAWYb?i00O{k7Aus#I#~kOdi{2}# 
zUuqBU3}{6)-B7f6r92KHd`^Z0)v0YAa72s{Now6ZcdU26$=A0`FgHlQ4|{y$30*Gq zmctw(*VkXk`K0E(x0;_JKX&CA))*{1dKmHGAAP~^U{_cVj11~c-S|f(7L-HxAb?X_ zS~IRZ+!N>`G3?Erxz_3V(di5grnRENb$moV8(}1sap$U&aGnTzQ6;q10nOZ%U0tdt zm-ufev@97S#v~I)nDR4aUthAs>Nnk1zrG@mz91QxIc^iUg7w&iBr8XOjG~pd8Ke;n ztH8maf6pcI#?rv(BB^8ay5yj^IjJ2*4o4yz_(HU3@z(7a=51WV0hk&zPu=8n;bWBw z`ayQXCExFkdn0B8Rt9x5vta|2RmGL3wPOJ}TgU(Lo`+t+VUar+UI{(s z0S=T#z{m7;bf`yh3_Z}zYnA(nTUlmk2L!`)3xQo)UZGbG?#}m*C74B(_X*&0psfxn z4sIiTqwi_B!WqbM*Wewh&Ua3<@2YeVVj-3{n}Boj7|Gz?Zoe<< zNN*_#B^V+i^|sbnjpE}8=vsc~CpA*`E9X@pVj?qxT8)~;*4p)+>~aa_hD-Ar*ukE1 zW}#3u&L~B8ugsS*nL|mWMU8*~GQtSYJQ;dXU4Lsfxrcjfpj5!Bd~Ecm%JR>~wxtYu zwNv=|F|BRan-7Km_csRih-YVk9e$$dy{T|TD>=Pff;D>)ePQ;`d$S=4_Igvl2KjIF z!WU`dp3qhsBe~`Q&X%pbNN898a3p7sEfsj?wsU(;SL>Oy5)zZk;}2)3$76%X58v&b zR?>ywTq=js24sRwH2T^Mt#hEUm8gxI?ei*Q(heGiuNZ{>UomzTkE|sH>apu+dSmzk znz8PzJ9o)D8DcFdDL}R>uuymJ(VAWzpe%J(dKU5NI5uHt{wZ?T_y9%Yi+1~F4|;o} zTVna$-O5qfMn;mihz2zn7ete<4;(YrVyr)_Bz{TtTz!6By|#a$?yC5Bn%aZgWm>T~ zh_Y!TgC)>Tb<4dmn*(RY!LP3zfSOUs$fRwzC*8fh)=+2)2{9|Qx7qt~ z6%!h5g>)*(VIg$X8H3=8{7qe5WXS31>iN+DX?n`9q4(C=Wg=vlIRd_Rrd=WQW6w9p z(+InCvErJv2`lDaYpQzeO8(UWE86zdlTdWzs|LjtD7RYg`P6y>+#=^ZLW*cRv(!dXTwjue34Mx?QA$Lt8JCT5Cu;3ibV!-+2a8 zXyDxA=&|5V7)kDJ2I{oQLjDzXssq*Kgg>BWod$ z{YH&VP7SCi5-r{RJ#4)>)qYk!BKo2>yMWZP%Zm$`8HrfIq+PXGQ0^oxRt~w*RRTWv ze%P!NXEVjrJ8{)k2sq1SAq*p_?og@IzSAZSoIbIWL$`~LevDri_wKAidzEHTRc*Ga z#Np77B8wB>;1*6l{hz7^z}KjOfMp>Osl z0W7mn{r7NWsm0dilBm{Hd52c)y`m4t$0NSVH*ay&CtMv&Z|+T= zZ~VvNiJlEnlxD^3u@7)WlWqEI)Y1)?|7mZDO8(oB`cJR@uPC$sdCLE?W%r*){eRma zBO^P)FJZPiNA6#qLy{m{Z_Hj}q5}}A_Aa9^jBT(d3p9*w<>XYEx#mOjgPlKrURwM5 z{k*($|7GBPOcyR`s?ya6SzEW~l>-pYuk@u~QCFYx+aHu@DQjEXlD26F0U?21S;I>3 zj5w>lV_!`WF8(COxC@zl0334QKY!osg~F;2t6HFhlJ%-d?B1)`z z``0}PuB8(V<*z=oI$lyD$;{9Ys2pPmXfQ-(KC6BXB^My6iaicL}l zwU4xgrZzf^;1Tv^HB5S@YC8_szw~ob51n7c1GbdsDxs6eFT;O!ov@{6+h= zp-;yJ)-ec|~N}q17&8q}=&!7IT z!3!C26wA;rIqU5trJ1N12mtW6toFQ&kB<**1U${byC-pCh7{CKaJ6(BJSllCKM(a;-o{D&@p4l(yZIRV0E^U=+pWZ)GM&w;` 
zVt?~1M^>jZl3JCnImb9<_&+P$sh|iI1%zsrh!}pp^3Ct)WlXAk&afU0UEWEertIOa zo*a|ScX*W}fy`-J7XFJv^_*DiEEuMfCi3h~#N*XYuN-!ac*~wIY^`zJaIfSf)QYYO z`N)iO${?tSS4XTVIMNF6s*RrzE{J1FbiR{{%}##HMzj&2!=jmVxu=Mq^Op!u$_%Ap z1Ml;8Uu_Tjk%w!!c{emY4hm>?pB-L~EL`pX8em7+v%!TN5PuGYW~6ulk)-&K)I#;p zwetR~)|L7adGF46SJo-up{jzykJgj&;D2$@)3nU|+SJC-Z55M*W1pWVs?!^Os$+8s z@KZ^nw(cf_lSSKGA}Ea&jF#vTp=6(NMq|$4L7M4!a^sTc-8%&jxXgyLR6=P*QKo#F zo5}bv);TR*K!8WPmzgBec`=NheRfM-3$-;e+=zbQ?5{9arR}~Ygx^d|s4grm>;l5^(SvvK@vCZ9 zx3JYMVKyTAzPtC>L2Q?FttD>qX0tXtvVJO2q-^_pnCBMvURZ)od-dFt$U196U8?b` ztC&==3oNlGc|t9r;>B-acG5K-Sty)Me)9S)$N*pFp8%g9FXTQNZ?Uu+9@PehFq@0o z4ZznPROE+v{N(oY!1sI3TcW~W7&A+#vU)SE&iU%yLSC%oSI`0OSIPC^S*gFW3o)2_ z%+CFfUV=iO{n#3g_NR&#tH*l!aiyB<0GzkqOjmoQmV9J8mr9*JU%fXGm-BbYH%Pd; zx-H;iL`70?LI7~C<@3-h^732uynhC@(+=j8v6f)%+H3;bwvC{mt^bx2n+y)U#crlH zqHDatuDM-cyxvb&k?mb?iDFXaeH<(UCvjxiY-8U8{qdZ5$ZsQFlblVrshK=`$?^ zCV3L9tQlzSI>8zhr@tQVsNZs2crrC-fVgFKrGixbPDlTm*t1cXJY8MTLt{0Nc3SOj&KOoB zCm7Mq$E9Yf`3I#A?>?KCm)Ek+xQn~N>dIi>H!(@DAHUm9`D93Q!Uu#m1FUIeGeFOZ z{v@leZqV7OPtnAyHl=#X?z=~Ug_Qfmi(ijf3Ac5hVLhMm_^r4yRHCl&i8RPf7q3J< z8sUmyByuTMcwh#(p#reoXt{RwL9$(ZKWpVvi|=<50$jJOW`ZJIK@_7M^$hMsItmB* zb~}zty%9mTfCl6!e&o+NBnClGILj_)PLe8~{s@e0AFrgH?WCp`|I}%Jfcty8FS?r+u zEd=DqLFB;qL`)XnOyQ8M6!Y*E_i@XLHSM`C9X`ao5U@Lv=C!JTq*k*ZBO#k$(&(4- z^aB)LKPTBhiz$9DK1YoQB}1e_i|Q3_E&7XAN;+D;G3ZAn4V-Eaw$%_~=O-=$Uyx(| z+5O`3al(Q;E~Rt26+15$HNy``jf9`q#a3yrt_q5yG9<<`<<(4P#V#!$&&!h3_AjYwk6#>ye&~+(e-*{6vM7? z?M*8NB3P%|T;BtX)KH=l=E{}pwg@KHMOsdd3aGV!hyUch??|{>mD21{WAHf4I%sLd zh5c%8+nYJJ7r*xWXI9G+{>AoGzF546nw1KRoZ_2~%x=C*W4JiY9{Rc0*zQRh{o{`E zr$JkH@0-+|RKw4hW!$i(X;Nzz{jOO1(5@=&W>@>BJKj)q^T`x!XD0)=^$^*9YWm+T z1P0+E>DYm_&d7?pDv|6i&)I^dV0OVPrM5oOrLVk?7lc}4GmbcD4@%Pvdg@)7fhq}E zhuY|J8c!^XL1em~APRj;Us+}|i*NNE6Rpa@VS4SwR`VK+1%8t!OFcu!UfPar%*k6m zp3n-Z=|?hOSJ2!8GW*e2*JG?YQ^O+e;9@)Pt`J~YM9WSB6L=`TQK^$2N!v4? 
zA7z!dD(wzWg{R~Z;AeKeRZwA?n~Y!q>KUXq6dkQD$(3%6-y-@MxzTT|^EEaPg2(H) zg3~1SEYdV6g9UGmd1lIYk=myw+!<5$f_A(^(9r3MCr?U3yb!C0I^|$k>{6uhNclj= z?@{7Y5$H<<`VaA{X^s4JhzDy6uU*iH*!@8#SUPu9<3chXFiL#ZYJ6G6wDIi;+_&PL#ZrGNA9v*yxjg?R$+_^{qC3_n2SM z_r{pkwD+mnMO?n=Yu0sdYn+alGI%Ubkv!9vzQ|-sAPJjOMyAs zbvuEuz%&`s)S}O8D0N~(cNvry!B`0$^YPR^Y4hGM!G^Q*X#8icLe2ac=8kW>V#mMc zsveRP&ha)~?K_=)%T+eLsG`2rptFHpXJo5OUr}Ut5;a@Jl)bVz1T~z`DMH7ulYA;Ac?4JG48`JPOn2&K`!)wEqyd!h2&e zOC!qR2`A31I?=_(XHAW3X)P<<%q@LCiD4`{le%1y&X|47{KLnd2BqvDY)mBm7}Q;8 z$shirhO$#mhUB22^0*Yzi|@Qpv~ranJGwB8UT-1JKxI2V7||{i~n+JeBD2Je8`Sh z_8yBD(2%#CTregDekQ?#Yr1wXwOZDhzA9=jKLcVaGN$jO2QzUkY>bp^kq58WZ5`K% zd7tz~@6*REwlhi1Ec)Kh4tes1w>_EC=Hy#<2Vox5V{ zL2>*=PcNOFN4VWTK`mROs)&|@L6_bhE;6#$9?T)lD1Gk$NP*uKJ1kTeMUdGIS!qw4 z@mpcEg8J%{J{%lpXU%oeRJ>V;P#*Fskz+YV-%82c*n~T(Yn)yic8`zt8Pr)Szimh4 zYTn)a3%5*+oL0DC{&pwsF>-JTiO4Pg1s4u2p{e`AbkVKF-G2cT!T0HV(!YZPfq7Gc zw6yt+8&Z@qzxi0V@BySnYo@zYz_tK4z^(9yh;=s6b5y8BIP^J3L8l{+*^2nG&;pV-$8Tx5UZ^d5Q9+Sxn%q zj+q%_0GDpHR4=xwhJ8jIyP0@%wLkTfteTqbK0y38za{TpLHd<(6aXk2Sz1y#-Zav` zZV$A9e{KIjmFhqVVai)$iR(Arb3jWs6gzl?{yq`qa{aU0L3Cnwg-5y!yzPLi-${Bwp zuUYg{XwCifBcKm+6ElauB;9@V-bZ!X(TQ2h6XTu6wcXrnBT#w`!_wMhPE7%W$-8m- zA^7Ry=|3Dx@j=Fnr46uO#b}5a?t0GOHxMCad15_I=UZub=>ny4&O=%CPtMKqmgZ~vdllcwq`R(Me|y~tGl5h?ilga4rz%G3 z48g-ubbT+cMG`s_XR#J$f(kExP_I{;)1+rXWq=?lOmLnX`)efoBsHL!c~s>L;5cdP z*mFp<2n4fB<&}R)yz}l3r1`b$ykf4~Jl_6_QpYw1`{gWr=~}&nNd!f#@%AC$HNI`% zB7Oa4mWPYb6*CxT7UAmN<`Wn?>8OfFyh+sz?ZgLp!%ldPlwP&T+RT)^j<e+4%2n~EyUyVIS0b&9w*z-Xq!eR^=SJB#SL`3 z$a7XeMLcYV7Z|4XUpckoSxfhbk<|uV>6w`V%BLzrZL>=rW5p^b@}OX>R`)}61FD;Y zE^xd1ESIWvrYK~X7>Xv}<(3h9*p>UkUS+Qq94P5Gno4@BQdU#_R z+NTfQdMY0KqW@IR*`SB;=|*DFs6h@LH;aKzvc8HX?4Q3Gp#o&jRW zCNB)sEm*}~HpQ8Wqp|$L)l2x2pN&o>0T5onzX3Pjp#YnlUe8s9N)rD)yn7J)%NrAd z)ghE+8O?peaHgf_#0f{km3J*Cd9%x}AW;>F>Cn$2C}ZzhoKST23Ujp2R|5xyBNf?u ztl$Un?JpP$5#-L5p3Uxvc!&TPjqCuHB^FxVMWh?}Ll&|q6#WMTLUJ8#6(;GeBr#1lt4;g>Aq%)WtDL%9oMXujYFXlp)RYY8OoGGt{ z6(8qmgVT&pA4F!X#r_ZuE9QJh$7+UKhJw9%zwuV2?<*^`6Ma?8p*Te)@^az6{Xiwp 
zBfPpUl4qWuYav++VJ2xRcoy4neoXdOsg2TIzm^7GOEmKsV8R^Cyef_#!}qyW zWS;RTt#@6q*bD0sir1{df?m2&)RpVGVz zGtAD&KtNg$l9AU>&R#aD(gQQ*m(o#g^>)Ww9boB7)^<7POX?iX6it`3B8BtaLgWg4 zo@pb92P>+(%Sn0F?jfK@4i*$62(sUkpcJ{1HkGV+%cbRUaYydR*e6#JC`rQ#Gp%_u zYyO*M-Lk9J=A;)n3G2PhDK#eT^&+)xJ$7+&UJc&5syX14T3p((BK+=cVSkZB@mn1B zYWo&{7Ob;x?`umD!V^<NV?dN;;O?;`5$D5cu8mY~VP^E_fWdq`{I@DEiUq_Ds0i2#2v{fz0s^9f(wmfkg7gyUC59jhC;j8lFSos`P^^$&3`j%*1T(7Z)nMrH%~cdpS|~)jU>(tUsJs$cib~UdCGR31Cz3Dk}J;?DpVX{>Bu#Oxl9;w-yFoGdT>kJ)dAV5H znm{sW$3V2b&_ zVz(W2anY8Gklv56GUfvKcG@UAO19|ksj+NKh*?d4z~f|;E-hH2c;Lm!&%ZM+Yufph zMh?RRF!X8Vin#zm;iPK>hK{>5IkC$j$=N{VZY(Wc#w;sIqDxXd(KXnQI>9-9*OO*IrA~yY9j8F2mTwK zRq`xY53wF{bpc9l;qq!Kc9l*{#Y%3xya-~Ea%rK4nN5)kGn?9kDAO8^qNH-K$tv(k zfn`#qvA}ey=&`JdE7fd%a2b}EdHbr7tbzrZVPrbFVJT~0tS@xCBuVG&T)9vOq`k+_ zXgU6Z!qQ7r^r6n;N@>DltKW#^#w;jkq?6?1yP1QH(m$NDnsUSg5C7yr_?kCTOANvbuWR^p@;rS zp}`luA!C%zvfASztq@RZr~t7D>p6XGe24GU7xChZ+bVc z#C5RKj`&wGdhpKqaI(O>sqM(7d5}&ud?21jm(tNHJzJCKTTFEz!}sMY)Vl9;^>?ek zA=v4(JC<+$^K)dOaC(onm;G{%%Y9b-9EhdE>o1Qm;k#{ty27~7{R646(vY?CaiWFs z6IT+-y#usaSh#%qwy7L!ZOsHk_6Of7bs5|Atpjoc_k&R0oS(*(eK?QbT{mu&c#xU9 zi{*5h1n8H~Vjjsxix1sO|6Y>;DnCcZUsNmI0!dtaSEZMZ+OwB^a~`Bl9r{UPZ(T_& z^x|rGX{JE$MLFhxocy+Jqz|br<@< zFChv8HjeCQtWl)BFGPj+?l5*hN+9ZE(da3d_Np@|?n9-m0>UI#zkE?LP zuJ@XViy_WO-@L%70%cirDGF+isV6&S7oR;caG>{EBD*knQ!q6xOcCq|}tOhzhxp%$!#4rh-42)ssUS~1OrAtdDg(jFY;zP8?|g<}uMf2>Z)_iRvD*N;#4 z9F{&pCCQ=f`bPTW^AFY4yE{_O9z7(O!irliyFl24%dN*X4dq{Zs2sFUmu)x{vj0lM~H1?*f;fa2e0pT*V#16^&bOn*BO^r2b-fH>!B3hZ$hZFu2=G z7a1$%S^5U0sE}a&?T?@dV84*IYwD`UI5C1Ku{S0~_>MWo4HDxns6Z;*^;J~Lqs2^8 zM@KC{A4vV8$rsZ>oX#Xn8qhlo|9N-WFSPT>pKH~BEEQBbLwQ9zq`4;M^iL>Ms20ac z*rjsw_ko)khG`9&BNTcRDJdyMef~V9euT`3OCKIS3614;KA}4+=d7ss^5aL~+sb~< zt*bU@Ye(6ZTnlQ4&ueirGqD7vml+x7g)g;u^*d#gE~uvxC`e>QcciemLcasoK7Mws zcI!Im$ljDr#J_)k&`WVK1^P+|ezI!HMB=E&H$S>%_xFbHmn2rdHapI_!i&W~U2VQF zk6yO3Jez+K>?d@GgI(*e;-jF!nTpctid${yQJ_5YN2mzoEZ|r ziQ}nFS9LEYPktP=z;V=`slzj+-@esW^h@qK5nv7?cMD5Po;e67XSn0w4^UitA&2y5ut|KE}hQ$mWNDplbp;mTEZ1YL>LA; 
zkC4ZPggdRQto%g6rOBbAsb^SyAKr$C8-!uMw5^2io;fQisUBD#hT>-sFlt))HhZx zC>{bh~_#Xld~^*hN%WiK#+n}%6ioT`O=DS*>sY$jN8Cvwg6(QN2yiGv~eTLjV2*(_YFdR-VY z3M@we4{GFb!3+Y`pF0GdYLASGsr(9zXB^fo*b&p#wNnnLNAB$C?GR?pUA}x-6<0-- z<~$J5A*X=Ty;V-f1U<#s-_+YYe8#)n{77R1viO}nS1p^h*eq;1C|;kl?iKGXl+~%K z-s9q4;bqxxlLo;TUNXT)&m;X@isfbbb?R{q0U?NsyM&n>3iz0mQFHKSGr_-ZjmdhO zT=sp9;9qCGOArN!{oxSg3`>@2c zCpaD-;CvVPjEk&$<-N@8y_{!Bv_e(Ta55?;X;Lne^d|WU+~JQ8*Xdn>NAeYOR6kw9 z*2F|USGqP(mf|<}O!xH5%(2tw^nM;o%*0GuWlLaNpcLA98KsRlPRe*vOc|cHLeALS z`|zf$J6N3iz_bS+*TGFLzrJ?%W6F(M+=w;&(blTi8h-$S{c-JOa!7(lVI$_x&#vtaF+<6UWl;qm|runhgAN zYxof(R{M^9vpHDV>t4gJg94G=1N%JqNgAlkJTIqGj3oJUddC~BYAVVIGiQ5d-D_$O zGvU}C_N}ft{P#eG8k1|=Xkk-rsTW z6I@{tRzzN2aZH0huL!UM%F1>^V66vws)648@l|tvDU40yR(Sl^Od6486W*U|t)ad) z3bJ1Hp-s}r7l_$>i7@raB(Jv_oN z@P6y(+do$^irnkFDE&$Q?s1>LewF<{|AnJq4tzb`)Y{scs=}pbYC6ge`2M*@DuL?p zpRd>t{(l)O`=7dgT}+i>G0Jn~#KhJO_$Tbzde5g0^BzVDcDkr>9-y*oLfdxo{~0W? zA8)&JunBod|DIm);ctIx?`lK-jCi(ws^OiJV^^^l(7eiU060bOm8|F>ZVw09AoZV;#@P` zI5V6RIy!d5z3+QFj!lW* zxnpgXJ@T#Vc^!CFlvwp|VW0z@N{dj?GV{YAA|$*QpHpT#8%tQT7$I_l$%UCYFEhg| z4G1{@PVw!V?XJw*wf|%)Xc94%n8f<7zOsp9Vj?3mWj1~nsHr{DdGzSry+rpySPMyR zQ-r`AQ>C>+SxKcf(B8{wnz}{StpqszNH-j1aW8jC3`IDf(QcsfU2mN_@3a=A#<&10 z_tP)a!ijD6K@1F!=J-5&9s7ETate(2;J$$(cEY`NecdafprFb6LMs$pZ>=`2^ekbM z&0xQSHqp7rp7?U}%eYe{g0x%Tz6IzqXy$zxbZAJB@zizybgVT_+RMiFc^}^mb|@#~ zF_k`0<_(Q)v+uqD5iu0)&urxFxER)pR^19HENKv z=K_<|>F#ME!0~H`?tKIL&$zmVxVn1UHutF`bNA^D6yXB{H|&Rs%TrTRO`qqb7e5A$ z<8JypT0ytT>S@==&!5wO{CFVk^!V}XK8#6`+leALT`Djz2xU}c?Oeeysz9q1_36*7 zl)P*nMe7UaWG^icOxlLF!~{yHNvlgU>foq z5g3EuFdkF~tymMK1@zIm^&7%Uc;^TothXRGYw~8m(saMCUw zYSYUEdD5JsS7pS*t7hAq_s2Y`>1eEdvBZStNr{=3J^$r874w7-Xj?xeQGl$mXG_)y z#2^1*R`=!)?tTtvrOU2vUUZ7c2?a8~tn6`AbaYPYnMDVi7u2rZ89CHIfvBDfwa~qf zWgR}%Y*4R{Lxq=3_Cr2R+4i@eudnxO78B5LPVmQ_~r zh=`{n2X8ylWB+Hw3^YwYp(^>mwu0vSu*MVv4|H*H4^-u90TZ1Q4c`A|^bU|*>ppc! 
z5e7L)w6Ziv_dWWIjz~}_hHQMLaP}`Bgany)Oxgw2z94+CcBQAl2gi_;Gefs28_D_{h^qi?rCg zIr)o=?Y(Kg3O%I7q%?fu9S*3i9gqL~eu@GmrtrEqT(clK2sAAzLM}I{m%#CTX`bOJ z{}N3ABm_hshs-M^_6gj>YBzQs7Zyiq+4-Gvd~1xv{hxl*q?G^-wMU`)80jSSXYOUO=cGG_ zj`J5k8#>YNd2-N1W^oAb*JvTZd>LHNioof-&cvrzGi`Yuafd3Uu&!J$|^BrbO# zc^J7T%A2fy9^aV!yEKBhY(>9iToRzcvKvl;R7-MPCb3?jE}?Dd7MN%V+!jyz0N>RH?EyZN9L-j z$f|kDLw%lRgk2vK*uyd3c&KpWF+k&OHrjkl<8_h9W#m`5?W)q+)Q^KMqgy*zct*K- zySATS4Qi&p8m0-bs5>_CewnC|<=)vZM7R$-qjwrAR{Rzm-jn=NxwpX5CgB#-^!&MV zKGarSMa4t_O=Up&M781o*w$PrE+?maq^RHCpuf|@iCIV2XWNG$mS9s`W&p!6}I_Or@@wqc=W=TN-{ZOn%2c$*r5hY-&mu@Y1 zrcm`eR1v)BDerv5e*#GFE3ds6=?<%)r*Y*|c&p6~`Cd^hLkqh(6sK%k&RyTh zV%>Ry>w$%ZeQ+=3+Q6Wz%j|9Z;JH5ovOnMb?x%lL14uBZs;UJ#hzEyNiR-xQmcs3?_)Ku@K$d^F-u+2aD-rac{%PA#DDpu*# zH#FotjG0%{Q0;1FF=L#|=YqFpHGW~!5O@!3EtoL?e>O!!KnCSD=^VR8l&zRsBYU)q z`=uU`e{mm_U;4lvz_`ElUdqybjL5xXlPNWBvL_{X5M!t~v*KR;U_CKIuO9zUFWzLL zWaG%ToA$%1#EOf))#Q0T(&@(1RifEsH2P)&^}xl1h{Mu1b{KkD5`2%X3^3C(k;YaW z3*@}B(<*)vb#)L%=DTSHN?#sF>hYr8)iGS3Q!^rXOn zyXnol9D#wx)z%rQao>fnT~jSDzcp0l^NJOt^8WeB+mw1HrmjWd*#E^n~+)16#@;pMw7%E%alPFG#!lcveJ6bDMy zeMA-rH|n^lu%*Bl@^9^|b=~>a?JoiQC@9<^p5SGKqXH zMoZws4%2nViMsJJ(~diX^7|=l>7jq}q5sACz9?>tsgv( z5xsRoF=|}jc)88qDzC-ODLCyP?Aj^@1PaKpCZ}Sn>F@-yTTyruSTG$`e0(TdSdPQD zUk`10P>Dxmkg7`QrQ+=#PTET!)EtlGQHrD;nkpSkP2lHC!u$PEl{tmEYCcARoDuJ| z9^4K4ST%070dpv^p16llo4qV?)*cpb;n7)McY(f)uS{}?zB zU|-ehZsr^gYF+>*5WXU&cb>c{uRcB0Dm#GcgVwZmWiDU+2TTu7N~<2@MZGqkpxg-1 zjHGVxO$a0jqUS_UL@&PCqXL7Qek~79enxgSeNlx)9JvV$l`N8K->Rx&5KemhJ7a*d z6WDQ)wdskK|08z%FgFXJIVM26!~+oAyK8Z0`+-qA#B*97<4aOVqE|+XnvV|A*QWDC z#Bl~j+eW8XPEuTdBaxd&;+C@Q_F%B_xjWR!I(w&7!2U~c*Iu`W8V%W?yheT-h z$SG86vJYRtcbg{IL8qHRC-U<^M6hj}ZA&gE_b>Jqb?W9{R$&qk;-sJ*EXLe5i0smJ z!VB^NcX9-#-=p6=196*Lyu7mGTW)krj6=g@(#)Fdh3O#vJWA$9iZgwQ;_EdOr?(}O z_b2p}iSJUb+ByU!{QllxjqU8-g4`U<4u?`x>!5@Lj&0J;LnG=~YupxU|8;MxJ{`69 z-3B|jNbdZ12)R?1Z#`{D2z>v)g(;W|#X$pG-{10}I`Qtr0`7f>oQ7PvdCK**of?j+ zPKPB%emcv{A%f>Ur{Rhh>m?(WvScJAOO1OWB~|3o{m6-dL?*$(X)Up>IrrSi 
zPaKn%*l!WxBgDg(Q#{{#G~whp_Q6x_6t$lZ^ol&+0_}NcRPcyxmNE6kO`Rh4EBS-o zoouedDR1`2CUI=dq&s#-Zu`FYx7*{FyVS-p&Hd3TgYzFNQ?#P2^qJMcwSDLca*P*& za66i8I_K6ij$ZC7%}cetxzE{p7iQinVr}N9J&&W*O!9(DO&9{ zH;@nH?+jUN3=buyJzeJrQJD+^p z{P^b)x#Q0xa_fmkb{?#IfCCTjbg*4){=RmW548yKu)vr8BkWLA3kph){q24pNQPr^=4$ZcNC4A^dQ@t5fKM3u9yHhc1#0 zahnM;9{M#-YT`BfkPZap$+Xq^tezgd)xwG`-|{8N4)ji( z>B$K-E>X*x`TGt^JUe}w(__-*Rt$UU@7%$T8(#s5a})q5N*o6d_x8-5;IjA@#2peE z=9Y5j9~`(NzxBx}PrY42&pDd}!I9Y( zWF0>O(@WZ(@d<4^k<2Tq6IJ4g24_QtjkP_wPn3yPzOe+8o?3OWnZX`l0N|pPXhd1~ z*__u2%q=^Fn5!nOU+7T)T!g?NB~jZd-_iz4Q66E-Mh>Q6$r#v7f9HwMJ9d1e*f@V7nrV|+q)y_8exYON?W)*0V*(#a&T z(yd~h_nJKcS8tXFU8!@l%4@=J&<>k4=2$IQ&og>TT$obhxguH~R!=H^m+n=Y>vPYW zgeX8!%YA(F@t71~loI&y#uyJz_g(!c_MO+UGK%VJC#D9V>zR5)ceu%=%V*6}o>uz7 zOwDw1!rZX)Qjj=ij&nst{e_2IKdWm9nRQO*=RIxjW*bxNFzHg2nuhVZnXM#e!w(7& zZ@tfPZPB$AHVUCd4@-PLNE6m{yzsPSs|8-7d%?UXIzaUNP~ka~^RnB!1)Q9A6>GKc z6^QB*>xAwd0OdF?GYAGvyl!g|C#fBZxg6pkIOg2{(V!R%C_c zJ*dp{eKVD``lBEZ~{o`R|oi=GJ04S?&`D#<8P>EC&L%+(gh`KiaU| zW4O=!$P6JW^23yz*^LDnCNLwlSZz>>u>95iG|~0T#}6Zk*!k=F^R4)~X4j}{nX4y# zZN0N~syw98hD8KF6>~GaX-fb8{)@qN6JIC@d+br%`Q#S7yt8o`gd;9wc&@DH7^S@I z)AitDSF8h%lXz!czoGs3eWQ8L7tN0;xKxx0X{OM_f{BZygF9k9c@0S)UKp*0PBW`%Bz_O-bX#=aY>w6lj>& zp3gD-H^me__w+~r*IQ)a_2(x;2y%m!MaJnWQ$!z!l8PSLwza-EwbBU1CMO|Fi(x;F z;{c;N+ME7XWI)UG=uPoABZvPdE>+El`%2xrM~saKM*gNzZ4*%UzkT>PtcS-s$=OdH z`^(%19PdCL{-74;)?RbR(9|z1k={?DG{ot8&8Kgbc=>%l3ep%1nsseIeb}cUAuiJT zvOPlWVc^0c)1t3GhcvOX`5S$WL<%3c{lwih-I%+08*#pS93$E# z#l_qJ+XOT<8m_zMTP1+vjI6#5opg5*O^fQg;gdIAD&DNyHWQH0eLlT0Tctm}Iul=d zNwb{H3Gr__hYi#3)_7Z~ZnI+1Tz7IjPuy9e7FR6aq-HCCTKw^Rfnr#>7+T#J=61!O zZ_kV4agBS?7N%d323AHTQt$-ynY@Uw!0p|>S(j?`iDjgo3Y1cqt!;Z3WqXN(QwGb3XZ8)+0RcWX?_5MejoezwA5DsxARF zxu+;Be(vFlexYSQ{buYe<6eER$gs&8*_C|nCDwU8)E}+TT5;~uh4aE4BL!CH5y~G- zn8G33&258JUJG{#LL;p5s!lC>-#>Ha_TmYVHJ1Rn789Ga(KEw1S)aV>-0V6zFZAoa zYxcymMu$8yi{|dm&nP)Ts>pY47KCBRg}|SVc28Bk81+kpFxIz180e)(UXe&#rFx_y^u^{HQ{OsKF--{^?^-(V;f;&YRvuk=rtl z)IXjf%o*#C_d5GvmT=vr8>5*Ihv60WanAS4k=2y$*WRIToPZa+z)b5ou1Y|dKp}G* 
zI4m6oC0d;b5AblpB}#Obx?7=_n4uKJKIv$^6u#tL?BNAwn7O0ZO6P#P=xzypcLMcS z^*LBou93;`iu5k2B+ED$K1l`@i~b~~Ak2Ay@$eFvbQ3B&ZhcQ6;}&sjW2H?Ad!Nju z8Gr~5A0fSc3FrbvmdDsIHSG)gu^$_p&W!-P0&LLX%_6s`tZ+F9=-IP^k?suOFefM2 zLG2pD=ZG$5Bgb09<96>qma5wjpBef4sHkc29Vl+4q7+J!3~K_VSeLN|lx}d_Pz4-C z^qJ%qg_M5BYY5Ok=mXxs&Ed5VBj#Fc&H~7(KnaUYGyyLPva4G*VbZ&%huvGPAYBx>b!azNBCW={q6a3JS*rcfhcWjEhP8{{1fX zY=aGRNxGG05u~OTKDQBhXHi^-+Ge8sf5^)I;x^$RNM!iDxb0RH4>FI_QzWRrOz}dlCdj8M5zHg(JZP0_!_sV0y?o-j zZ5h|DwP}Lkj@|n^0kG{JA>A5=eblqUqE+hX#LoQw)F^!Un(DP{*G~9!HO*f`P#5#u z=@A#FrEv_zP-3-y);oFi)>ug|s(|ikorKyO?C4Fxm7OI~me)TaxR#yU8C;SQGx+uN=}b>S z9Kfn*)m8V*zYM&z6g3^-uo65uJ_nBn^$b}UZ6^#Sec=YaV`%kIww8-JA(jl@TgCg z-!?9kFFxtcYy~h~N(s4dHogI5x2>?;=aLS6E$UvmfjzH(Fxg02$NH8w?=aW4p$=Sq zeWiwm02e4RG9`D$6&AYc8yeEF2OW{Z!g@w#^CYFKIhY)6BfI23)3R+BLqNt!2Ikrm zNIDyVUj1#Js0~2S)wW8gJs_~o2X_`+bK&IT)&-tg_Elq(#y%UjFxQg-(@^_=RB~M% z1IsE9JzJyN!uRgHvbs_wlAT}Q`wg>j-qswL#q;vGQMBpk$@K26W7 zv!M!vg(f(0n;s|;k;RwqB#%#t%9@^-36$SI5lThCN?d-`1#-nG<91fENu}w+fa`Am zALle*>&#MT!YDfhxS9W-Qt2rY^p?wDzUc?;55iY2t(qKs2W-Z8#za?uAs&cN!V(TX zQqwSK3l{)ie4x0!Pzng)0u5YHmo?>bpu8<^_)nfrvQt51C{X!)00Fs6g8cj=P><7VXRcgsO^`JK zZ10e;x1FuhgkNUeNkXv4Xp<~**F((mJWD5!Mb)=j_~99WsNIN}DlQnK(D?J%H9ox$ z)Xy+Z$M#WBU;BkUnMf+p)Qy|?<@;pN6OGxU3o4=;f7oa(^B%`Cfqh%0&>*0Ns;FGP za%J^%MBD4JvuAFbYOAN}Juai#Y(Jp@%5oNCkumKXJx`tiBx~K&c`@=dMWz;+)3qsdx*2w@Ykigiwhk9IJ(AWnf=K*+v1%Gf&H@oU;qM3TGREI z9WX-pjA|N#7OC`M+b?};((1GiN=nKUU<;uQ=-Lo}C>vq{lNK2tf08oaYj_0b^-9XI zr3=)Njjs`5<-ceq5J?7(Id=_hj?)0UO~rpNPRelpG^cqQSldU=f5F}pd6<3}TAlNZ zek^i$t<$8&(MQx0JWh?3&<6QpNa&s*jp#7>~YwHI|qLww@^QSS+;C%i0|5jau zXe|OtsPO62O_k6@PR`|G-&SFGrR~e1VvT+yB7A!KczJ2vC;35A23%(-B1_7)rZZ1kN|5Vu>47niVJ&GFrA*Cc-o&Fh(w6xpg!uFc_xB^p85 z%c}sfLz;a`NVx8!aBbs0$~L|M>J`8h6#%T!W5;Hs*9YHU*+=2u%uND}&vhe(8-Vla zrfhV*2dd?;N14%QrB92!t0LmSkb!;?Su*jm$vX{>N`7gKp=wi2)>|OrGq!iIvVrQG z`6ZeJ;kXdf{i<`iX&(LY`o`KP>a=i*|2vr!yqdS<_Y}XqY$~WnRRAI@!8Zt;VZTX3 z1i0dcc$U`m2sXRBbD<>bzOy(lE(HIwSrCT|X0l{<$@99ENZ@@yGF9+e44fpM4D 
z#Frz(#Xdgez7z$}=A=sndHYAqO#vDAzCj{s@NFf9Tr@212yB>9NF?Oq9+S)xSKvL< zR}#0p!d2D3iR2YE8#sl#xrZ`A{`!ZqJCN;GB}Du?0Ggj*{rJ>y>8xD0z~df$ZEZ}u zO)J}lSI^9Beh5rJSMKAkADJ7&-NyM(5&>mLq5(GS2j3-pPA_&^odsCM!<(3>_{iK( znt~`MV^_JA1qM4@#rh>-WwXFCY?d!H5dx(yX z&gvYNG{rcO30yz8nMb)JocLxbU`CPPDc2Z92P9!@F~Y)PuF605O(_htvgDkG%WBb; z{L&mm{NmHIg!d9`?Sx-}83d9+B>FluKU%RCEq0}-!PkGmj7xksMukQNKZ*Mcb*}puS*-mwok|t?{>%Z{XG)vHn_R*ejkh!>#0^ZgAKuJM^QMXrmD6 zme!X8Faep>bKLwgb)pB%U)<^zjo8?z3^)e(M?{U4Z3(yS-+9RqMNLW}Ku|xx3Snvu z)+SSyE)xs!SzZbY@ZS}4Q@P^SmwHToIqpe0M67zZ)E85MbU>Cgku6~hx0rmV0nM!Q zzy*!vzO3`UGYgs(9yxAV7^0M46;{q++DV~kl}KN(1)3JZs>dLi9lSKW6P$F4d$>BC zB~&@XLI3ny;A=S0@-z{Bi)}lxW(qInhsH(^OAO(8_X%7dCL~fRj_R-l^p3>bLQ0|A zB<2KyHmIiVl+%kc^ugm1l^`d;H)Nzs^nM{qmD@xRe^Lb(7zaq5SD>)hX`v&e6u_*t zIJM5VIa~Dp?}!~NV-P$`lPy6YjZWcUosi{XdOUDSt(?g1a}7W!w*DWO!hlnc%P)JK z-n3;LZ}BuLZNn;c?#a>Kv2BZNYR{WP7g3PP{j%b`J7@b!g4skjrnH+E2dm+ou6?LP zHErl2CpTAbH4BEjw@+wVqj)IKKyGVTsdIo0m0-x{M z0im7gopIFQ4J!Q)yFb2}K+BayyN9x?svbcqJy#MZpP`MbmGDJaSYDQKweMOVYuGQQ z!izkw{nbC0P3I~yT9Xi!&^Cgr=(NUUsC=IVbZ2#>W#ifTzJcXk$3fb)$2 zJFr5NCCXkpF5~^0eLBoYqD)89^qc!Cn8DJOKG1fjS{!r7E&y|1$v2Wd^%zx6-CUEl zN$^Re%D}RVbMK-IFwa+*GbkX*-OE=g-ha04tf_x9?QAnFjLy&ezFoH;j`+w`J4kl| zSZmNe!3heaoa^i0^`3OqVzH_b9f;?WK3nfV9rz`-Zrx1FR3{T$dpq1g^)61R->&D! 
zC!YD%P;2!#yN=`(v1cDk{LnIC+lAw&IGT7j4>Rx^6{WUrp@>Sf({n zT4h2jeOCj7VX?j|Q^Q5#9e{?Gac-gf z?AcUP1uUiXA%C?WvA~k`v~;YtE>7xXM+azLXCi4&tP0%4eR4)LPrcY;{5tgb6sD+M zz8$}7ZD6pmv}K6KKL)qHV!VvTYuvxzSVl(n` zQU;C6L5{9*AxWM+~G|vp-n6kEP>#yG)=WBB-8v z*bA=f(BjML~PW5fTWl>hT8Q%lRuhh$jw2zPW@lWVK zM^?gfEkHCjd*9CU#m@--09{+)8AS|mdsN%hM|iN{9r?ubx$9(9tV9apRU~}34M|ZD z4WIhUpb^AF5?YNS>(q^wY2R@YYiRBxAgQUUHu+LY_#n@-`|9~1DIkvWolIcOv@9Jl zeY3F8&Qy<(W9~Mr7r#}WgL+hI-^*#}vK^_sI9C^7cMmDZ^xIy zQH7u&7KNHAe0mK-CvqcmvwMD<6Ek9}`-_|jC-`<2JZYM}RW$r^uEda~m6MZ0V+i@7 z5t|^U|00X$crk=s_>H_X=P<<`*Vu0=*E#1ew@7aCT^dK&_s&F#nf9fs#MUG#-~*2V zSOA6z7`8-gyh_|?B(3%unwR;<27a1o+34cf#?Q%BZFC+h>0mJP6lzwrzU)Taylj{x z&)^vqcyPsN6HR0-LaJ#=F`jD+!9*w2i1i=ISdz~-j@oyid1&C3=S3s<4Y7AUdF|T_ z4=v>(=%Hrx_GUmNH;56FEB0f^EG$Sl%nrB^-6$SB$5}LNmi3?S zt&G5H7jb&h`9sFN^Pj{8>WTW(K?89i*k*+DV9|S%#&8G(^fY`?K?E#4LLWiYL|RxB zoxv5xR*=OZN{RafC3?s)SNS#_hH}ff`kg5g%|{UM9(}K3`&fY?F9W@2k#RO2SOJ5k zPT{UWakZi%tV7dB75YBrV2lH3!&xCS$H20wC6nmTq{<_UYx1scZl-tgK<7~z_^>x$ za*5T%v<-7+0zsXZgWP$J#zF>)LLT3}o6fe7{6&a~^u>5ql#1~FE$!QkH;9UgL2Bmt%|G-940@^0;yzB0n^vni&XpE$YOn&M55KEb zKTc|TW+0`AYr$w7$EsOMN?_1w2y=adJ{i8zBIxV{yZZ-c)4>@TNAM?7;b73yiwkZ%P#eDWR?1F|p|?ShFmx_uHrc%#vh zOY~1i7vxB*(?aD=HBREZK6&-X0OJP(NH~!-Qce5aeV}?$d`12U4e;qTn`0IPr6hQ7 z7I(O6KYb~_im>VXU*>*2p6G=eP zH_%I93xG7hFaIq7Dqm3pUm1WDZOd{#UsLK_Uf`W863_lX4vF6dwaytN!ze@A;j-A} zo`68ERm$cqS%s-9`A>UCXU|n)j(Fx1a$;66>B+gn~*;k!1A z?kSEIOSh;!0`ub+90(Bp@@1~4LO%#5YGCA8ACf_|`U0B;Sj%*1o;A{_`zU3qvpL~+ z8C!-Db3bcpwy4cb)>N*7s@EKirp%2(O&2@NupFi}5D5~uwEeGwaDj@l#!{qpb9H0s zt0@p~48Kn`P;9AF<&Ctv>he$VD=)1-ha~#$3IVNU|CUX1AaLI8PH)!+iE?e9jEoMOZ zBj|3?X(iEo>nOsN*EWi3Brwt;@ADb}sV-sE}Lep2b?Agp-N2KX)f ze4y#2e23(_5>Z{_r^TRiyYmnotFczDgUcC2@@1Fakt?^2M@3O!<5;9sRHI(oD2L!J z2Mg{00QeKm<=ZVUTT*A>%!!cK-|rf}?Fc$mI^H!K?{P=iUD3B{CoKx^xKF!dT*9Ca1r^S5^VYOSR)lq04a z_?_MGyHQMg<#pq6CHiJQ&S*x|hU8b-VbXxFU=}-J^{*EntPt&Q2&&BgHDOuRnD4~4 zC}E?;?{^4Hvw=t3EiL%|DIlBD11(rkhYk7n+%WKH6%I1G^lgabOXnuAM%p1N(v?mx zQ_QMqWonoBO-)MpW%`93dzstuL1S`0zmG*l@>V_xmTOP!(lMzh(iSlg^Wd4bH~P`I 
z-I-UcPz_;_^OO?XE4w4`0*o%T;G^7=sUAe9m~=;GVk*9H@P1SJR)u@0`-VyaTxl=5 ze!M=WsL_FVdvSnQqCohSM^?c$gA%H|3D$m0DUe%5t=n2Gvcy9Fl&#e)?N;tlKYOrA0D_E_Z(?P(5l%S&!mnoT zpKPhO&zr?n?}8n$vqPKai(kHFq@EgU3&;yUn@RfT#rk0^-jG78ufp-?&z~>fmJ%3j zk5U|3-QXx!d!zY00{@=f)8WE}HgdD*&m3JJQ4*|`H{3q<<7oB7F`Q(wZdRMvL0je9 zCdn9sqD;{$JIfM3_>;2Vz^HhNmAP4O5^NsB-EFU^6qm&7SDf;EWqHRRT|kJ}&DHbQqo9IYJTk;xUxQRxr!73~hUv{lI&>DSa z4DhbReW#-)3BFJ8%6QMAwdUwQOp+eC>V$AH2H`hs{7U<26rtszI`_ARL&3*kw z<}Wb8ek^|=&K?`Gut@E#cF=^%?RSA&D?a7o#XoN;6)pYxw8&bW0E1wi`xo0v)v*NTzWiwkb&VfUXraHc;ROswvO-%HOZb#>ZW15bqPm5y5QJ>147``E6sx#4Tg zLDRGSSRB`r!K)uVUKs(Vu#V!>6p-1?2AqS@m?iy{_8K>A)cYsolpqF*i$d*HsTT$5 zm8c1umy!CYn1vOWD_;-u@`~|CXU(^3qjijYjB;S6TpN$yTeXu=#u+kP9=-Bd?z6t2 z)4Bc6Zq2eMu^c(G3)>aHLl6$9kD#)x5jIcD7oYPVuALv03=4Dt27X>Jb8f{)?zvnL zuR#B3&3;}B)SHkYf^miLo0COJquNC;(8?c zR{&0s#>5&E?1J_m$J}SGBWT@1iSAJqj@~0G9!g2U--6OgPFstfvi_w+#ve~YAWfh+ zGcUr50x%sah`n21=Kr3fNzZ1$7UgIA@~TOLMYSLHiFr2aYB^aFPj4sI5aKksp;Mth zeq3-KfWXW>g1?ng_4GdHA?1{cc;BDsK)gX#$z#=m`XU;LUd;w*$QwynSuI63O#|(P z*&7$V;rhR5Ep`44!q4^}yE+g@6#?hA58c$9To~f4o|;N<>*1zEdov71D8ZmRv}b6< z!{_=OXc=3y(t@1RU`0JT*m@)6JPko*rEG0Z zwu0)U{C`pQ9$-yoUE3&QLq)&_2#SoOqJW4>2PukzBTeZ&(o3k)O9-flBE>>)p(8DH z0)#38(t9W&1Vw5HK|?|bgnx%|-f!kV@Awrvnd zc@Wn{M!-sfn170&pC3JH>RnS|?Ma?vb@r`j^0K%)*z1`di$ev%VvYY|OEu9)Uih?6 zTK&aTW|TOTK=g=xcOU5yi=7oa1+6Px@P?o+L?^HrneR~9D@WUx;x*yJCFMsCfiH8E z7}h$L29mLmB@xZ-fe=^cM&cC!)SU?--8{A&{JFgq6t|jOM_2LzQaSPyE;whLizV;e zGU@$EZIg|UrLMtE{kZvJ-SMSSm$ZUfu8&ldkcm~xxm=(gp?r1@Wk!pL~_{TNYs_bnMc~S%ghJTi3-Lz6$`LgOV#H1`>7defGgUqr0?x zz>u85Yb$VmEX#M(b8>_izK_>^hjo|w_Oknv(_{6zb&fLH^%T|whhU9@Ih228;~X+9 z=OC$yG+^5%=$q;}cR>HK3lLqa`3jFXcqEif`cH{#DBY*!tFvFj!j8S5vJG?T@%Ca$ zSCvfK9Sz*hc5W(0@e+GPurCc;YucLj1*=~>ZV)QmCo$}%?3YtOwN*ll0#yk2n47{S zH8#X4!k<8~!E-ZPKhL+E(>Xp>JLowR)9%`z_wMMJvCz3e5yMt>xk=h)W)$8#Wm+JuS~whOu3Y(T7Y5kNMoFlIOp2nN+!x-z6{WSZ&Ahg<)fug7%MlMl#Sc#hJf4 z_bDA>*I##*6SM7&^Vg-7f4w|2V^L|*T2>FduWl76|GA3?)cHU%8bT}_(~Z;hD>E0s z12qlFXsFwt?+&r^}0hjv${pDW7HH!@NU!5mpKxGdil86Bq&{LwY8>lLmKa7~(8 
z7HKEbbU#n%TbCy%JL+>^6BZvya@PNN2|yG{@}K3XIqJa=wI#$H?LO*q9DAUq<{HDK zs@fH4q&R0+VlLNm@ssK0`z&`7#^TSkwqEujm-?YbWvu#LB^?QMo9^D=9)cP_?;Ud` z=ZUw~o++S3a*F3%4M@Ueg9UChOpl|ye_+sB6;vo`Q`T+ieYysO@l4cvZ7)7#g*-^J z6@VPQfvy>!!cO-G3r_FXS1zDgD`-GzK07mkA#a^Nm0KYph^`O~EsYw)1h1wnU?2Q`aywTDBeUB=ai zffHFj3N?zUcJFOa>`4+I71|BFg9zA4XR#Z85nZ^;0lGG|I$`b@b3mXIu(L~`IA3EKOfHZ_?w ze+P3G38w%Nr?d6MfOr&rWl1}UxvC!Z^Z=(A>LHR^bJC8bq_K2%B3})|hWo2M88zh2 z9#Q+rUo8W3Q)H%eY5t_=-gU<4&_aNke0wIY8h3q4I7r-a;V?sm!Spx@SfGxBhD){f zgT$?IP58=PVa~Oit$^`W_|b!-$pO0rWHDk`UVwBX%=WSb%O{VYX3r1$18gFzkV1$i zn3BdylM44474=Txqy6E~-&#Av>Ytl{bYN`AaU$v34U218!=ExVd%D;1SUQ*4RxW{H#CfuIDU3v5=*V@4dj%GR zRCJmw^xuv5f7!y;CfIKp@Rb9pHQfmD|FbJrcpNC1IF8pE`fhGI;AE8k{4*FdTQFX9 zx`q2`ZGPXscwDwhpxa;0RL0vghBYFDSKBzNR&laH(D1w_r%uK3Bwg$BhfZz#fIqKv z>)KANEWnBJKOz>VK!`3e(1MX+3FV8s&kpS=CM3C+8vGg@FA0kMb&I3B~El!8XoVy zTX*Egktshkh7#8UWqE3tR_#fUaIVc!PZGG8S%iXJecIe4H#<9v_G0N6zkNe5d+2T* zCsUQvCxA~?Sa)%X+n=1>s`46FU5f=mSA!LvSUsl+kBu0XsH#6)jO~=-vD@}Z+%mRf3%bCG<;cv+8y?{rL+ zY~KzcVdo`1oW;l=TYQv+kDQVysA??Sov0)LeJx(+Bq!`s0%*6^=uMcwsaxK8yw3e~ z2Z%yoGd`eX-Z**z0 z>&xUd29}7yYOfzPGNg!~oujtKA+Uzlj*7d@pk_MC#9$e?h({i36)Ytcj^mv>^#^@w zy-9fknHrwDxsLHq3Oc=JXGik^bh0&_xn5z_k%4;ygK>+Pq`<>o-d*X0>#6bnKSn-Bw_| zTR+`lPY$BHS!wJ*+8LW1ft;j zlsiJ~o7JfEZ!#>WfOM(vQh!5cLn1k(0HeQNvsC7)7j?egl%u*;wZT)DMFhRZqyM;f zYvO#dFPYOhp-j-5D#bv3B0dCu7aVk&sJmlmNHDo9Q*bgqI)3?+tAtuf$S{aK2*N|6 zxETNxIMJ$jhg*|0Ie%7sHP__x&RI7z0_sFL_U}-W!`HE_!K|-&xz3XEb1Urz?n+C` zn(!f3gx_#-md|=(sYTbDyu(IO63IjrGrl5&nr`?D#Td0DE>8aP3jvZYQwtNqQj%XM z-_-ezLC6iMMYCOTpesU-v%K*a%oZLKl2^QIT)a(#tXQ-#VYl;;`4~p|JW0*py?80L zO1NanPp)2`(E zetM~X*BcLO(WMq}a3Rv@Q+_G$-sO##H;!>vxTh-=N2wg+%VY5G99>M+)K%RH&hJqs z-@mW)>rbr}H&6$A<>_BQS9wP#$bs8G=R?eum)-TqAXC+1C2TR&xi0Ay+2Ngw{S9e3T;p%~O1PYSnG$zvtPKV@>6`Xm5< zoSq=6Ff=ck>))q0gX78z1_0W+_1Bp|*p@H(yNS!$)a*FO=BQb^`?&@{7&7=M`3Zf4 zw7*3gOAiz7J8Z_OQDr?v6*-Z-+~rzu-gIf#?hpVu#;K};bxzdaK$913J>Zq@a{C+q zR2N0p68S2=XXAd3^8&{M3gJY_4p0@3sK~9ZmReVO0^pFdctOJ+%ZqFDHWO?Ag76~A 
z?t*C@Z<^i3yx%v5>HSl^>lZi1u3kA|#^)X>AT?eKy?AECd-qVp8`z4#^N{Os*lb zyeC0uN}em*pvx;?kY@~Kw_U-T-`grzeL@WPe1+4r1 z=Wob%+dhM|tIL|xEWBLRg1TSa>GlZEahRO%t!ZL=kq|UM7P(z;1rey`pJe9Q{ArWt z#*?&2g%vB=1B1mb@cP|mS+D6YFWw~5y36;6Xh$U=$wl(~j#h%Ha;tF)6Bh*B6{4zE zMqLYF7+vKUj_fLU-}_Zvxr^gz%xh zDz678{kF#84&*u0g4BfRv?C+&jo1ULRR<|*Qr$W%LRYW$Z@8WEEEH{MKj)tOEI3FH zX_UT6p6}GjNOJvtFU7v-${ZE#O>hnsQ8(yTE3T6Saa;FVp9-&dYi&UAPP}H4-!|1V z-x%fWeerHovQD6Toal z7_rB|#5k_2Q>0EI0pcY|w6~H?1Z>AKmqSsBH(LE16>K#rMp{N0xd`98i0%|8gyhPr zBVN^gw{1akA4lwCZ#nS2d(*~tZ6?YVP$WM&D+DZ`@M@JmRU+1BerjuyDz#3OZ9<;5 z-{&VQYUwCAkT5;+?J`RxNIK6anhC^**#-xV%@?eAo>mgAx{V6qHPCvaF||!uhxHG; z`~oq?ge4%%Xw7_~$1|xb@u3_4Jv-$3i^VHKxLOi08Sl;fcVgH)A9bIzlUyHt!55Vx zfnyU%^7IGNoPIy^+LzmDpi^I>Ue5U%zP>Zr7*YYI{aCH>NTlD5xsLK6Ra9j z;PlI03rK``4mK5SQy9w1Ywse8L7FW|x{aw7+>B7dPYFEy~ z^Z7O69qakOk6}MJ5rzk}bBy{bO`x*0RNf6y^LwIfbCl)-$Mp+MY(fvIH*Ix`_Le@+ zBf(dz==62p>-~_HUmD!^kKRu>w(?LK6ype z)jmqM)TwQE1<_uxrziYovx4`B-ZGE@B<*(#AZ%L?$Dq5$F@! z?vk|Xlg$dFOF-pN)BG`Cq~mXj%vSrnp!y`9&r)YvVOtea)^owP(&e>V7vHJSEj0IU z+e3!CPro&b8ch{MRgRhq#&S@ul?kB|xQ1iplW)puf!-~Etz{!SIWj?zh*?x~ZGEAl zwg7b*#i6TmYIp))#k1S|7ZKVcDyMGwPP22z!akoTXB@nMF6l!#h&?_=RD$p0b zHK6mBReaps++O|eEp4pWg2b$UpY5BZ|Vvq88q>v?4BxwM|%j>o=+1gc8Ebb?#&mu?*=b@foVOQ>dFxVsq7 zVXk#PARO4ei^aCYeSJ=R?DY4WGhJM)xGR!ZG_x-oPb{Bl8N1n%f|r*oCZA#o(tE8eT4~i`f&Mhs zyj*w<(x9UnCoG>fotEG=s&h;9YhgXEO8N^_a3sNXD%b>-7jAvs_AU#p)>-?0znV0b zTmHT=Qy?i=-xsTD0Q;Y=?X1s`yF!@CAu2CUI-OR-VxHf2K8L% zyTQ?>Rz0Fqh{bxg7tI$Okxz14SpfeW{v&@xi2dr?{5tb#w)++KL&`l#c(f8rLNG~JJP?KaHc`O7nzCp;~FgSB$ z7~FI}2bhHcqDf)!72CFi|NOSmugLg@D0uv%*lFkIt6&~`K{~AB?wh5&jd>d?^<5Yo zsobjIo%s!ce*B{k*A2Y^zUECBoN*wgb4=P!J`*>GC^+L?eQ&&P@%7r(r=A21P8wyX zrx&Yn`>PJ_$8$ac+-3K5uN3hddT*s&B7~mcL)>A2Gss`UB>fi>@}F}hdzUtNjCd{q zK((y=>GC7IKC zb5Zb$>}ZWL<5U+Ww9a=xcV^7<8CTH6@k_)-CfQXgW4X|=b>=2mpuC{3q8pdqF(ZbB z@$}T6oi~ zwgnr5-3&Hf4L_sAr|x7iiw!$o>6NQRX60JIjb?xP^aohFaEUE`%;Y$HV0jhs?!hG@ zLRm@axnO}{VbzPFy(Hg_+c}eb+JH9u&&TI``#b}l-3?f}iKEP5DP#!!UexU^+s$qH 
z2tfJc=1$P)Gfhx@a_L;`D4L{M-c3PmTF{|+DXns<{zbCacpc4?4Gm1nS0}nZ{Y<#F zkXDD0XZv-KuL9SWT2|zkJQQ(gE<57VNN?VA{mI-Ya6iX}q`_${zu8Pf8lzTUqmmC1+n2;#pSzC`^ z+c*_vM@^0*%7?OY=f4rDz01l<@mu}MylfxrJ-<%lIGWwXcUVT4S>((y!Q-n1VE)?& zpWTGXhqRdc^S+QxD14T{Uzd?F2=)RC`O4;F71Ap%ztojWwATibU7G&5)MwH@Bq{!; zhNh#~G$d*5t_Z>B~ zH0dd2*1y$YWMzlEPDp!JENTINtzJah(qAja}i!uFWJv#GvY5& z9W4hZ@3kyt>eayobkU#6?S2|cykTCJqb9E`(y&%%wlOM-*#=1Go|KoI0Q)b7-IBt-gqo%3gillSkc!%6Yr@oK6B`|4j0`guIDFo}8E7 zH${5~?C!DhZ>W^C#sKH{Bz_zZ{eA2kAWckQb|d)G+%~zlFx?ync1?Px0Jp*&Xuz&0 zulC^%(`%A^!RO|I!ZQ<75eC!T_~eGVv~LZSc@SJEZE$!v>t~TQrf)}I-M)AlA+7lI z^Vb>5W*3sN98C>aa-#*Nc(2^|691H&8hCd%ka9VKM_M@!;uZ7$b#_{ zc=~28r8q9N5^s`Jf@8$)i>;7HZf-8b*0_}U5&o-+XQI;nOBxEX+Ms7_U!J+@W>d+z9biMZXEJ~ ze2Y?)AS7OVA@&6nf9_PQX>1Gl}9 zHX*YS_aUjEqQXPGH8B}zCk4f3AAC+_&Mi7pe|#GYYBEO^SNnFle>3{O=95=R$lla} z8Y{>@4T|jT>+j(MvtQO92&earfI)qBZ>=%!JSQGMjqqpGqE#3HbLU4{$t*HfUEZG# z?lsMVO@4N64vI_MA-(zke7Lg4$lk8u9-VD>x&N1=d48TojBXxba+QYiJ?{sD$c}^D z95n6?4hhC$=+3x%=f$ZN#xPy^r%;=#tR|W3Gh=u4KQ%U~el_F-ipGOXiP>ixUzOGH!nI$k!M5oqP}! 
zVdWEH;e&%GH|qjt=jX9I6GK%AfeAzgAnVEyvio~$az4u9cU4wM1<0h0ytsG;n_kb& z*Wgk)yR;+%SHGwduJ`-#N0-jU^bxdgy?Z2BnVD~9ty^R9BYwyK>S?_j$1Aojc@wn_ z)3;t48AfQR(n|Nk=WHf+a~R)$Zr?vItp1gF>)HZ-Hv_UeXSTB)r!b(zzT1&X_x_n> zxf~Yge~Rt?vf^~Zy+2O)he4ujNuC?szD}iC$xq=J0rzbGxfc~Ol>U=Ja5{#S~tB_+68PEyAi?(ye|4XoO`Q(vY4rj|YX`YCW>FEQhVW(Cs+oVn(6-ZDsnwgxg z_@w@iHSWQKA^CX)k8hv%He!n$C5%Wm{!myloz!2aZ?`j)=WtV*CAmm6l`vscr5i=x zDV8Bdi;4$s-;>)|-M_Ulli=XspmXUQM!y){(V*G5NvvmdH~@vEHYo~9ee&yBFvA&l zpc8j02a#cmfNQxlZ1Rm@Mka`|(5ck>v+M_wV>RdDFXutIj)*_)+ezUEKErlZQBG?H z>6WE>+_cRP4=#<2jV*w)9vmF}PWIK+M_DepR;x6T^o{%y9UQ0u>WcHWz+6AjiP1f$ z>3NaNlN^&^9pN&Yesv*S?_*Ue)QLj>{r|D#2BWI~4lJj+@qr5~Z**?jX1z`2+%;?$ zvzhOH@mi@CdS#wR9w(A;jPI$6?rq1nPoIw%iTWoPd4DM(gst)91aI2so5`vQB}Uy3 z%#oP@j#u&@Nu#JedtVIw9fT<${2-OH(Yf^lzw+<~Zb-xTRXwm)x&GYW=$So9mm#TggMu>d(Skany~#8OPjo)^pt(mF;^m3z&dZ8%%P7S7j_{ z4%5v>W<4u$mr+fVF`x#!lQr z{s>h*9XZ+qxuIVl7DhZ8+}c`CByishr~oazvVMk@DgFFfq7uNwSjbl&1U|c|;r>hL zQb;ceU9F=Xs*-|dFB$+sO+%WpsvovwSr>H9Fv1+Ld2p87z_LI&!Q+sY3$R_Z!k{LJ zo((7-7MeY@=J0hL2&5P&m@`&tkMCR8$Q@i2S=j+_*lOi&T^74-Lorxk(`r-ww%mHQ z7xweY5`A{)l5otyd5D8PYrtF$9EhxZUKv7u+1_(6dmz5IahDD_0SyB;Kj= zJ3LVY(vKg8%5sx~Eyex94PWN@#0lLPBCZs`3b*E^)zM#c;e@(Zo@nLY@oT&vljlA} z@{Zp0Ga$*WteYj+zai3Uu@om4mqmFewD9~be{^ID(F3OBPgIby&jE6#*d92%Yu!53 zDQ=wnL)@#rug7v&t1-4>g}N@k;v^SXg#L9qnwmFANyUVMQMe(5R$Do_Y7P6L8aH;K zVQEP)H?+caAOQE25K%i`<}FhaW(lH+_OQw>6^T(f2weOgK<64n{jMV&wF0hBUZ; z)EH&_BR0^hmFU;9Qd1746xjBTB168#F6NaZ2X-(n@80PzQC&!0XbJ|lJtV$0ULdZ~ z;(??07xBc^%sdTC&cMRG%iF9a?OV$w7hlqPVBI5&czPu=(d6otTpRnrId1j^TW|UF z5b7fDD5bM}u+)}qascHJe13O;;^$iGKH=A`QMbsx)p=7Eg3v)vpTN;mn3Dlv)0xOFBR^tY?I>vb%=g&DOs;Nf_vUh zEaT-%AJs$8Qg)`l1j*x z`^r{xPc4ByS|Pzb($C^D)MJ_GFT71m@w3i6edcMfv)s^uksJacPV><{YrpkA;ShRw zJB5X?WoBp_SUiv%>Wpzxs8-QT{P2$KN$^g_rtG{3q&&xZ?iO_EoE03dAnc}{eHpJI z{3IcT4n66=^J8;gs5TB{Ie2s#9uqZXgr~mL2`cJzCB0MwjN2_{U&gXFi_N zeC5zwz`oF|0vY~Pq^CW1BZVBIn$#!Ji$+VY>EBdkrw>n(5QcD-4}Y{=A7LF`SsPoq z|6`au;y?4Ib~hJ;r+nKNbLE+!{12P?Qqc^VdD))y#tG{jgzqF|IPB%eEreHrbEE6} 
z(zAMT=YU5-Qvu3-jjQ2PreDQKt#p@0@fo^6J88n-bHthb@sDBc(}vp_6d#2&ea>h@ z()&W}=A{1Grk?I~hMq6jYBRDbvkG@NkMn>Ru6wF;FuxgmxmAOvvFqm;p3%vyfqBhG zrIhl3fj5cclNBFE*JylGNZPJXoO3n#M7^wk>=cZ8T~2)Tz+sjV+N(LU!F@YR!J`$1 zk0a!F4yE`ItrMf2Oy|Ry*Scc0;jy=hN|42vBmM0UZS8xPt*JXg4cj^qkq;`}`TAKE zsx^zd164J5ws>&#u!OMcM;}C_yBfNYFBkJ=UHU7YrFo(oC-VGohPB-<=nl4v&1YRO zJH-Ylawn{o*mk9&A9R1c*gLxD)#&6PHa`IMz!vij%tY;|;cIv96G-p()xU%qxid!) z@Ft5c=vjs!M)7S$k5_SDsHOi#zo&7^4p+Bu%7V{+a3fAMb zJ(@=(7hYlmi&#RJ&D?r#&0OE1_o!<%d*ECjy5xP^FSquF%Ug}tpGo7K)k9#Z#>208 z)og#7w;8`s_W)m-I6V3NmBX6I%82yNv;34DA)M>H21?eq&@V~M__^ok_4+%bM)2qL zCJsBSIoesFTO_Su=MQ?f(L>RJ&^aj4xtzB*89mWp*IepNSofaHFX+8(q~spEg!AUK zYsKtnGFSVq%%Ihn>!>6$pl*Gq#HM)4dR6l-ziG`#tR>PDt;2P+@slpbuI1y->%Iz9 ze_E_wls^kqB2W}gzi;Zf(^I=^ncH0IJKMX1u^VW^>Qmg7O$~)V(c@{*AdZ z&z^l6kW*e2E4#g2{f_R+IyE~zef9QVU0^@FaFn+cl*B#V+cvemHA`MNGS+r&80Nj0 zT(7QF_hI5C?)`AF@O?7XZt#>hgw$E`9(bHT%Xcio_Okm#Qh z>Ei-s9v^B+D9#|R@h1)n&BYY&pCJ{e& z3%0RfmZQh})|Pp}A+GLYJ03El*3P3#Dl{b!xs{6Pu^LOfI>gC#&lV-Vj}raHS75|L zte(9J$BVCtb1XjNk(eFi{Y}2B`VIfbrY%FXTf>}~q_l+nKxXkG9UGXPpsQu|J*JkH zZ{A~DFRBXd<|T=nqkjK(z?9-7s!lpEDp7r0FvqX zxc!xQtU{2RONUuI2^+Pr*{F~%+b24tx|f(_V%n%k%-wRL^MV_sIM1L;_a7DaTcvE~ z>mr~2%$nFNUUHjHmJ6sbN-E-eR|?2&ESdug9rJ{j{(J)mmAqJjSZnS`CXX2hkL}wb zuUyfgFPU!9CV39}Au`S)z4rO|m(~ovV9Vt3Yb&ayZV8F^$W&6(+r`W89z9m0#XbqG zggBY*@RYRUkipba_KVpSwz0|@AsTl^YpLCt;)~s9J?1M_u)lr!dEaFb zH)rP0(UqKrU7;mV#>93_y;JO-xms_nLZZaH4ah;MiI4dWcN%^YAcQ%gLRvs;QSJPw z{?g}*ngiT3%+ls{nzoH7{Dd3o+}MU8YDQ!zj5}$$YY&G4cQN`lX7_y5CL`c)I=GjP zo^R!4amp&(F`&qB_a_mbH2;8r3^kY)a zZ&D*`WFZgDH~PuoQEM(P=PdQ+EQuT?Z;6ll(gdd3#v+eon?oV&F$(XAut65sJ1Y>c zrAXyG3JyEyw;h?}(I{{g#Z_56m5(+nSt&m%PAu>q=OA-V!9T3{nkRG)`9mjv=r57u zk~7@uGX){k@RdzEOjy&sj$M>guf5(kmSr$?BPDQzSWC$-7pqmAxIH6}^IV5&6xSQm zI*ZpLy{k1Qsos|h-8O)l^C(py6uQ=#qEO;&8If2PYCkbwNZYPixkbV77m%cTlCmpuuH$0L&lrwCD9@-TztQzigofYFq>ig@>w| zaeedJro=mNyI{62e21@EMYw?;!vLGQ`QIS6ywh&iP#rxT+V&l>60&VKyt-ZIQJPTz zG!LRaMk}+}_Bo*X5s%T%ceCe%IiQ((XEY?qPy 
z_;d8$Gv#Zl=1ctX%YgJ>yX;#OQWMA+`uhKZ9ZWhe(bc|khxIWY^d7z*#OAok+_$jF zl{!EuPi80wGq#P>iR8fOJy3h{83V-O9h=bS>xaR&V^X)q?Em^FrXbDB_bc}-_kG6v z`g^->dkcP^z2kpO`hVVtyOq#*`Y+(t`JVuaYt5Vy9Kd#aO3izL>fawx)PMW1W9A+c z^$}jP^_7D%#Nl!edJR_| z1W(t1mKPRYOY!-XS3AS3fZ3g*pGt9?koG#u_}CL$fxjA;TxVnoz}BB`ZDr-q`8}_8 z`MrqUM8zo`lgV~zm8J3Wxa*B;4`DyTMDjjPZ;cA}v!`3ebj*Nl9@^LCjX{J%Z8Lhk!PcvjWBn6O-hxLAUUBi^`k! zrwVTo>)BCgTljjkTKv{&+4#J`p&=zI>2*pk)Xwe^NSBYqx*9P)$RzP$PkL|q=-bA)>@{Xb-Zhn(l5a*14Ih$`{gis1wlXTJD^Wv2Vnx5SW9 z8_w81t2^z(+nxe#3?t{t!eEFsOzdakeFR2>5!$cPeQBCweK8X>C)&f)s6~~aDfxqY z7od4a{Owr0N+v~aseQYX`XEM!Oloxi^@fcZBy>Me+v3svP zzM(iu;m0eRzRj_QZF8lUOW)ogA1Nu-pAXph4RHN2&iDj8N&jl}b@=ZK73b=n+_1xW zH#$HG$(+^i@blVI=k#4zh1XV?rC^vsg!8} z*6s{|E;%(7X>}{RtxY+X10gQT=~-C`$PcFgY*!{R6vHvA(bd3l`gDE{QrRt)EL^+t z>JL*Mfqr3keLJt)AU%Jm)$-=NN%iX^(=Q9xg>y~@W{UT^BE$%kKHGW)rvjp|3@7~e zw724IBj@~GFaJ%n;sB|K&vl1vt!=S0Ng0D>gztN6{Wlg)73^}qY#A^Z9*-Cgu*r6@ z9B36NolsBlOYYs73;}SEhf&ggeR%|4eqob}c3Ftgr)M8I#SiWLZn^eNwT}=2m%G>q zH?~`~fJu>Iaz7U!(Q4-t6+?ke)6ld0Jfxe^v@*XKPZee}|KVM8XKS{8oB8h_>wqWP zV^{3)&seov(D8fukVB`oF9Q7EtK{rkLM z%BZ~5gKx~ZRDk3tXuPUVp|Hx>bc(4bOBn*ZWH2u`InU>IPPQr+Zy5vN17^wN&+O0h z3?uy43XrkOi2v}$nyBX0ix*WWv1>9yQ&ih!8*{2TWISo#LLk zW*U2)x1sWvf&PyrkvqjSS>jgD=mchPZEkN;Gf@?(4D;uD@E`b4P*;KlJn!_-PKG=8 ziLj*toIlh^8OdP(1Udc73&fRu0PW-YYYL;pCm1JB{b$=(S834}k@D?~^2G#%4*t`N zeSsKYu!C?6?Dg->#M#&}ED^q-)=7B((=XeZyRZhlh5l#Xi0b5o*|>c8f}?~^b_G%< z{e?Rt)$zBf-TNYH3xTp!ZK*lTKs*@?9>w3MlZOJP-pxb}0=85`px?U4vOIu7KeU%a zW6Bsw{w_#u?}|HF9p&(O^)ZHWAjKW4-#NT>krJwLHT9uDdX_tbJ%#;eBLwP9yuc8n zL&@TrcHcl#u#(r2<^O_%7BNs#Ks~~prvsw&C7j2MaH8LuN65v;fCz1v1EM*n!B7|K2+WZ4BqeLw z6x7ys+B->r)-54}Es5d;OHh3_<^+hqk(gpg!JYkw1WxvpY#jqSS*=D57I_lR6KOtw zAbrUS16SN_g(OAvTkE&3o>40o!J57An0pF8|5JOx>RVox1$7Gyo}d)Q^*`ZLdwW?} z&eTjqBq8NBH%jrw)kZw2B2DxME#T;ZeZ2x6oHnxdRdIXIy}IY~kq;i<-+d7GUb!++ z;+FFM^$T|m(NmKJE+2==I8*_o;DU0Rs{0evbOC)=JwTc;Gn3p5_ze)=9Q^q*D=O~E z7eh8TH@zCYz+CaTtKJc9p~D!SWBUa_oJ74Sd?xF??v>NWHjX%YX~{PtrXg3pS5|!p 
z{l0&XT*t(gZYQSHt|1GsqzFdIyN+Jfh_r3!11Ukos4eq8ZP}0yf(4x%Z`zmfO6euf zg40H>)IbrPNBx(c6b#tgD+tUhkk9rfd@jApPjXR(q?YWaK3&qGJLu`mOqyx<$kSby_czJ%Fwgk->27wuwoL}f+G%aBRx%JNk_e+XdaqPMu(&iV? z`nc4?7QtGd$r;5~y z9is9TTIQytF?4YF0#=crZUL!!eXFi53h|MdFHn&WetQVXcBXXdBAim2v`kw|!cP@| zzP6@5&DNMr(Y0lFO@{()>DB)QZdLfgeX^G)Ia|bsOc@eS5*rl~Pb6BNmD5`=DAa8W zq+dGur?`ZKOi{|{muraI7&tjJs;0JjPipazh3#&4 zNuJmF?=2URR%Xx3ZF%= z^Y>Q!V4%sS)Ze@_+I!&e`ocv4iww%vd3@VlRJ0B3Ap#(k*K^p))Pp`Ncx~BSPYT^b z_#t@w*i~~}TiqsNfgQeGq#Sw9iFpGEQ>n|XZ+{RJ5gBDDvdwZK2ew$Dl<5|@-q@Bg z&?5Dt8I17lt{H=>4rOx!Yz>fhn~_&HfWC_fU|vsI^GsKHHGkY^diMGtkwxD@?-ufZ zNJIlhjopD7mRIodb88*L@foiK_4MsAes>>XvwndRvF)ED)cR%r_XQWR1`^t*40vHT zM@0TJcS@4hr~Fj_@u0GH^?kX=*H+t!E}#}BT4Y$S#lAhuzSSY8F!(ZpAol+Q5cN#- zGy{}6Q2g!7^FNrV*RJcB2|6$FkOzOnh`LsyX0!8!Tn6LzU;aZr`xm@fn!B%IWulnC zwN5v)ROUmD(#h}TlQ-#9L)QNaZ)$RY5wfcTdN1K}O|uJk^KL)a$Uzp~iLs`HVFz@< zahAjL0)I<;Zjq0VY>u|H$;UH>g!Yb?nHmKveRHIkc4LGTY?+To2ImFh`j%C1fn5o1 zHu;cs(V1?Y#ub}35U=Bi<*7^SLpRnh@bvV*WF-LWNvc>8Nnv0{_|ks z7!Yi%2&8rtkmeh#>Kr#MV)Uc=%K9Oq?#4OG5VAm8xm{T9@Mmvv@EPjAC{fbda*qYH zC0Q3tcuomGrK2wx0?*O=&qhYmPme@}TEG{gEnY|01By>%P$lF?akI+DtMMxpOdHq! za_K=DpAd7}mqcK+lU1=#E=?!z#a>NQ+1+rf-2Nlu+B-%pOldQR{dbma^Z5GLulj__jo*

GHAJ%&q<1L!98J;ujuiJ5EU8ych@o_n=3??4@5kH@oWktgW~xX+v=-w! zvcCyrpM0VBTiVR()#V??yMECmb{Quod6s~?zW^j~hpSBnQ{&ke0zjt3lmb$bH+>3C z!%B*!oum0yu?oKhNc=12v(4G66ryJzX3B6c6`~umOcs0JFompOROjb8rG%v(wcLHY zlS8_}ap2>pmsF0Q8_UI~zn^>3s!-gXsn32k*9=4EZ32ZsfYKV{jH|VuJaNinE!rLw zZ@Yr^x6J(V@dz8c;V(D6_=Y59WsT31W1>z$5!6_N87#( zA`C44`PJebA$@?1o<;71nt`ToLU@hlhJnnsC3jnh9i`^;iZ}Q=gSaFMNMMp#eVAuw zhpkauqZN&q;)rXg;X^YY`89vi3RDv&Pveu6uUO@5skbI9p@cqa@>i5JD1&T#GUvI& zq7(=G6N83m{S$qu2gQSM2_*~e&)M}K8zl}6%Gh}S@%<*E-7k7xp=lb?=<6_jOho&@ zey3oo158g4`-0On6ew!y$*vW1!tn*(Q@*O%*~T6ddA6V;AjgrNqMmSDJvTKVr?c}R zWd-d51*}_{uNxmcw%0z)dV5VDVmnjF(u@9#9A5w0&A#?El+VCP$zw&DOXkAe;-f77 z2Du`GX~EBd8i)#z0P~JXN-8Gz)qh!%{la!Qk!gBuMgvok*KUui755|6qZqjeMg`*s z{`2*!ipPIF&nJTPcUwyH(c!tv`EVwx@bV9+qMw0$@o9T=e&HucuSRpZLeHg}fBm?1 zKuPc?pASXu6pxrcr!@2NZP6XnLHezo!c0YD#!MW`wY!w@%?u_EZ@%KUHqf}hr%6|b z(`ZWh#Ra2po0W*u5Iwyrxyvs1trK8V3aQ&@3i%FD;C*%v?+ZB7ugiF>iyRTK;PC;ncWj|Hk6w?Jx=Guf7opn zrL><9xzFRh;ls!rG(=vZZ+h?Sl!jJ%Li2c)#hO1k1@CqE1D72Xtp@y>%oS~-NJ82- zz7mBK2>t!fE+k0B9YM@+#+>W#gH4Qn4< z&Nf~qKU7t%c=GkOY>A{#zd3lJy-VL;Sy`w<=IYkIl~B?}?n~Wy_pOv49hEm{o4-$@ zm4`A=$R6|HGm)^nF~gnz@!Dw6Z_mG@wvL^-@hBl7!Km7`0VI})SYtqoi!C;Az(wt& zEI+=)H?&t#0xq}7YfdWPFsPS5A?q98q`jk*cH^R`4vzhIv61)h(vQ_kI%cGDdsK%F zDdqpk4`@{W*7A3VNt{)I63k5M0x|oG8f>py=4_c_hwXM#;Y{VF2@cUzr;{27Cl2Ht z1{4L*Yhp1w4l%kc&4*TW>OjLqn9jEFZcj(-$ zsqv-Gc54|L+C%260vQGBAlm0WE~dSc*pQQ$W-r;Sm|2%$0;Jf(lru1?St`CYo7BD@ zV>QGJT>F>>u3?n$SS;}ZJ1`@EZDY=~bYrr;EYq~8wcyVLd*3%hcLk3HZcRg)CO&$z z9MDa8MVQcKSrdmEK7th7(B82S>2_AcKlwthkK)}XU!4yUqWhjM!H!1O!;*^C&>Tn}p?{*X0{QW=7WnED;)O`Yh5cBkz z@7G87euL(^M9H&K!djDrXx#jd?6S1bu|yXK2iL$4rgHs+ALIC+*n~XAYT;{RIq?$J z?zd*z#yn-Pxr;W-G8{Sjd#*3-RLN9B(#3rFskZ%1w!H_)-8uz#PQCbfenYGPyHRf@ z`#|?Z)MC%lQ(@>w?M#rD5Eow#$y*zh6t)Tt*)mXw=-$st&(`uZSYyzd^4Ka{N(v`lB!ZhqCv8YBF8hwy`pT;8+kv%2)sa z5v3Oa5e239k_f1DLO`TRXm$hzMmnKO4}=~%L_|P(4WS1`N(c~IC<%f8j?SJv``zFB zukXuRlf@u}B+rxkx$f&a&*Pl8-v2^KN>cLc?4~*V!=lOwL+xuPTJ`#l9FUflf+(VS zG)PYrg0#iO#nGc{Bj2#&tFK4ncN@wi6>bd8H&I8S&i2Hfcjor}XKQpS;%2ky+_nqy 
zxJTjU2xOl(%-(Ud=|;UJ$+N=Xi??bJ%+hB`E*ug^*3n76D0sCcTofr2p(QH3IQRh~&oK5om z(AHMLkkkzVd`yKY8Hx=EMIw<#1EmhJe^}~wTWXlON@`TZ6uqW>NLfPhvFs^hf)f3QXML!V*^uh+@G#g5l#1*_IzH2mE2=bgRak!zm#V6= zc}q&VX<>Av?$Kn|&R|y_kx<=a&wSjU+2d?XH{!bWOvNCZE9-2r$9mHZJTk>wG^M-m zI%i7lj9g23i3R7$4T-`OyA?#;##9~Li3a!g3gG!Ut9W4F8RUM1zVD{`?;Aci1yKu? za9wkBvVfekz^3n9*&5O`L^HS^)&aGud z#qD)#usZe&38tj=>l9zz{A`GrsS=EiO(=Aoc>2yiWBxgI2RKNt;z9cvv??vKBO*Gf z+hTzzARC+&la-wzcXxC!)~6G}Gmh_iSK{3y4E1EBnilkgGG37(!W7P0a|V3L508tAiM=VziVeHA21 zT~Ge>2{$vEv46scAhUVGBEa|&Ivo2SBI<{A_`K=5CnmB;D_0N%33=-K>CN`yAn^>veki9|^AJq{NHc2ll(p!76$Q*Ra?`=r?(GO|52ew@4H3a7~%Kg^SClob*}Sk(S|G1 z-b5ivYYY2FTo#`FTw!cJW$FFGl17FBqA<(Av>kpV1$iNvNB%19oS0DO7-WVZsHxPX$6y+>&;cNv>SMUKKFt6;7rRK4}N zTSP)a)wPcDMnkh=NJ08%$0Oe?%Q{T1i+wH0f2N8{k@~nZNe0T`BrqRQ-nNOE@mnoc z3+GDj*Hs*4XW?~Umt+_~C6sr2lpHGD`VTXFks-I!hf6UBZ}9q7TY(WkBY8!*g|DdM%e!+<(bJ=WO<0dYQ~&;3TyeaE%j<9Z z=Z$vOs7BEa>}hd@Qcd}6N*sGVCr;Lq_>PJ=qA$9Wkl0%1!A9r3=em@0PqN~!YJQ34 zbFgSqc9%P8V1Y9$JZe89!^8}(1E6yG_#p=X4bm$newsAeT3d}>T-G9zl0Y|4l9s^g zT1U46{R%@bh7qOY079&8=LMtE;n0=Tt*LRMuqbX1OaTOU82dSKBSpk1V&gFazQ23d z(Ae16OSVUge`j4{uSjH<@t%tpHBb$vs((@nT0FFV)wv}pxwberf1!Ehe*b5e`tes8Ztc8gK{)cwM;=| zW{NDV<+>uiI>HNTPZxKIc%#MzB7~%}B3OLizIdQ(2~$s%jmzQ)rHpWe594-EOAhdE z8DV4IVtDZhxdCX3Y$3-t99(=S0PavS=$l0al?|AyXqV6SWpIgak2+RMJ8pV2Pu-$(&av_HYc5I{%7C7l?HkS zVd1#}HHWjPp30?St)N!`T#Sq+!g~ordUe8CP-uC|WKrQzbxQjnx_6gKr(zIdy*cyM z2=FgLzO=7K*n(ywY!Wp5Ch}@4Dfi<8l?n@Kf z`R#P~NwZFLVaZ_cEw&_DxqFyz9R2}#&SK#GWU5T|`2en!<4UT5%>b!a<_T+6@_wpC z!GeH;C8>4$0UQnnGIKT>EWDB#7j|$4w}$j=JHssLb>@sLSMwjf6SQFd7KEX>9UYwV!52zbFCtxmsW_K$R}(?SAAjKyBh@fRgMynu~W@^F)hmJgj@WXnE3r?hT?~r z=WNk3?dQ0R9-<}g!Timw=`I^AB|FUJ^>LJfK?uGh3O9pT+pjX4SSL|&5;NG&R@^)@ zdTEQ9TN1p*&)89=Iuw$07B@JNyp-(5;^r&XKm)5QI5%Jv*vq~AK<`TG8lzF=e~)qY zNpyjh5-_XA&*R5C&Pd=2Yp&y5N9AjEW9n0IR)Hf$I^^ba#>VYNSQlGYKJZhxF?pOT z!MU*O%xS$U%!tyCoBN$2I*~&Qj~ho2Ki2X>@B^vJ(fR!exM^w$&dWnmJ+Mee*`&Oe zD5~B1wYRQG)yQ>Gc?a8vL1cKe_$Wy9uOLa4SFqXrC3&E*9Cx+Yw9>>Yi24c_6-=!! 
zxP(B(>;gr{AEfNw_wv^6>@ePQ#+qzeruSB<5F2+#xLx}W3{vP-HgsO_@-~@0Z7VKV z^or?<)D-rsvmQWxCqC5WaK3^n#K{a^>1(#B3p@9F&!YN_5NzWnt=4~-E1W2nNm>sQ z1@mNXmJ}}(K{DJFG+lCJMKfw-@*hJs#U9iN+<&B~>_U?Jq{Op%Y2C8ng!WmNMVmS$ zBiuz(VRxN1ALa`&8pK-L(z4n#%QZC}F9@{3dT}%q@-biTWzykc&$oOcqZwocLo;&c zovPPdykQV}%y4YgwThsn%UpjHQe>7ER#vsC&v7kApPWNk^GS4--kY>yYLKULKv6(E zeRFZ)2B{c3K5w0+IWd!Bo8Vc4tLR-Qk_Y~C)1>*$Zn~O-hpZt!A6aOQF%IE-!O|Z% zr=T@;VVFvte@#s-e-!QpgYmebtgIv@XkN44z`JlF`1a^}XF)H9d) zB%~rGIpsW$qHx-EYZwQ~GS`Pqiu&07<)GkvS!I1!L5U2uep?fn#gcr4EvZ+*5JQ)7 zGDkrqWXk=xvs)3n`40K`1Ps(T!Z52>-AH5vd+Yf`r)GZYE!J~+Qr2RzGEdU`KD>-v zwQpK)c%x>ko)Ot1=+PXE(IA_n-#kd4aS0_B9mi;xZTjoytC6AfTBQ`A)P}djm3$V` z=85n7)HrX%m4l0q{N8rX;C$di%>E{hdASS?E96ywe~s!eB3K?#l~nfJ;;y{0fIuiD z;xz|^yRcV*B-iYAIeylq2ce8eZCH3_g?wVB@meuqootN8<`ZUn2Ojnf1XDIDnbDpC z>8UDWqVT?)4-}-iA-bV){?r_XOtIbMuhV^9|J2VwhBxwR;B!np#i@$0^bUy0H}g=Q zL(3Mf?&w0u9;BFTeZA#qbcnAEu_?Nfu7Rf4%b?oVAEItY3g9loZmxNv*G$dvxCeFY zBjX#e)jy-c5(aWS+=m~m(eEyK4I99GY4<+fI!_|=if3s!4tBFS8gKXqlgkur`Aju* z$xj++&xsHH)1V(L96faWF)`55?xzZTuEZ4HzS*^R3ywX?sJs%_(z=&mmud!-W$^N2 zG|Vqd3>Xf8@2E9l%cZKlvs}8lQkg6A@z78*zl;j@NN|nDzfX( zulv3&VMgoaaOhYg(;He`B$v+6$siqKD&|UK?+~O>{ikE1FQKC(ZGbFEdXFIT#qL8Dx zPIsMb<8dw3T`|;!D|X`)Hsd4c!b{>$F8hy!1llDn@KS`wUD3?b=wm$b?Hp#Yq}b8> zxmR7-Hs4FR)zo1zSJ6Yro}MKo+e|21(6cJ~G4v>(Nf8O|g1QIUv?TfEm9$NDodr*?`@^ z<6Z>D$D7MrUX?RoBHDuh=Qd!|yjtGl@X!mPnU3BG*HmxcKTb&`5TQNBiB*~h30h09SrvFVaQe2GlV+c-|p0Q`3sx??SJNs9sH5dDFI4LjC9kEC`Vg--^PGA3Q z%$FI~hSb`8;5l=(c*3K5$KefdTjH>Uo6|P4Be9He;Io1*zl}ro)o*|H_9MF>^cEiN?u4*6?r2C*>EL^+XcdmOb zathZ?Kvjs?KGJ=udK~TK)YEMCvVr>WCYXlpIIq&r{zM1d-cHX zQvNbnJC4pk&w|gnUq>ZaKGwkUF?+iSoDv+_)Nu>)_j-Bq7%O1rX1Am~{jxi_wmoZM z1DCml_|c1XXlk4G!ahI1O!~-rwW6G#S8=O{4x$TzuXzHjMpAk!fXPsQi2;~;FY)(1 z0K9>(Lqotha9jV)($}*&Sl98?J%g2Q960nQvVuHm$ zs3W_s*$*6&v+gUK5aShQy3iu9p3j^nPQC5E%1fu(v`q==2;u0vPQ99Wr`qmSEWwm{ zyq+j$x3GJ1fTFCuS_w8>_ucBUg0`yeqP-}6u^*n9R?=E+B_g?31q5;IuXUj--JHu# zpUNKDnsrBp^P10 z^1igRc6+(NdIPdjht_(%O*mwP9#i+Q0G5p58?Kg(^0^w= 
zh3ow%Or^3GMmrad_m(BJIJSR$;=dK^qp=WmShBFe0{uNDc(hK^CNZGO2SYvBFC}>j z7d|M4k9kV-S0iC~XR{B7>TiCVZi9u)5k$Bs*mk^=yJ32Ny8ksWBTofL49}x>v+sds zz`8jU+=B(f{lXT0w;HkBJ(eP&qo58*j^+%^jp9l;*74?^O7-05_JuV*zd0bE>Gw;; zm=;ceaot*U{0B$o{a|_~3cxjBk9d#4=lDNS{@!rhp`<6OApA&&>Dk39wm2X}X#2(J zsW`m1;~4O|r0s#kWMs*CqXE((-T`pS6e1-N@BOD!mWAL2UT& zr^`ISP*I&@zWy7p|IXY6c%rFON`*{)s;UZ^rBDI%ZD$tF}7?d zk#QQ~S^1{p%Rsx)qix92H)r{VA1c$Tr9mOlCTS!qV$sWco>ts8ceXb~W)RvP zUKhKeux4+F3D`J|e(GB%JZMBdaZM86%UH6Nu*qj7P126QqBvM#i?v$*4|^}b9a?^$ zc(z`&L{iY=n5*P7xq!rzySIg`-@4dmIq#4;8=sQ0o()jk-XrGz{Nrl z<~`8l_5^b~Fs8I8MOKwkK($b|2?MEseUFw4t+4y5mv=Nz{dRD@r!jB>w%lE7S;nuh zWiM)n-M2KoxqT{34WM}9hNW$0ZSrdKJ@*~((D@4_G@FA->FWX56*S`-K#CigSJh!$ z2Xc<=+8*mu7Lg`msPb??9)bYrz;uK1TG+%+QtQV`xD zZUsoBlUGxuw}R{x)n;H*V-^Q$8bR5@+qg;@(X}h9ZX--&+6l zl}z}qK5N%Jyr3zG3<-So?0)O}VxMF2T3G`_UC}%}2)ipI!f$Ua7s7c4B~S|Phv!qG z1-48J(Z=8tiV``vCt&~MYiz_sJ`rg(bbbq8@}Bqj^eKCX&iC3I#Kw|7Noko3EZUjf zEVPq__tz7S-CkAY^6+!5X&dQa=9EP*zWd4bRFKUI$%@ZH z*tgKTpGIFn_ta*`V0?#MhRz-^lD(n3M^`ST|FLEN{x{pX^7D!VL*AtyHToaIXry%? z_cBy59Tgl`rEm#F%?9$Q=}z2&r}FNOabij zD*LVv8tV$NE3lxDF>NzK_iMOb1d6uL@tW9m=O`Y^zJ5(2=Y!mNMNIX!;?CAW&XR2Y zU1L@8^&pp$>_bxh<4c+*`bMfc3wa|Eh|a)(7PnlW-((QZFm$vH!s31ZR!Nd z-1j?;XsbJr+B7-?yd|Y^K z*K$8yIZj(eMjm#N-vJ{0CG~?T1L9LpjTv+1$rA^%epX zB#9v4ftWipMn&1&@;E7|)mJrZ>CCKQ-%0IBMz}8wU6g$CbJehs@(|cyJJV0yB;laWg(YA zVzRu`#HHjHRj@arP5rj0jEQB0`U+|(4=+_Xtn62$cbo)z`98P_jYoN_xJ0~^vMRFI z_x>26g2t#hnUKn?vlpEzRX$B&PJhlDiahfDi?u?ZU$Kz}M&Zyt|3So5tSLofBn>-g zG_{)P!*P^mY^CG*Fg5+U*dcR8i2=RgoZHqkQ@<`!3V%309o;VS+wP)GZiNA>HEzU! 
z6Ccs{$}gy}w>|Wn@p!Yul??O3y>Yq!kmyjzR7+g0N_e@@0C4_5!=b-^Ar*0V9xlr(K>nL6@Q_`dMP zZ{2A^g<8vtHwjnA|GBhg_O4s*(88Y@8Bq=+$0dXL4N)?-d)z?kqC~<{jSPZKbXdu{ zYr`p=xEKt1{R0#GeS}W6_`8p&`%vGz?2+CISMbR*GQFQqVJ*z^9HAjHmlBiI*x&7AQc_5^m=N(@5Yp=5{%@^)`Slj*PKCQE6fwTvKT5DZ z?esu%XSXCr48FN~mf)6u1%!aV6I|5w1n6Rc(-9gzMv)pzeXg#3r7zkRV>7E)4;#Cp zJR$o$PW9IQS>k)gxHxyj(a~z0**?$Xo*Zu1t6y=xS0V2^>Rsl7V?ZIIRt+22J>nu= zwm4-2Y(V?(B`FA}3eHO9W|Yf48~!r{>RTanr&g(^1ZLV8p!oBAW84*SIyyQ6AC-h> z%dw@wa5A+D*gEdCb+%&c*@l~e(*Em;Ix)sI&`PTOa!zH;+WR77iM@VFHko zR;EWA{wcJF0KM59wJ^Xi)?)PzhliMviCxUq>Pds~^-d_R)c!-iCK$4{Q*b1gfk_qB zD3AisucHW+30#w=TwSVB+>!bA^IdaH3>Ujy(T#4`aTmOzH3XvHF5X2-f^+!Zr3Jm)$U?a-PF;lYcnD}lAPAoy z`m`;a#m&5iCYdnt1j~#UCVoF^Tu@jpQjf__POr}O@OjMu;u#yOY6Pn;$JrdLWjS~f zSt^uvL?#U16sUM7R>==VO&V6;eQR|#K$!!aw*R_GCdhuf%*G{qdk?W<3fT{}Ad>^N>l2U;XZo_9$$TpB284)XF(A#>*pLe^~;KkS>uShMk zcAW<7Qv*~e|8zyU@7aLD&z6OcOnjpSGhfNM^Kl7tVJ5dZ5ai8LFU9$_ zR{Wtn2+;2R>oqe$dU|?N`87FVfbwb#g~BR8Z&x3T$yaIgL7{L^^`i6;W&lu>--cqT z>8A13rc+3Gr=DjjA-5&+m0sfVOpAe72^E?keL49pY+^@uy>)CDuw?e?>jK+o3ctjp;TuA>K%tQ> zW!&nipreJUh51WV#ekeBlRPquTB%4`O8bqDrd8ffI2~oM=&Br6V_RFz7h~=Eauhin zhDSVNnaOA+Z94RSHh?3s#uUntCjTR^Yb||_{KKPpZ6aV+dz`jbBjr884UjQDb(S^z zQ}MHmjvu;QK%vr>TsL95_v}yHpdHYU2%P(}pSRF&Fhcg!y<;B2b{cC&hJDFhW28-~3iEflzsGZ| z3n_PItC4CoVD|)aMh>M?xbn^&yZ5O`LWDaxd%-e(-rwffw(7oY%;GD$rLn*Xp#G`< z=;k{;*jyR4%+FC=PO~U3l44wV|GK4eblsS8M~eF(W3F0!#KBC49)9|1w8*x(@GwZH zJU3t1^ool>bkwb%P75B%;Y?lI*YoF`v>{DknVd1v`s2+cpS%P%_rectp?gir3(ZE~ zHR9w_lR^SVL5tHo0Q;nGM}unM^rva7Ea@0iS?0&yad4iN-mzvq^_x^s;iy5FSZZIE zz@d`<0X+`Gw&5_Dc*hCtGd&cqC;xo+X+V87<CZ6j(StFD0>znXc9vc0%SUy5~C zV3;RAF8nXH5Sn$?yzEr6wNZ8ctzuLL7wKG-@-=AiZ3Bs2NnKA4HXU7$+jfFiPaMh1 z6|xi6))A3^x4!N{`=s^XUXk({imwzs=3?}Fe&dp|v5}#BA8Lybx>p9QP3srdofOdH zoY(mbfaIhaSuE54VK+EV#?irY^8e)^IPd@!!ZGSK~^xJLmd^_ zWEg*8J6pjF9Q681iv(_9Am=TGcRLS1ZbQf>>}*&9;UDQU>($b(lr&K37(hWjL@a$w zj&tr6F?()!Hf12ebypqZiSeR!AuH_lw+}GO*h0L%6xK+*qkJgT5W}~W<(B9adp*rx z9xi3T`mDq3Wf=aEB{w6lyDwj!uQRpG<}WU&qU&mEMt0kWr7gwDb1Q81)v{#JaC*Z# 
z+c(oZsg;R;h8?{VHK>Rptl!J^(yTM7D{fW7reAM?^!N1}8J5n%_RA{+kEhy4)DN?2 zk$8Y(?YIK-6FFwAGRyvOefj|Ci{TfrSEF0((X%Spd5!tS$Cgkz*fUd@Q{-w=j}?x? z4ejUu66y4vx^u`!KG6Mc>1jaCsXH_|} zyS$-$Y)41)w3v}!Ha;9!qdM7_>(-VM5KG&F#+H^YRx`ksoep@w^9@0J`9+k9`|cFV zMllM+AyjxhzUEvPzS|}xlcP%xpmvf23NqVIlf~H{L)+W6`o&m-9I~=+>6#8WSeRq} z1M=tbZNm+s67%NO1%$O>=IMmolA2VYKic+F!r@LFh26XM!}Jd>J@`HMYbB<#IqrxV z2LqFs6XTz$p8EHv^YY4m;G{OTc|wmMg8mXD0UY385+h+?4G#8U>uT#NZ>dn+B*{nQ zf6U1WSo^JRZ*KCV73w3oII}Wxt>Sk3ZjDZHbzH%YpOu93^;Y0A(c-trYFNppZ~JVfPR z(jGb2aQi)lnQ7#f4*95PfMYc9`(U7R-8tGmo;&c91O877=0(m$zn3AHs2bi{<>y$T zEdcom*dPDnC50icnFCd|ad+~a>Y4dMVi@0!nQ|>Xd*ey(p%hacodRjsiJTVRz}pEFoC)67~qxWUs3WW3)pF!ug zB;kv?vn5HfabmW*m}=2$`Dc#=oq$91P*1JhGqLdbg#5*785h%?QtV`3(?7kq_q%@Vc6`S)8XU~(@rj?b z>Pw!-J2H9Mqc17wQeaR1aOB0-9x0rUj_RI04|c3cT80+2O>HkIp+_=ng=~lRx&GsX zV!AI7uQhDQ;yYcucR*egM$z^$(4xGnb8bK zqF3E*?@IoU^Aeb*M@$4}9%bEJd-_{#>RL&*8wEltj-{LtEyW+-E71Q}QzJ-*h)cq9O|(yExDylFyD!rD{Nz zskJO>`)kHbWM#%(NH44=BGC`O;#8Wdiy79-L(;qX z@)MQfb+XG(wrThO8A)|&!y{eue9opf^!v7RuFKW+aD8xj$m7{O=1m~m9z@%O*4{XD}UIcj&vyK*Kx zvBW=RgA#lZryQ}g?jfjjem_$xzptX@gRaR3`ya%i0?ZZD+pfGHc{qIA@Uv1}1Wv>- z{t!IyfoxQKR^miFXfSQS+h%r6QkNH2+c6ba(kzz9XR!Mx zH+WHHN#Xa9ryhj7jADF9p3ybU$$T%%IIDurhYcL#MB_Js1Y$lU!-DM^P~S04vaB?};=RiKkuO_% zZfRs2&``ss3#9VOGILvrL~v1Mv?uVey4y?1>3@)!%W~3C`nDn>#-L_1Cc#d*zO8^- zbUZaQkb=%Hj*4q)->Y+HXE&<7p4;XrwJ~K@j?$n^vpLVo2ag8{#RIJtNj+657u=g0$E}=>Jp_$F!M?wsh(#ZGCUOa>RV0>qvl^>;x{}ezl6?R=9U*EG zr`XBz(bky&y&`y1aC*&mnsqszX#-tftIyxw#_|5(fc5QqDmTtB*zDFj z%lp&o00!Yz{H8MEo>1v!)%pkc(VsXK**^q^WEk599A_cU+&)>PuwuJO;`iSimj6tv zrfTAez7_PlgvxKDT#ECL6^1H8gHoT@6P1VZqL4DVCs% zUMUZ*<%n5PI4nFGY*`M!2x)pGPkd##&_4a+$H+dg0na10B&CA3pf+V`TpeIjK*rBo zmmTV!L&sbrG+*+!U$2#^=y$5BplMy2T06lkFQRE$hqcHRd;%a_pIPeYqjC^s-*+HK z=kdExcg^(?U8Npu;Cj7foqLP;Y>K7@--ZA*trAYl;^jA}rdo_omdP;Lug_3We0Muro8347)BYvTzF zQ=Q|&kXz?-MG``QTkam4KO^!pTnfP5V=>S~)JTms~KA zvG9Npu@dE^F>hQ|Fygq6sZyKL41t>)Wp%Q8Ux_B=q4Cc$`Wuse+Hc^!-(OR)Ry$_8 zJ8Q09P4)1@N6+K~2VL3ImHlW-%xCt41(u~6d;#mvtGz{fmLH~r2&cElF9IW98iOxy 
zp_F#}LfT&aHZo$c`}jq*R?-^OUmX$~3qt!WK*|w~x<@16jV$%E9;&L3;C9~dO5Z;; z&MN7;>s+H4t>FY(Hz0)FkArj5L|QYNr#LnCn1 z1Mm%Q{*hyxXX-TTuAw0fEU*Km_F{mmFC?g73e}L#Ei1F3c{&f5+`)KC3168zUx?2G zzSKMv%Ah?)to-HkW?&ItEZ?d1^FmVi_yktb7Gj9 zc)67{G2@_j;e0HZDSjfm>tH(W2UVreEc^x+6KD_Vvlw!C6?&#WIfh#5QQ0&_!3=fv zdA;jKysuCc>pVu=oY@e+_jGNDwt`Zmkv+ucP0Na+jsGZ3i!^>1YtNW#dX3j z=?&YtTvx)5V%_!z3^Dm!`SOxhuWy47CuyOytKW*R)~axK)^?oIm;%g!8BnZmq!Y<$ zQ3*ua;}{KYFuk3Z!P1p2_HgHIBy&9M;@`YwI$LBb($+?k-MAf1l*5S zLlDN3f@j@BsCD1i%-}kAZjtE=gIgiM<5?EqO%RiWZK#K|Ol`myI@a62c8s}R`<$pq zp#|kVvW@BHIemZsm;HTLYFuyAvo@l{#FXA7qhL5(#804R;O_&5m*%H<8^#2;zB(_uJwp#ByMm$gJd`Sz=~_(w{;pPyk@TUM+w9w}ps7kRevT zXh?nGsDfL*@mOlDDduP9lZm|O<8!6{zmf(Sp-&?l6Fo+$t_1?eK>ibVsM zn)sX5zY*(3a!OE!J+ImNw?*jxyd(GCj9>2A{%_JXnDNW!be-C1FSnem-y2up2ya=Z zkM5~sFjA|u1J#t6QIHdzQ4_d|g445O(qrH{xaeDf^}jxsnXlRk&~xTzTm#f$cyup= zV*v~+U=35>TSJqDe%I>X+nqBp=Kj%DlDp;Itk{y{VgCA&@ja$}PqpH>b?x z7QA@L&l5kKE8TztPIMd5#BHR z*FI%rXODgxtFE@d8YhF%?@QhJ?3>lP(|O3o(^d4+g-Jq35ND2yNO$|-ytJ&ky1!3Z z`4yp3-U`HaKfUd07^=QRpFF-9+)w&+a3>%$s1a3O-s-?-i8bwamFu&d)r^UOo>LHt z;4~ozzeX&y#U@$8gC+^j!313j5KI1lBt=R{MncqB$Nv)a#ps3H&;#&w6G-(P3A@_# zWJSarYYMha>5~_c-_&95VI#1jI|J<7$V9Qv?0CgMk6vMSlKb<+$CR|d1bgtFVB#Z- zfYJ%yJyHG>ZgsSY(UDr)bl}AbY36auU{`Ri?2=!*U*k0T{wx_P_R&q$qWmr}5tkQO zzXeCsk20rkL$EBg^p|v>p>6E*|G7~AbyvX%_1ZG(kbI$_%fAJ*H+?nay3E$dta)#2 zy%i(0cczneJ1}r3Z4q}80O#&9z;gh@bPOaZ*1I)U0HhG5=VG9~qhl@)`&iM#Bt@yB zT;45EFWGLkw|Dj{13P)A)p9ToyCj27N2BU0tw_?ToPlMoc%$Qf8{63nkI(<(2|a@{ z&?z2zJpL#`-E~8c4brI9IT^T~t?jg>=9x{h%qMm_!>FDdmdUT^yHS$8cCG&)89txc z(Q$=xc_G)6gdggEQEuD{s052+T4&+F69m|5qdvf-G5f^`%fqmAu>Ev!SVjEp9|Eak zz}g`r(G30p5JN^82NZnC$eqj$um}Y|_pkptYYzjakA_r+D*UYsQt0up#^{wBawa{M|DH?zTnRm>uQ7)|}oVPyb{I*!w) z=0CEE;iIi8O%4r`|9)(jzn~{QLWlP6W8=DJ_LBJ+PGm3O9wXIre1q;vWTb=ufhugL zGFV6V@89$O>uJwLJ#lvjSLuf#O~|sRtru`OTyBQVJG%f$RVgXlJi#^fNr`*X#NWTS z^cT9cN9c9mMFu(y6kgJB2)XRPA~uI&+u6l>ly!JPON3Koy!*WBuLns@pSsM;_%hW{ z+>oA?9kf6=8Jt1yclaj_leQW@6-2CQRpG+$tK*F}aqq3e^h`dHf6-{b?_Ojq6X5ok 
zM)Z}94lip^kSM$E@b%kb5WeGM@M5?v|h%u^enCDZpHfgZKndJl7M zXnS*y>wZWrY-%qm@<9Cs%lvr|${0b)DN$<*OsNNm6LmNLStYgG&7Rt5A+627_YKo0 zmasITBAoJF^>#lw*C2b|D|Z@N+ek!Qh?emYu!;;TG5(N_P@aw+(Og zx}49n&xSQYNoDinY&zWsB9oJ^g5ny)xMXLV?>=*8Q_4-*PgII7M3s#`q1g+a)pVL^ zWHqXYp0$N}o=Z$mufG9-#zdKiL+f^k0k)j###X~Qn@hAWL`)#4Z$fYQri%O8(H<={5J434>s$B{CJJ?eU!#qvugXx_;dHt z3H`4SV75+-Q3YA{;a!Tew`lcBAlw{}YexclQ$_uE@0Kr@j-(%_t19>%vT{|B`*za4*ro}2kJ!ik4$Uc;$ z4^oycpE588@XtOu+rxF$zRqdvgeWxsc69akbczo2IW8tW6XdP!sFM;WEZ!Yqvj@3f zA3yp8Ho^mL)YrV(?)O}I?}@@p5XNNEg4CjITJb;J5oQK`ZQq6ceP$5j;^hfhE?vfd z40E$wSv1dkXOGs8rXbcE-NvPa|19Y4zMrizyIv;h!y@k=+Q*Bj_UnQ~o6|ySYUpoW z=>wxVQR_Sa&3G3i7(efpynlTO%b z&aWI)xeCvE*-KEt88r&2$re$O?@7a$9_<=@J)c!uo!Z}b-FQ}VK3OzjeRYN_+g6uz zy?em?kU8S1&uhyu1N0irc7Clz9pcxKoo2}=r%EuqR?nh0H>bOM5nE< z>jWr>| zY$o?bCBKo|{;qqUYd{=#|{wkJ{W( z)kSlJ2pVt=i01FSPql-PG&+=c`o1Za<}ZE=)6j)2?#}EeRx};Z-urmv(1w1R zC)_w)$;>#dc`G@VVE4>ZaU3MLZ&V9QnT;GW!We4( zBCs><^X@j0YgEFJxLbZ~7oOu-Z?Oh>xq%lE)K(PlFZ)=gqCJrcqEE?8$J!D+vGy@l zVF)-HK5px4$1K$h*?e+qUv{ST2!yXcpJF_6AG5jH{yJWT!#sS7x+(xKNe-G>cYtA) zl}lhbz4!5nY`77Jx>yJFep0oXIqjhx!Lf_#1b6sWawvXZsBnj#H>s*lyUHIb1+l7{ zs!JKR`N&4jn1rB~t&lvU!++2Hdta6$0d%T>HI;fvIla z4#f(cIXYs@L;&5LYJQp=D2>CkMRbutCO~SZ#}4Vra^h~6L@6h^6({lzLyXDEZUt@cj!*){St*n1ZVtk({Z*Yr^E*i>8s9icCiV8Qn;GU$#Rw1Mkq09Ceinq zmrsFY$`GB{Z+GJc^Jo}Fc4@u2e^(S^{iD`0ye`=ps@@TS7t>5j^vG`M+Te~%DnPZb zQR}#5(PpZ;#0r18g)jAObI08f+;JAXP?Frk=3gLI#v5zF$|x(JT@|R2Rkgl)CO=mu ztp7T}!7?krz4lF#+|6&bF}Expr_-ARFCKcxv)%M<6o1@TvC1d0+3NZjvG^FRX7%b9 zkiQj?P*BXtrjJec%_aP}KuIykXvkj2F_G*Oid_D!DEsx-Ad?iIjiUuaRV-`A_OW`n z?55%&Ep;d}i%cBZwI~)9Dg`te)5o?FEtwzuRhs+%D$Ngt`*3@+ydTC6b8sYV=X-tu zJi=|{48^ANeJIMz5WU`!+HY-N6B-OsXP`&q`E)-auApa4i4iPtq!p@@wf$6Hu+Z=9 zvWn~-SHsbi#Jlmrm|v$CxSuolIZOOFe(TD!c90MCUHga>u6cp~xMsOEVg?CWeL`5M-i>GZ2$&s_HD+>yltu zZkW--(>e9|Dr+gw0+cq!YjpSN0n1QX>t`X&UihqS=%Ul(|I4y+5NOIAE2cNP4a8#x z{wFW1Pz_Z%QNpiKCu4EdM%+RnXg4-&HEtH7U>m;lp2AZebD-t+HXI@zU(|60l{ zT&n&?<8-vp??=CKt_Zt6h}or;h8r1{kj{ABvGa}UmH5~nAbCgA(mdVl|6}Sb!!(BZnY4M#BbU@ZaZm{2zReV-NOh 
z_py7sulIFc=k+=*KQhDKl#Rv&IvHyhlRdnJ9smLVD|xIA+jIDtUlr(8jMfG&A1rzs zbP8bh+@CdO93Jk%-v^9OOe|p%1RUmmmcsR`Y%zH7jX;JN}lYSPEjF|*Hqe2{(k}!zv%VLl^7rP*B z;bo(oKbXijOjf|x!q1E4KBno;e2WYZLFz0sfq;O*3@c0&0;ZFCySs0{;k^kJ?{ogtr@FHWh1-Ust|$$e`q4+Dxh zEUEg#Wd(kB4H$?qV#a7BETRgdnzH&p37|ouvo)FVE zhB>b{d)|ENJ{=G-=rzwD_#1QOt5HA0a(uZ_+|bbPf3yn=r#o3G7s0UGFrdW!itlNc zn~G$1|9LxSyR~fdf)g<)!k-@;y8Wi<_vpSpI9b}61Wfq@M(p?OFp0%gzw4U74PdCR zSK)SEMn>76Qd02urJ$Lqm~R_lH~xQ3L3XgEr{&TA+LnrCf>1EbdO|a}>HNHA$+z30 zW^{UWzFTNp3JpoCz6y@DXyEEb&*b`FwPaQY%`s8wVTXovljeUzckAox8?@Vn76jeD zPtQ1RM<f16&SG1!Qbxmn4=Y-Y(w;2-ob*9%Cft|4N7>z=e_k z#|un$(y;3C05yceYyAoey)ZN%L9_Z6r>f>Uu#{<=P!J}^cORoxH=$m?d2#>rR zMQ~!UndOE4tyf(nu$ng0w!+V=AEkH2)n$*L2T143K2S7mX>V` z>h96_knJm^Ec2qfL%wEqYA0z^g80rqkvD(GN{>c&W$St^TuQx7@ZVO+V@L0M!fqCR z>!vJK>z9@n-b63em|8-8nv5BReJi@1*48n!Z^KnMS?kHDDS8L?+5$q0KHBs7w%&o+ zK4-U+%_L=!+}M!mMQp5MmQ1M@mzE%CA0H&d?LP)6pGhO6$E2wnl8d>d!`HBm0K?T= z5{4xhaIbK#N@tP^dT=HRJ8qcQSom4WwYpmE0}XO0Z0l9;4aUZ}Ep01Ip}|YuzNIbs zz6O{i=FgZ!UjKI(>NN&aIse)MNq}+h>A{YhmSBIlmx7!<{XM0+<4`MgNtkHoxs16_ z-HzQeye5sa%)K0>EOnR}6^~UnGu(N6Zud;EZhPc!bke-?o@tr>()I1H%!0Op`8U@W z-?k#guUlQ0j*@D;9;{Ko#tRRtTyw=}hodZx=H(U9EZF)@i?oJ+v41Tv7Sh?=9f$rtxd@ENSwKU;~CZ@CL=E^tgPP|>I9K!_$}}) zvV}BvZwMl^eFS#7$b)421@6t{I`Mh5_V!A`;yvK#*8ePvC*2RNv9Ymlrs-jS2US;A zr`w=6buzcUDsiX`n_O|5ONyn7ie7X_ui4+V?w&oDW67ls=wOZ0D1Csb{qGi2yR1nT zNjMG(b6{3Wot;UV=@JGP4?hoF%lAvj$3XA{@?XP-Fe2#z+y`23GVgN~fXVc6fI9qa z0eE+{)Gf~kCq4kKXRrh(cviRZW8}jW3Fy~a+B+TMz{YKzSI6FM>&IWc+v*H`d|rH7 z`6TemH}sRi+PZQ0bMg&zezrh1@%yZZNAD4L*E{JSZF+8cc=iecjvoq8-3)Eu8?+Wj zfL5l}M_KaCJsXv5VK;Zka`M#SMIgCpW$fkAvT9W)aAd|us&dPrrV~!DLDD z9|E?NQdfYcfLz!}|J2^YrPnSeY z$%`z@7MHyvzxmx;NsnlW8Br~t^zGjrV8eIzV!Jp^4hW6>woUM(i(IzwXa~t`aksGh ze9^E%qY|a}`p0(xcfAR@OXUkJ9*=54dOB5BFZJ7kYi^rKM+lhq}>-n;Td!IbgZJZ0;=SJljeK!h1-2w+i+xF~E4iQ^iHAG$CqVlkiPPoBIqkOJV?VBD+V>98 zV~sX>J4wEU`X)2%m5tFjCmL30vE{3N0u5n(tufd=^h;&11RtV85w9H~&wmwwUG(ZM zac?Q4gFmPkqP;~OSSS!C*kW`z3Bx3kW$37b56O7gB 
z*~{SYAw4#|)oI1S4_A@be4`6k8=MI{K|aeg^-lP`r+B`?AYj)yqRYw%PdI?5oV6&q~Io7)0dw+;;X%bNiTlY7>c2wDFK@%2d3Trmy6z= zL(4+9Om6fO4VKZh^iZOx+fwFJ)5k17%RBMFez1Jq{O+96R={Qmn?Vmm&}_j%O&ke4HCRoM0jyk{ZCtm|nxWOHLRV7H8k33*`ZxwV#Jnmu_6_n7*1 zYI!i4-QNORvM&;9Naa-P07BfePY-C9fg&v|9!?WhAs36=+dPx4PKMS;?b`N^pPa+@ z2Fg#ic3=FhWqEjQTZS1r_|_0vG!Bu(2;hdFP#n=n-|qujlAh`g^s+_GPPSY@AmYoL z7{VXrD6kQ-xltG_d%UxI`-APm+@gstVYsW!HYu3?BuCmdYUH%5b-v6i03JT{TKBD? zp!c1svxXn*yjyoyF5vn=L~A<>Qb$;rke=>*L=taazp>Qq;39_auzlPRGSU>Bbr(d> zGBrle8JqcI-I;O9@owsgI(;ECY~~fNMeMTx$b-wEATbAxycw#9Vb&wIWH=T`=xJxc zJg9znv6p5by|pv8VVN3u_ajfrP2ov;%qM(@$w09zmd^TqMGg;w3Dz*VitLRD7=UTT zz4CBaXVd>EM8Mau!b+aX7j6L5Ok^RQ)}Pj8EgV{U#il%=GWpk7b!TDx^HU0@T6WzkxzCx$v%hPaM7752HQHo;XtbSv{NCI9yHJ^{Bb!9WNtUBA zd52&$NXJRnWL0XLa-cUx;qRwrg?Qc@-aQ&h#Vch3i^xuv2$8+m^BtbC81qcEhv4w% zRc1ewXkJwl;SvCsY7O}~KV;0Ak%l^jS>3TN{MIhb_`$35f#yY*9GwLVHHaW;3Ii*| z&TU(}RIU63%*p$xk%9bhHdo@u5OfktrRF6c$$C^H_AqYT){!Ufr=T2;6EpZEV$vN^ zaET?zTS3eu{0Yxg>f;lt_@yFVgi)~qC#I^lFlvZDkD&{=+UM3aa?AhmO8?XO9uMo) zq3Dm5mcy#e=NxX2<2V)ZlV)KYUGW6A{oR##XmS#!w}J1%mDW42QZae)?=93aOCH-N z*E#3kHhQgQyq3RR!y?MWsdwr6OICX~Vb@pVUE@E8*_+}^DjiLH-&QOA%W|~#<<*pC z!r>eAV^H|{YMAMz0SU6V`wUgeced}hG$ecVDFST2#9>)Wb5xGa3>@nhd3 z8ZZ!X-1{N?HsLkIu-cc&uY4j{1dPYGizAoNCku0r%H^936r_&;Q0; z-_#zTGz;JqL97pNi+iPa0h(dYUK%Kc|Bf_fYCO$mPGAsA!(aEJxWMGcDAIiER?cQ{ zX;-gpKQaJEU)XoEYFV>g?-km1dgt~alVnhP0T29(RXX#yfkU6y*Mis$6p9AC^orUa zHlo+My)u7NzJI#C!XOXd8vbC_?)t-UQ^uti&R@BCBa z4B%c3S`Yyg7noj{5JV`04ePI*rQUsbTO0@GMVgCHtof?_lhAGfPu)=vW`+GraKG z>kbEJZHB1;wTu_P5Fdk_mmA+FGD}x^Z?8BwF9~r=@hU_*UBV`6ome|IZ|1^>i($3h z`Z;eIrjVa&jr?@+oSNZ(66+rbWa&iSdhM58FyD%t6!WpU%RXi88dKMz);0AD=Lvt+ zQKVC<;v}~5`m5Rgwy!^_P(gIiuNpDP`) z_SS`o<=ML#mRB`9q3BX6@;*F;Tn33dccwk^Gd9onb()zxhVuMOZt27f{@DyDhJzL$ zzEZFe_CpMKIAIO)^6qV5SHg_xM@`<5AW{Z@V4`7U9+TOfDrBN8jn7cbK44HrTSND4 zZ*A%Y_l*YiC`=qmltS4PpCY|qD6T&m?ii>tG&JH9o&zhqRn#mf`ug0V<&Ewt0dr!67M+)iCU=6 zLQnAJJ_Vd1-YsUhw)@tG>Q^K%fZwF+XHRrbi$2`rcttPf#Cv!LTvHeq}wU3~z2nf=M3+}fUV6{;-f 
z)gEOhMgLh-exOy>g-h)$T-v>!aa$7ud)QFbFZQ-*9C<9nH;b}~ju>;#@% zDn|}6^D~PNP21Obi&n?1_#C|fD{Q`#RUsZ5iOO~9FrExS>cexqi~)9sM|0H64VIk` z-)NYJsvKXNcl~4$D$^FY6SS4g4pey9m`gAz%Le;dqRmL<4jsJ*K7&Kcw?>=1Y2mZ`n3dn45`KE}Ysn{*I(? zrT*J}4!O#hUhVMi)KmU8)p;;FG_-9RnlGs__v)9e=bpZVhT5IZ$|F4X?n~7JOxTUlc`Ju&?$WQs%sJtp88AM zDG78hAcXW!tX}#z-#UVu)Wre?#d^+T zVP5g^v9VuzGliOgP4Y3WK^`UZb*j=EL3`NFfpw(_{f!%EN78=0juofDTRE zYDlofiy;fUrXdaw%RDv#xx6r7fX1}g#K`dNLwe9hq%Cb!iki8g=wE2$UT%-Emhq^P!hn&5EtkH0cdet88|-B(8YJCy+gAN` z$7|f9gk8=1PV>qZF9JD{t`nhbhI__PQWbb}G5Nc$m(`{BB8S(wLoa;c*M}cVkGvX6 zhQE9nQ@S74El5FZ02G^+>h+Xo3-hP@v80b^^&Q3*R5nj!HKo|+%QTm+`@*K(biYon zqZ?42roC~P*7E8Yc+Y1VC?S8%0;$`=r(RqpWMq87K06vlLU$I7g46ij$vz~%oF99q z$~vFv75>P2_({4~1730e0~+C$jU49gSY?CrO%C)chmH3tmtEXLf=U-%IK?bJDSGJkS<7#MV_*AEXU9-3O@jK2S&)4ebgHc9Y zu%(*x-iHf;^L2#jM89J6Ycu-46WPSPF*IXOM+a8o1Z4MW(TyJqWt4k!nSe2)Vam^W zgT3sa%B#|Jz041Y!P)|D2eZWcdLO7W$(FVaFV5K-Ce*Aq5Iw!u2!IAp&&rd>GVK6K zZdIeNnSQAvdd|^5>vjs0(CSSp6hExZogVI`>0j&syZxCT#_g!E*~&`!M-ox*Rt1Qx zrgK^r%($0%eU8`oIigG}uM~O%z*pe9-t3Cy%qrdAoF04VpF}w^y5u_2w{_7w;bhRI z6NMLR75&PML(7kDKd?5ZqfXGdu{h)YA@$aON^lK)aGX^W+vk>l*`Fsb^(&E_H)bW( zrT*DZfkM`Y&#%l;m&_G$#7B*Q!)$;9*ZZMyD?doR zxpB_=0g#Zoq!xIH2d>lTxJCbt_wpJ;e*(l46!~tMjwRA(LQ?ZlRu(d0ef^IU36!>MhzpnSDvFqX(CLD0A z+0m{>$Ezm@UC!{x)ffAn{1W@mW6(n?0}C|hL7i&Nt@n~n_?Z^S>M>!p?H!#X9TxS_ zg-NUEA+JIJ3%29|cW>_69l^%=1(`E=1N+v^T4na9MmbLOEos;JipNS61(f}nns{E3LS5ep z{$VDn57Bqsb+$A)W2#p{G}hdY(ObWb&ag_rWUhw>`P ztn$Oo;rZW+He+Miy%qGwmxm)>)KJHE!bs^_XKu#BOYyQw@ ztCU>}+UZlcLNxSGx&yJ)ipcqGc#GrxSQbk%#CURApUFeSPaR1EKib~x=V<1Web`Dn zzsGkOpU|0n?ZD+hFvMns=>$EBf`(KFWQ4(Nlu2#maUnTVl~#nmb$%x@&`6HQ)6E+d z`iTz9EY=vB?okl9k#Dg%b1B|T9!qb_-ssVAon`7hDs#cL!9*=bKB*#LZ}qHRQMtW2 z--*-dnj9fM;wW(t{%Bb@_UUdUSD@|=**G5YvkgdqiV(bZ_;JX7@k_o23M z{$z|XHn~leOn5_dp@FI}KRzeMm#+)MCg>fqruromCdxlKFsRhKvV-=?6ieI?{)PB!$z9)jLjB+ zy>aveVQmIyo6L9m#S|#Di!J7|Qszw%5Kb^pxfH8(m z#7|7SZPH_dJAjxs{^?SvyNB_t29^mG-Ft$dQi@9;2+!t&)s%G_XmLSZ|P{@KgapcD;u<<6a~U8S2#JLr((BU!Rwzv_l6>w%+lUU^kQA_l%0$i ziiz|a$ooDa?pOZ#h(mF|d`NWyr=({ 
zzSVHT>tO^<+T#Juoj>pl8yAC6v$FTq!7r~{fw?ZKN-BqtNC3TFZ(1S|eIko|1SAu)~b{Kk9BS%XCX-WCrY)zsA z3m7piA zAKnmS1j_$=#T((PGNil-!Cg(~$#iI?6&7gFa!1axt-nv&XC+Tuw_7M`I9}TgGI`YL z`nM#9FT9TntC*5I&1y7}iGJuZv3Aq0Upm11{XN#rWflPrPK-OjKHCyDRuSXL091eM zNtqp{_hS{`;d-!g+T15gRF%hxVXg3QIi%mLH0jO=E*O>#w}K5oV=9V zD9cG2K<18xili}QjNd=Zhu4g9e=kNNQO2e}-b_$lZixMl&>w$LPhF?-0S% zL3T+VySLs6pQ9OlqE5q#_)Avtt{4u_oUGKhZev(%K>5XV$;e{9{rBl?~cvMftf%WB0ezSBiNj&$rYa>c>;XHiAuz`AdhH+0#WB!os194 zn||m2-rv%XVs+oFL<>9a|LP#uF74})7cahd_wM}GqL-aQ|B1L3A_ub7t`qgBO=!hn-1 z#%5L@4b1pcQ&1pJ>6an(*^H<|!n#)H^`B^+NVbmXb^H?ih(=xoCh;+C$$QzK*kRv< zBix**AHQ&*J|H-~@c$K?eis*U)eLvHTG0k1XDVlLsFD$>cBblo=Y0o~x=Jldk$Z{de@#NCz-!k zK|jtidl(_t`t^1j&55H+Odt;AZfP& zF%4JQkOWO{IB%8N&nFDzJNKxw@}0xqwLYyq@rlb;QdWFyTj(_-C<1k=$OP`tub{7OkJN3ok?KGl>& zz@qnDbu-iz2O90jVAb+Z`cn3e`kv|da7EF{VleJCHk}1(Q=KNXK4i504r+T}6BiV! z&nqrk6W^rC^OaY(lL+!xY{OqU(x1`%;}QeT`hozvb0SJHu%C;jamY1qV zBGEsv#1qjU@mddlcXZQHqa!qpzl85yRvNV<=OL=pK$^D5q>o`%`YW(IJVzo> zQgXsHYm6JY6uZ_0A1kdbbL_)3`rP!0hUhv*18CalfjeNsnbZ*^U)Ay4XM}fG0omOD!>5NppDJ5~ z{q-o=Y~x@uxWIH&XqtU@SC4?WVP1+x(1Z&pL2$vcLS}gaWeNP~+zZ!WLP5r~xJ19g zUpdi|cQzJBiH4ZQ%qIA&6vdwO=_SR$b$bO#Wn(0?ox?g_GU&<|eh-LhaV9pXKNV|1 zAO-rrcw5J7#<>Qa%|#yaU}24>a(?*jGDREKP7HW#{=|~Ux(`oSNv8uiw}_1;h=YjP zK5z)xPbA?$>d8RGs2X#GcI)l82DMMx-)c~)ahyJ#E*w_CvFU0pPUM+{iD07_^`9{#pHFPO9)v9k&UZ@9n7FIAL8N!BaPU#TLaA;s*5@{dh15xJo!bCvt%Ww#(gP z)xt%W#1HN;J{32-UaO@ey5rl)7WG-g_usRHnc|(1ec3RJ@R&DSvQ74UZN>V0np`lv z=j&`+n@E#eJPWDA=UD~os0!J$MKRN=x^3+y6(+E5k^`5{MApq%bzxaqSG2_KJ$8VX z2D7jJ7Cj9aZJ&>+k67*_iqWM5} z!ZPW>xCeFY5R~A53%1l8mu0zcySBX(J|YVKllMWWPt>`ODV||5Qri*^;C*T}!ZrTF zhK#ZsEVcW?-f@u`LJZ&#MBPWbVA^Tu{3eY&8@1g;`tKJg#0fY=s{do7h3|6t6RKA~ z2gs=RDA;_77($=%(sD9}dz{WbMl+L9|09s2klTB)`gAxB7B<@IR5{AoFgqAfQc46% z{1$eP3|Vnt;v?a5EZL8`m8mu+u{c0#as?gMZLf(~t=vlsN{J}Q z>ngYYFiHA638TUaz}W+mhSq&stWulj;J>u5Ly9@%P3>*j9)9|`u^S*039nvn4lGNJ8sU=jI3&t+)(ferS z^M)wq$l$t&RPVXXpZMXgSotp97de=&h?0KlU(+SwEpNXowVd9a-1B#4&wRtf0e7HBVLvvJ%X0^U$&V%ypv(R0!rNsdH5SWquVX2=)|Y~{_1PRPpImH 
ztvodwKVrMTLuMj)z!0N@nc5m;W z_;J*dt$Sl_uGykNu`9}C!l7&-SJpBgK528($%Cu)E3f3CmBzn&Y-p5NHy0_KdNoh( zUul@7;G=Tsn%@imX&9r(8c(qFZ5X^`%KhBsOE|sNx5We}xjtU!A&e&Llwss%6o}Xl z?cgzN4GZ8x=mP-1oR~B}c9X3*wmaZo+O9~)5#$=@5*qzn?*-K5Nu^YWX&rNjX_qKr zZj{pIkK|FkNa(Vlcw+uA(a|GIN&RR0qcOU)lXe2ULk+Co zgDxgw?$xmd6#VPWUxn@Q|Zp%g~VjKZTVWH@}F`Dn)I4?x6%oBHH2t#X(;ss zCY{h;VIK#_RV5_x880PB;;y%-weKZ-VvFYfinD?*nkbamla!p-fxwaMYX{uvmGCqP zzght~&6fG$jqsktXa)Y1uB{#S>aUEqv+hiimlg+*kxB>6vwaT9qkx_oy+A6=Z~ zSc4fx$uWbHrgw>t2ErZ3th(f?!^~bZRX-z$B0shWC%3^@=w{m*F5iN*kZmvt)b-re z+7u7V{^Z|pb8Kc+qqvS`8Tm5wDs=QCM})kUV(u=`&XkdIrpd2_?}&yjbRSAU68}JQ zx^Djvvro?8%Zk@OI#@?$Vx?@Q*qJ-3QXJwPwMi;OjrB6ZTXwpDQ&SejLggT2@~4ix za@INM`mDWGVhC%Brrr?w+v@0V`v8XAP+7#0mFgT!K|xo0!CZ&Nx=-^^Ur4g9W}he7 z=99_A7flhO?`<~w(kz2-9Wp&Apz0Vx9txZ+4zGkJOqWGls0D$&ul@E!#YlhlsKPm} zm#;?63_aLhZaETO=S8CAay7HJN3~2A3|SdLG4)|Lcphq?HpSQRX3T8jqSV?M0&xpI zhZowTyw{d-6bah0F*2w6^@B$ExhhKn6HI?tFEY*Q;@C+uCnoTQ*UUSHj_I|@6}C7sb8hSDgQ_|Ji<|3pxA_>cF%F& zl*zLgwK_H|iS+PU^3vtU=xnaCc0A`{Jh7BfgW}yzC_792lobRKQ1_?hudGH;NZnI; zBEHsZ$ulMKGx+`LdN|h0?~G@3i)_{39bdzSvo0<2pucX@ANM3Ns?Yjgh!^;NIpG_T zmJO2~>TmwMLK;L(J9QhQ)Mn}!X?Lhk|G&Fc=ezt_S(Suf&5dA$4wcDg#1LSN!1r`BczJu|ai6Nnexys6<8sMsx8Ik2QBPUq zPE}bDW{hT2W(5Oz*ph!K`J6P)mg_;tJ;;+gD$y;W@|XHgS<0&uS%%nXEy>&bSf5xB z_mw%skKbG~rl2T#IG->xi_y_IZXR{nj7({DzD7b6`gO9wU^k>N3cff!d=Gk@E4kb*eKtvm3AJlW;s3GI1_fl> z^qgpO)mCr#bo*cJ?9gnaCduA@EV%8ZX{l?p4-?0~_;U0J6eaM!+q{j!IG};>^oTfD zHLEb$E!7QIb}%lJT$epcJ}=#E1{q{FAp3vP!hdC)1N@Ylx>(haLXyzv-xaPkSsvFA z8|$PthtQuMUOOCTDJeGUFcNC(a5n7zO|>YP!bn(T@BWoE!G`IFkh}Z__ft&6DFeO18Ho|XBTH``*H*8<{U>)(g*SF_0Up5)K^e7JdF^)b%1_cJs_OYFhL+W-? 
z08+*BOPMCY1`oMZ1nsGh%{N1ebArrA;$MD?KJK^(CK;I5l?aA!v339J)qF5K3FZBT zli9u5j2{PY_G9IFXZ6J@r%1S7>xh#07uwM|EdCkfmE)I9j@033Rv%ZvF1PJ6|1Zps z1E__&dIl|&ho4Y+xqasiI1mclx1Q_JVX$oi==>49rV_`C>yuHHLpl2G?ob(5)Z2VL zyV_YmG$xM>a0aC)*D1`X$Zd-3W8H+JUv{E;Z~aS*{H9&~F3<42;?3mAJ3p@DFG+-X z*YX{&Kx9{V2#K)kYOq>L36EFqY1}E|JqUE`C4MB~6P3kVht@sTf0ZL@U(4>GL=sab zHD)tB=)tI4#0m=E-X4o->~8^3my);2MSJJJ31_C`5p}vI3=jz~y`|+~J+0~VK6$3} z%miR*#Ma40wD{nE`O@z`8~mlZzu{kjStduJbNj&)zpZgq&(QC=r&I@%=Tt~EZx!>~z`pK`Z>8>(NOP;vVGaC@v*N+%`IsCj^3Q4Mcq~!4Rwp97 zpV$n_G~GSF3SQnYOW*Ee@dWvY1OBEdPk!ZFecNd)Hw+CWn$gGo`$WIopw*6Bc@5bQ z>GAAtv)YQG6FfBW#5vYTdRLn^2BOfNNJy@*rR679M*>z9`91W`bnh*c&5J))F4BECNuHPHW9}0C zO~7j~XXjTlRK@1qPt@^EZWnsw#Q|&uYdJ7D@9j%tK*ot&5&KQ*=ffxDzZo$$JlJD$ z3lGodcRM}34ofd0!0|0aYk6^doU3PWK!-fDKQfrAURg!yW<(C$iai-VbpK`~(Bph*?;HjTF^$2SUQEd74rlnw6e2cOYB@@0MR$6lw?gN)Wk9}LA zU>FoHp`y5Lv{su7WLYLZ>D-f_X4NQE{gPDpv|WGqqZ05w5{Z9CPd7G}Jo9J5Dwjp* z&Ydbo3o-THrQ_<=4MsF>p4rb`nZX_rjp*(4IhmDsaUxGJ87QN^E*TI>s}Zzrgf}1t zZ9m>EeeCon4vaHX^Y|}uN#Um%O}5|VJ|#FHjqEL0Q$KJ0jFdRP|4{wpWs{Za)sQbz zbiLzpAsD^Az~V{y*@m@iD*#gQOuXEB2Bh--INMJo`(Pmhe*qVsh`PANVCG8;>bm?Z zVxNs8$J=+xMvrAa73NFOi6(6S1Bzbd$(pWU>;NQNLP}mhT#F9`sX%bw?XR^{pkIMp zy_(~xyb{Q5#!>KsmHEA>qN9jMh}k}iu8CW>jdGr}t;&bMV`2P* zRr58}?zRDy_%EcLs)wLNp>{feJd4Wpgz#vTxZAKbd0fipwqeNkW&LIkO?y9Y@?$jr zA0&nT6&CELje6r{#%%j1vzvf=!lOeT7p;fyH$`B43V)yD1w|3-)O=8!vK78?2z&Dh z*fzP^q)EkQDyK{+Y}L72Q9O#4K`v^9!38+6y#L$lbFn~F^3pXHcY>hI>06K**Vz2V zwj#`F?Z`pHFU?-n;52c2l_v_ohHj{<0S^CVKi}E zXp~9yZR@{?o)?D+hCZtI z0pV<8j(+LQ4y*EIpsZI(_b@iPt25NfR~O)crhmL$1$mcBRt{ZP`MbXZuev7G$Q{ep zu-G0kWBhKfyDNaQt5D(x(?F1ZxrTI2K%(7~sBQP!-fJ6UrQFLwvD$~=?T0ur8P(al z!lZj08RS0GwAp`j+v05IrYG_j2pQkc1^&Cxj+pbgNe~R-16xunEI^E}XGAIwrfw+iX{kEb-uAxv9?eIWN0`KsW_aZH+;5 zIy5-9aQEHjQHWMtb)fw-Qu%sV!Ej@>jhPbbcfo(nScH#qr}^)BUzZY*S`gqc_d6BF zb3x3MxI^QF2nZ-Z?spP(_%=Kt7+?JRx&ri@rd`{@1hsMIXsxK>Z`7vxk5p+>jmf>( zKb{=}Eu;B`txjFR%t@0lm!9VsqF00s2V>xwtNm(O>4rzesR{Kn9dN;8D~0nbSeJ+l zHl4>9IW>W$E5^&a=9?#gxp$ithK&Xda*QuJ@TuQPkS0WW-5#$2o7QQMCq0#yUOI{p 
z?M~_*`<{xIm`?c9 z?KRyCtJ5b`w`GZn_t5$9NFVC-rmc%dbQys=DQ+wSWT!Z9{X(+_gbZAIrax9L)IsH% z2;J_)7^J%*y^mhdg`Md@o;~BOHKQa{5HPdsk4?9CVqFeU>6D1e#BKB#br$wK{q+_^ zKo#Ni7Vq|iPWIb=vKr^abgt~(zOmnVCBU9qLg|CFKe911`pGuS#p+a15St)qDq~la zT;h{Y=KSd7qE!mQc(p2cQU}$oB;KJz1ga}Bc zm|I$B}UTxIm4Z8+;H#cBb5Hu zR-Zqa8a%ZOUuAcDzV#Ra!F*Qy+1Xk8&cE+U%ln{9W`bP87@@@q6SXoBQoBhKO9&6A z8>u{$kG1;YtN9Fo%QON)$6y9ppa~x@EUeBD%3cz&R>_#?BDXAww}?r@8QvDB2v2*S zrG{gJ+D0Q?cP74maSJjNH8LY+fIK{1j}Q%*;k{Xy{v6`&JYUtlP%|%A)^P(rFS^g8 zYRBKr=}F-15em1=>GXmBa;sNm4$#{0JMp5*gklWcZ))jYK^V4FHd^Mx5%m5>FK5=1o-@H!5 zf8`E?uFxyn+=zD`~lX!Fr zVho)D`TuiB;z4BLu%t><5&dIjvM@LO==H9uVz+}F#_@L0``!T_wIg>3e@?oVO`!=&|MGU;V~6Yps;y$;A#J=|8x{dE)tsZ z#3{_usogg5?$j*vH4z)PX5JjrMKfl(lD-3Vr!&maWDEKM;tpmn43;sWamW#KIe4&o z#jV-?Urz)6i+swu5914-6AjEpUsoVoK4veJ=1OplAkfXlYnfL`@qD}{9G)*3_6&bK zXMOLQp&ZvY<&5)rd|>D-8}pTh48O;rw~{jgx@SM=un_R_cOKCnEHuPD@uE2XiN8#N zF^AC5iCioC!QkL8ck>)WS3P#ETW5kjeYrPz zok=fP_X!xAK{@&=pTK~?1>9PSdBP}&U#CT?-D1r~>9N{U#1Eg*FFI6o_c_-mt1gL} zhDh?Eu2GG$u2#lHa?vuQ?2neI#NXy`&*?9aNs&h1kNE!BKgvQFg(4eWk%=!Cs)K{u zTRYoLa^1cMPYVTxLP1#A7VnkDfqbhJ*_;(>jg)5G(&AOjokJdeq8Vo`^5~JAhOnuu z|9*R8gu*eL&T(vV@6A7i?%N7Neqr$6vn24W>f?|SfNxI5B$siD)-DOUT~ z)lV((Ejggm?F=vUwmq6D3ZfsDey?hsJai+Vvs1xVG8RXBG0zjP_;t=NBfUqwMqb|X zdaqz>zu3l6?b}jieK&fqjK{m-J0Oe00(n@A5}^Fjm}rZn?;9uu0p92O;mO-v9kzT) zX2^n*iHDJ@?Oac|3lk0VCQNP8io!UaKvV&4)EBGK{D;pb@0k~L)@Ch%az9fGL z-~J-aZrnY`T@P-4xedS;R&kT!Qy|*qWhNj!-y`z4pb%TSLE#0J1$V2gaD_La)A7F2&v^S8OA2d3kd2TT&dAv@tT%eeK)$ z(^``96;ENUJs(ltKP{#a_5~!?q}#=!i#Oi_P0gqs$%IknED{E3)t1bVG%Q?p&-kAY z$(U`sng!uO84h8KCQY-9b|Mn}Qu5GK;!rHX2sxR6f`7Y#Ju)D@#iAY;eu_LeS*HPJ ztTvoau!+sqbk+Ab{K}st(O);RxZi6E;YxhDU@to%i9sh~*+;x%T&E$%j1o?lqX@o43kTaI?AC+mZQ7lznjE(=2^Bn_D_3m8h& z%nF4g0+PGCRH3aC;k;8*UKMJ>(*ath$U!UnwKkWwTUTUdC_MJ;N9%}9%_U~K^Yj?M zGs1~5zEUY78Ysh51KP%&t?ccFg{tCVmlF$e+pjbOofQ-X zm2%BIXz6HHTaSyDFY7rmriCAkn5@|B6uRWrp*%ry$J56M>7h7C{jPi$j_gxY;@pUb z2-$J8|7@LvGcDjpyPBXB&o=eRAeSF2L0lsL){2Bs*`WV3Ne|v#9%d=cKP8xZ6_vdf 
z(yF%g{CGoL28``iGV*>+Z!14hj=^l6aP<7P5nVF}4!tqinMgMdAOmx>VfXVR_BV9J zil=Yw%VO;+u@4iruL2Oq<)WExZQ7-AfkLnNI!oT#5>n5d>ECjB{6D(hIx4F0ZU0po z5do2s2I&%z7Le|c?k=TUVnC1v>69)(TDrTt8-{Kex`vrK+wc3nzc}lhwfJLLvzS?H zv!7?*&wXFl=j!?7EOMo~Oog!gLB@b?u+xvQ97#F)ZPDv*@#g8q3my&p?6x*21-Y}+}bWJFi79f4!%sfmTkGN z08vx(k6jF*OqFOST+`)BqRe5!g(dni5VNV`XaxiIT!G>q`8tNvS85zT>02$_v1s`FgiR`0uC)j zt?~=Q5hEQ0bSO#!Z{Y0&{JfN%{_b!{C3?F8kQGWBC0(<@ll{d z*PG;Gq!@QGY+~OJ!jY@E!)7r06TE`U6hE@?rb7yUBHu-rgh}O>k?nhknB;Cci3cu| zR_i>cVI*Y1bFOmQyJ6|q;aH(GSEVA`SIo3utr8nqM&=wn&rK83SI*{ZkJsX4Q`xsg zyuu}{wi*0ux$as0O|qpAU5=v^lNZt1_2a1Yz4G(B!EevVK9A;6-*t-P@=iki`mL7H z;8;jCt7vzlf!tVg7AMGA_@Y73o5viL)h$bCdOH0I^`8Py*x@p=OWv;w#LN;mvW7W}>vmJ$*&a$GrXC=n-oT#fYW{u(;xSdn7 z#P%H)5-tii8Mcg-ELA@4Ene5qSL?erS|ZRAJR&3c6bF}vs+cyi&RQh^n~uy~1pzD> zpCcSJ_GP1wHpQzxci`E|Pl?Y5qA8Plx5LtDr^i^^-H){h@U+TQb)o1cSh}s!qcK%Z zZBPn3M%RiDyt5DM7B}4-(g(ON#rX&`SsM3EmC+UlzFGSSoUMDF);Yj`fZTuqTIsCq zKq+>o=f|CjIQ_-r6Jn3&V%#D*o*iL0i!ZCba1Y!*o-}c+7~jlP!=7(Hr$Ie2$`Sp6 z5EJI=fNqUo{t{z9cO}n6xJ|6#*v!kQRAnT(8NhIPaZz4y3o{xAN&7M&37cf>Fsex! 
z&vwPphSrsD`Mgi|0S|o)0bl4}to+D5-5Qu8G&2V+Se5;Zsr;SyX&F2H17LWhEzCS+ z-R(!taTrNrDLM^k%JNjSa(E6^NpoU;Hi2xnn&)>gf}s^4ILf#}398WeqqZ#V(e$9Z z!QbN!Kx6XEu}E>PVIS5IM8<0XD_8EU>6oLUT?lG?Glu^M=h0cKddN(I7&(oIRtNdf z9F;GNR!uZ*{9b0yuv(r=lo{en;3y*A-^}&b^5&tz9t>PHonsPFF=!n^*bZ=@yjcI{ zmAh#ap1#pxw*6fI>oop$$N)S0<-xX5eTOm8Q(mRl8)o8sB+*9>8hM;7@{ZpUE#u77 z8RtuliQXKz(pp`_;VvSw<$CVGZjLy4yfn5Pr0dCqmA`BAxk;e@AtMNWr@4wgu$n6N zl41HmJ!=CW8g1N9|JICY(PlVZ0Cj<8hUz_}7#NWp&kem5#oGI}#R;x6OfS_7ZVGi& z9;*TH_Um;4<$}gt$u&(?CF$AU?4NHs?A{+8tvJ5}IedqA?cqzt-A8u9Xny5`96aqk zP6drKP<$5JK{9-BfK-USIL};B79+Sq$cRypAt(sGiH{6D-V^0I^SZzYG74TJmNzI- zG|+9jiWY708vX%*82G-d98EWc8u#PPKmJzufDd0z8H|YezZfD-83u zxp{-VUq!J|XE4mit)mfEv^fY5yvd%vM>dHGE7AaL!>7{`y9yu12ZgZ5>fR}#G$qoS?Oji$)FUy_oO9N^>J$bmP6Y||6Bp)f1<`u-%2zeYI28_DT# zM)yp6N+GyJ8VS`S^43%$$Y+7@tZm;>Q08#;PrAcnnQ{DPQ_VP%z8eL_aL0La#<`zjE;d_+z$02h|k+8E}G<(RRK_g$;W(il86HNBd4vTB)oU-O4jN}P;)wKDe-k`+W zSvc@~@i}oifwWx7`?`!?ipf^mUY@wdyN$LzLa*}S1K~lj( z?LW~i5x-}U+jBb$!USKAhjoHd-0lpiM%V8Az#t;lvq#HIrln@5ly^-~K5^RRInTb| z&hb_I#mV}9+P^uHBW@cHn8zh!?hfC0kvi5XBj6jx{s|8n8GHoJ3yvwN_&2Wq%c=B8f!v zB3NPS1!wh6%?TxTJYMc24N9iYfq-^PBc5PYv;9CqV_An3J5ZvM}QL!j-tydL%mPa_v zjY_M(fzE`k41+eyO?Xx-~b}{Nd|ZeaI>8X z@MS0y2$WF8&Y-%nm5{T5XLwy%3TW5~F9=u;>xOJn(y3zmD)doai%flbP5^N(qakT& z+!qi<6hywdIp&$|gZD-HCvbGYKmLTsoR=%4Q2a_dUV*gFc2LI`*i)^)OO*$v_vXWC z-*Z&HyU7E7#1a{K8dWpu92Z{#ZEIRh9j=D~i2G+A+_My)GVP*vw0Vs8v=~yc8WncB z4}eY!C~Ga5C%4nS&fSLR*!-|Kr&1l&8HM4Rv^|y_?+%Yt&M5kasSHv`^LD=2-R7CW z!VBC1{(^EXHC@B97YyCMfgz({y^ z)D9=x6WCuV{kpEZ#q`ZhX45;QdE3n$Gzb{#&X(7x$0U>W_dQ$gPPDr8@FxVMp71#cb&!(Y|9BW~zThBzI5Da_W7sz5vG5=ThhBHMQFAOU4Sv#z zwW1Eoe6%n#H$679j!EaSFEo33!dah5euMq?!{-1hbDX9A!Rc;(9(cmkufAZZ`%g}dCS0)H#1z#{IcNFwVICQWhn)}1R$ED`O)^z zK%Hv=A@2`wY1yeNKXZce^9&1Iy2!QV8{6vxz4jQT=V8t|Hv7U+U&!>8VDXwv@9{Mp z0;FUg- zwR^h%;_#jvDpUXdD6M&?1s8==+52V`XA8IgCzCqEh!16J(iu%XIuiC1=N6I$f#qhq zZ&QR{N5yw`c!TYX(Q=b>Kwl4_(svW#>;7Zvo%mMQX|b$J{3~!)nY-&oOZZC&?cjNx2$PEvwvo}0Pk(H?BQzLHuZ~~zj782uM8H2v#B=n 
z#`s)r@x(ruBa{9-7bavgq5}ATa}Uqkk#F%<&esYmXJ4Uqn{h3<`n%O*USP+PpIjl|anJhP6oEtGDSCPVx<2R+tu#5}A}^zTQadNNn<`^>zSs(0tm zJ7c7FCaX+o2_{|0x$d^^-aDop_v>=esXK=4R81I!`W&8}Yy-kkw`d8(7@Nt+IV~Ov z6=TlX;(4dIsO1Pi{7MYItuXZd!|ybWV_XU$mQCm%D`>t-OHJQAe(uU2B+65=kSO44 zJ^WiKU4vhmoK{V&;0N&^_tJW`m7jkxW~ww#SKOy~r3ux2iTV#eRT*Nm9kpEioS0D- zSdT4}ikcdYYp`1?v&Zs@Src_Ksk};4mP=7K%v`hEJlWA3V-BNq+s1AXpv;M@M?MO% z-J&&LiD~f|Y%{SXYT=E6nV-EreUt!s+V^W$v;6fcX<*^rbFk~qU)!ZWhY>p-2*rJl zV?LmR>f7!gmpQ?qquIazR7!N5to~_9KGfKsK)JQtdSmgXhj8O6PM8=M2?ucRzepff zSZHp~5wXuGwO*)yZ2;ONt==rR8fzBhvvBD2Okj&{m!kLDi$453`askBa`0WF+ab*Y zyRZ9*)yp6`-{?oPtU$&Ds1mbnjPa)%~h>Tm~h!tarkH6iN=ozm3#A zv3s!u56j<>*)IA_Z@>1jJk94U*kL>x8!P(4NcP*S`Ik0`UfaPFoynw!SOsYt;Z5WB zcLir@d|dE}R8CE368dfgr6ftsU#Fi}1mDLff&IaQx1qy9&y+%~tdZmP(}Q{b;ju;H z?bBPh5;z|&`rww)dc5Z)Laoi5NV#S2`<7SYy<<1J>AHg9b`KZCL}ZNHB)uucu*FlB zd%^0W0XG^1??bUS{&>uz4bE}p+RF=DId@HSG&2t~j0WUv5vlYoUKm&QzNzvAEMGcg z+#+-ws#;!=5*&8i`!zlH6U9%E^ z^7Z}X4yYoD{Fy(yB&qF?$acZlU&+GWqBl@p*$!8XXmq*t)jq<^eM~V1C98F0`w8~n zBn1C+gFo!GaNGBJhy+Mcs)eggnLy)TDeN3PjTwfE&;=o;#7%GT;g-r*R&4I3iBc$PAMj=AzKxH|(N|a> z7pILH&;prJ&TuzwPPt`?rtow@4|7FapuIOBoae*rnLbB> z4>+Q}456=z*m?0g=^6EFyxDXCQYZU{cwNby<)@BF_rD>T>*PXiQ)Nz1xU#g*-~hSF zZJ+MBDGIiQk;&#Sa~E8)(KKj+5esoOvJ=?ix0z|9Z3@Ma43#8}({Kl7H#H+wRS#Y* zU{IHsgdJCk&YTFczQNUbm)g@!{M?4is>*4)Ae<#;9r?Ol$7FMvjJBr^hg`5g93^`2 zcrHI1ElWV!ZMl|`$L28H#s%L(kXBu`{H^=-V22-|Hx#GO>$t%0x5>npB>iAg^~UA& z3ZMzhG+&rJY>M2A`Xzv4z3MxYt)U@fs2*x$h(Bq~8C`AI?3NoV6^e?BlojT;0eI{b628z12L}7I zy5V0tuKaGJ80hbR$F+lNLw`OF;Gp=t zNPY-O5-Q^Hlv?{`t*dS*w}Q=Ws*^_dOv54`>v#b`jB>dvWrgH zUMOaN6l>>zG0+dkEJ&mMqMPPAD1?)E5>EDnDqoPvpu23Z5)R{DQUjJ&HBw0@zl56bYpj!p z;MY|aXzoAwWBSm8;w^Hw!V0D+{foCbL20Pm(SDXGyskk(3_$2; zF6tCv;8bV(Xg62)x(W5WNY`1cym2~*ml*IC^t|A~VhnpxjB&jWj^#)nPdKsuCSFPN z&4wNYG98;XZg`gE>K`kDa zyJ)9`U@Ahh?^13ke=RaMgjdnCEjMk3)S^>{L08&@KXmycS@g9AIG^73mjXmbI+JV(pfB5Ixc(DRfD=MDe|@L3cW+TylE!tfi-6wH_2%PZIQ@ zc7|T*S83)zN+&z$V$S;O0!a_9iK+EB_9x=rJXYNCU&89ZzNJO9A9WJ95ajGAo==aog>4KD>{O(q_Hnax{g%_MI(8x6d5qy3ewc?(XPik0!Y@mA_@N 
zdutZ1#fD^8d==c!Pujc;;&=p;nd>9W??T;oc%tP4_ zp5>`exidk(6`*unWyrq~Y_cc24v$umu@Wn!mAZR+WI6|)6pj9Ee|)@3RN}E{EwZP1v9lZ%;B$L6$Az`tm_lmeyHw-T z@P=JYXMjH|vJRskzgKV65Y(=Z&W6v6N)wNLJ%uOFV>>TC5GqiCJz13dSpxioEfny5 z=IRTH@nY@ZN0X(?zGZ*n0q>imB3JCA46f|+!vT_cZYngaH}I(m+_iYQ2K|MzqZQoJ z#l|oBzFQSiFfOZV&O(Ew7kU#0*&<<;5iv4~&qcrAzqr~4j$irqs9m^ej(AzP=~Pzg zbzhY-Ii{@c*H_ValN4Jq^q|cmvtx4{7cGwM58X>Uj@QB$K#t#D{8+S-d_X&0dIKYi z-QK9Kx`rLQtg#=#uMUIeQ~N1MzPY0Cc#T6Xm$r}Pt&CupEQ>r;zuXi9xcbmS3%+6 zWbgOu+n!t$v0W*b$~e4HT=R!y%S%&T7V{p#b(H)D0&CG{}lN5MFmga7F_WGAS z))0W#`IBG9&Mb{et&tmOUG?ai=r-O$g>ggfwUY>Qx+ z?@`oC@G*?#@8A8J@a*4V6Bp)T_?s$RjyTtKZur5a5_W$H-*A|x3 zWw2+=A~s6dwa*jFGEY5JSev3Yaug*(x_?J{)409y@`9v65YW_b2X@iy|HdYL>c|u( z*xt_Ej&1Q6vrGY4Aw%T;t=d1xDShwj*;Ba#y1r1WUf#A;9&HUTeWxlxAlzfuO79e= zyDS9!u2U0Bj79~-hPdv2_PJR2+jL29cXvu=mdUWz@BWsvcF5JJ%Tp`WCcwnv$UzbY zl}kGdG%qMAGgQR=SdqZT+{c;VxPfXXTq>DaLgR&9xGoBM6WJL*_vKlfUXNA#Y28+9 z;!0Ynk*&rkXtL1r_-MtQGkSuLL0!qOH6!I+)T&3JCV$A5%wb#_@(Zl&<&HM*gtX7x zcRe)ve)Tc;4^ANu>=tSJK94XXc%d-Ea#TW<=phawVk63@sfWN1@dXK(N$9ZAG*f6S zu|_I(eds>Y7`%|&Pl=z49G+$HkLzg}A16dmbW#wo6S09P?HO1)Rv6_565eH#nh(!& z(D}@trDTH*xBU0h#B!EyCfI7nAbNEw!TwWpSaOA0&X*T;hmrB)A*vue!ey-H>`tjD zS5z7a!lp&TQE|JCEgh?KR-n5&`?3o~s7>l;nRsjr`MrfT77r~>q&CJxmAwV8f};UV zvFv1AHu+gIStjv|unY{U_=kXeU9MJ-DKoWJNjdDJ*R12G@6q0{>GdY*XUw~L%Y+o7 z-~9Ll{v5vs%=K(Y8!Y%I4X=WeIOaslm@FJ9-w$u&33mb2!PxAgv2_u*W7SkK7#;9FZ z=HR#1R4XXDqJPd zL&LSDN@mC}V4xDkj~WUa5ipDE>x$}Mi64hBZ#{AJzSd=8pal`L$jy42_&=tV2U07n zn^x!SB0&$wC_Nxcf8fT&kv}XnXd*wPCl8WI@+O~r$E{QCvfV>rqp7izSs+#=$b)pd-KA>UeM3d=l@-m8zoMDQS^DBhw^(=bA zSN%K7MF+g-xS?wP@~;FDKb5}a1`fW)LnmnwN#|cje4VGNkgRWf z-KX%UlYN$xh4x_kZ^c^G(%=ZaH$5>T)v>>Ef`(#zkfD2_P-%LgX<_wMo)E7t*SN)- zN;#ap=P8EXNe=VA+har$XFdXAIsxyQbV~|IjU$Q=cxx7skF7_o#Vf4$!Leg_ab#E9 z*A+kQO?sGbpMQ2NzW9YDW|D!H>rm|c+D;>7d`*M@p-Tpm;qmgkskfnmpq!N8O9IJ~ zQU5Rie>wsWy(fKPkdECIbrB{u!-BKEEp*gTVwBRJLG;Oqh(wGPZPV`HBHsTA1|C;o zX7qnpS!=RlHG|6`3>zHEuM)&t7|h0d-;z`=o&SpK(5;i#^c+*KaNzqbbW581!*J=; 
zw>aZcJ^e$)K0^EUZ`6NIe~GF!Cqcsyrftdr#_N+Qm1H*kcVIt69U6vH;81&)1ChrV z8UMHQGQwWgZR7P2%m_<>bT@Kut<&Feg>JvZ1>YrGbR(QvnoCO}cV`W%(oZp5>=?~X zIL$0U3DE5vlQ&6#?!ZoJM{N|lJ4{kOZaqU9$lYqWHN`%%@H@@-275_qbPD5tmwfj6 z>jpVS)Zfyy>I{)>8PM6uCtWM|+$BB!mgCdje z0&7LPP3Fx`YJW+V-``)STT^7cVr5O#&;9rstUx-3@kl4COA>RvJg#ivVLySkyK{Eo zioK0>r)bqUI?PJE2MJpj_)6I&VRg%0wAoTsniekPMR^!+$)Uor&xaqxJ~AP>L|rv?-LuCWi8dk)M~ej+VZk~hBCEVTA3#;PX_hqGFzX~ z7T8h_u8Qo_``$i%Pj=Tj!W3iN66%GNYF9d0CJ0kf_;=Zw=LV`KaE3L#gFAVvijv95 z&F7Wfo8>NQofIYOQv5OR3^iXrwLHN>?Uk_bqaBy~yt*({7B8xr z%15E>k3GaewqI&9C35T4SOD z>}qc(FTK#uJ2{ayW8og{*}iGi#;>Hc^{!VyAmIB;pM)acq6PkJ)r!$v?wYn)92$%M ztg`RerZbN7Q!h9=5FJ2&BP7n+WI#glTVt6{oOb~hJ~7wY8zuuu&-nzr zzW)epk#$(jQ_hN~AcsglE}%ipyJ5*d%SoC4_tHH`95+F##$WV80#~M{e`LVvRiM##`px7 z>$#W-qv)?!OCTu&n3~#O%P)VHa|090v}XO2CM(TFiS`DJ<%-%IMBB5~haTRswtnxO zXJ?-i)SAs!(=>#RL*O2l8qL{qkWhA6?ag8^v;FX)`!H`*Qdnvgt0BYv0IXxiXgd_L zmAbU}>(8BGv$V6E2bhkb+}LV2v>7XMm}8f8;H!X^gP~T0CL?Q+a=|<)WC$?Lz&+Fo z?FE=lhqEq~y>J>m)LGlBG`<)cG1s%_FYMF=K%mBOQ&xebonac*?B0g(nd-8m;SwhU zYJs%Gj5BL4xUE7FKfu!*+=%1ydt&_8k5rFtZ9E!2E(B!xm&3;sBduPpyS?G> zK*G2vczyCjNocesau!nHS}zD9V~YbnyPSN>vm*M~_z-rY z>(kjbkn-3hpW>oc;=wsY8OO~3u~tA#%fChHHO|9XsbWIBTDun$CdZqKj-GH@28*G_ z8y7nWC#+4jkoeDXd4?pRTw9SkL%d~Q?@yObP*H|ra`6_PT8%LslMdG~;uYnw|0;7! 
z3T14^$!h6_^%UYZIOC$Tb&}cK@iypZLA@AWOpLt{=%& z!hFn%uZxg>AS|3o+zroXuvpu%ul7uWU<@8qqZX>I@Utnzs8>+%jxJIQ5kV5v<%V8w zw&$yb@-yk?hat#CrV~AN{K6fzrH4N-vOl~iNWMEuLbo_xHFR-nsKi!X-~E*wUlnNj zC`7jYiz?j;690qL^){>q>9eU?o*o{PT74{nK*mL6spFyQMBiU^#WSE7Nl)66gL&8g zM~raktKq$DkFP7jyN~Kq5u9q(U-5<@%UJ_cu<+`II)+M+HEIP30m&bl^?aG>`kJbP z_e>O!vlfFR5UttAS~f9{%H>KBeVJ-%qB|(s0~h1e2X=kx;%l6D%5w6aNxWoJC@nvi^|1dEV~cE?S^g}Gt-=)Zu3ZPVw!QJYXFSW!!~QS`k;JXu zXjVIWWGweW^38b%jf)+^S=+J2Vj=?eC}hu9uk38?igXfR141{XTxu9!K(~p+7&Y5^ zaRiAyQ3RVVWP7mPkmsg>K{8V>;Jh2lp5Js*$%H%%Ryym0Tk5h+tmQR0nCH9`7bd=$ z*lPJT4K1K&v@xFySKa2^e{hd?W0ad$UZRICkj;W=GnFcY8mrek2Exr5ELJ#t4BAdE zJK|eD6;bRB|1rDbsxU4TA;q-yYP~2NdC*6xgRtYaHhHTs{NmRPMR^1 z(MZh$vdknWS9FY9Es@ZF^Mn`{`Oi){SO|2s&EN8UW%h5pVb^W7$ZdUmbalBt_>7IT z?z&*i(|A|uFUqRlV6O=zX$8rID1?Td{ z?)hdx5dlrKU4 z8sW0x;cSe+4c#8vak{cm;kC*@^)lvF0?lqdo&UES#+^j6#YC!bOu6`pwbys*wQ8oA zSnZHr@P^euRXK})ldKY-`ADKHF>#{>5=Mjf znw+$7Y5QaGg2YO+{FYd@$?s2H@0kpK=gj{^cNZriU)x`IJxbhhy$NPyX8u%#skh!@ z=+$CNV-e}Rz(hQ&jC=I%L@;?8t+rJ`=1T|<4vGb)kUB(&VTjVG6Nd8P^@1-Wa^+e? 
zON@Zv^KTyH~KsDj(iTkUAPF&E+2*AaGd@M(Z>O=4OOg-1!zg?P6o-{Vg&@ z?Vcs1{OC5=HngPRPbyXx(uqq-6t$_=eIhzDb*f;$R$Hhf{?i{+n%O@w$NzSfHcqCVPyF}J5w$Ar@CHs0#-w&)`ajQhH@7IVj~hY` zoD7V5WvTDpS+IwUu)L2wTW*v1I$uVsl>Q+F(4W{6b^qjW z$OPfA57j;BFfQ}X;=27x?p!z%9YELDLNb$P$c91a9Sm)EyR$xIOWrDI5Fy&<3vx~K z0>3}OpI+}h8g4m*oj1EOJVX!I)H{9D)`=7uA)GEjs-U>AFbqsj(eUuYg|`2>O{6_A zJ+Bn^Km#$s$-Bbz^wO^rUc0j5H~-K3Au?To!8?O-L1gKUi9aK65G>FFVcOw;7wZzE zRI5C3!N_0ipC2xb)Ad&?d@u5Hex-7_ANzQEY!pDC|F4hvKVMwTp zWj`ZWL_AJb+3`ilj}c=JS_CXZIRO%dUz1A42bP&AV;(*M={?&B+XDkYTMQ)V=vj?r zQ7a^~UU&SGZ}Y}3R4VlV_}J_)xM1D!fBKk&Eq5Y_Ep@o(?n{9SMWY8PDe0ktA@CrO z6bbW}15{hL(^X>Tx-}k2zM_Yuh|>MgjFN^``L00Je+sVJwRd!MUmuj=7l2_+J(nE& zEr0LLmCnB~pD1!sd5@FDGlZ?pY=x!_VEAMOSi}iH-je{^j7tC#>dsa`Pr%cLS z#gfW}dCW8Pi2L?vNribpduWp5JnPiLrP}vmx2Pli!H@B(5KEwe9}L6HZdA0(fUj(K zb|Dwdd6{JQcv1-Ofdss-ixHK%aYRu3(_o*k02CsNKgq}kvzD*b`Z5N5?1OYHR0L#r z{gzAk%>@i_nA8enL(EkfAp-bIX?GbTO&NEFU7QlWagl3JMcy}h@v%;fYhEn7Q$-8U zm^4dfAs!T|Q#$w&6R+hTb~JoD-&f0rZ_!!aRb&cyxaj_OmGggPZQ+rV)1u>$og+MM z-?-x!1qGoX9RF+ars0=xyvD<$-aKRAyts3o<$nIhwz5g?Xtsjo;tZ@=qA`7Rd2i0LMEJ|?Z&RA0a=c_vgU2ZUf)+?^S6`HU+fTE??7yni=xS?&sI`=MOx4&HmM z+X3cx+{D)Hn7bxx8#aLJo$zJG{~EEonviglO!`OTZ}rpFrvGsukjf@gJ9g=-S*n$y zE2^Y2^jl%$W?XcQ0m#5g%$JFlZ+pc4HyZ`C95hP}j+N~UZ2cr;he@spG%iN{uZB(D$a-`xzz)Sox)Y?;Pq_^Nex z_uP1uXqHoRTPuyI5pJG*oOz%r%HnJgNBEux$P|XKN4=A^JLFo{ihRSy40HtqcJ6{A zmVngF5YruU+<-9Bqi9N@J%2h^Gc&VyoJ<#<32}bsbUlUEG3`gkniXoA`}71Xy`|az zoRgpl;P$qiZcHP+qKw-c)04Y7;+eiTJIwi^<)v4-=w}fi+gr3yVc)^!!?5`}F>h4ZFRe5iyPhxD{Ds2) zxNt^?zLeEy#;-G<@qYSUEkLpA-SiM5DqBnH-^PhSy?aZ(qV@z+UB`d%Z}~(vk%3_Y zqN{8vKU?*_&XuwCMJ7#p(Q1@xNl-i;=c}#Uz-Tc;H4PMm6ciN`0Zr_Y4{qC<-?8Ca zA7PKDVq5MxKkci|(f(hmzR9Y?3=G@IA#s|x)06`7vF2AltDXPC{Vgelju!s+lj2Eo zKvKa%veEZ(iJwHo<YP(-%Z4L*5; z0wU#W9z;aM{Nnkl;9K}@sm3z~K`=$NUdLy^{14rRU=ff|7M%$|h8D*8Pah9NC$PpnqS-Mw-L?Du?FLRk!BhjC@3J{-&~AE(ttQbV+*y; zhKe$26vw`7{}$BFz*1H0Hw>#(I(`M$R{JvD_$T9|mv}U#is8%0%hHM6%hHfR01|oC z;c{CwxpJMi_n+X4-};rw`Us~?U=d4xnXL+UJC>ghDT|x47Np#{=H8P&$i^7@YwzEB 
zMc)MfJ;I{{o}WX;V#o!)E%j7b^l9C`(v-Nr={Eq-XO6n>$*xdIOE>VF+enPk?chUH z7&N_bSt<9F74J43J4+yrl?&$l1qH&1njf3*k%2x~;)@$0kApJEA^6UW$+#l^hdA{2)*xu7>===SxCsKc;Jt#TtKFzBuj@Nehd zM>VYbh9++VDibz<95cTV2%yWF-CJgYzItV}whFq(ppp1ICChR>^Lm|(U8`AmTQDQhxrw(Jq;TSBl zcNR%yqdwz(24{f6+x@gLLGj)#%5Jf09Z`bA^+j4*a<~U^A1T6eHP2(KC6^5docsh# zQK?TNl16SEJ=a0$$>_Qtyi_~}?XNnc{_6%FVJP}{126p3Z~afbb|Km&>?ukGuz$a- zjllpA*3OJddjbLuj`T)}ktuwUL}v^~@|KkJE~a|d9>J>HtcvW}>F@tbSONf1=0u|D zTCUud#{*+E>}f#Xst|x=TJ0)xr0j`w$MNy5*}!>;=TRu&UfbAD`(Mb2*Hs3!_&S%^RdUK?KDin&E{W@)q#%!Z(-;+27^$ICa0(pSn zTe8mEbGB5vk=gH=$HxA!V6=MO+7V`55dFb=IdIx}l+okB?+2An^g4g;thn|BKQ7(? zsSBQsH+!OoQ2n~EK4MuHzOM8u3N0)Uzd^rGJ}A*`zqs7vfoMQk?z^bmR)*5lQ=S!D z;t*$QIY(BKn=TZXuqv@(vqqVf&}x%wC?cu$Y5l1MpCNuH0n>bE+{a<=rAR<5iEU4a zJC{%N1kp1TqOfop8k(Cs#}0%%h2PF|AdDyi{%ZXqfQ%ze6Ibzcxla9P!8Y-?5u+#M zUR+7;FqBpp;dGpNc_EYCBnUG?L_+pKx6^pq!#+zaf4)UJE*f)4Z^&sXFh#c$7+67or! zCwK&SjGHH2Bd?aS<&&BJEG-1WdOrOVK@AA|L+wi#9j132>tbN{+lRU}6Ou;9DnP31 zXVZ7@?>;#JFavnvZM@SvG3byZmRby9VLLSqjR7KkhWF5SYb?N_{ldfREN!9Ic&g*9 z{fa}6uQP*v80IrPJH%v;3x_`$`_Fqly9!WUPtdI9f((rtoGD(BNq&hzKurlk(5&nW z#(*$ClR7=OR8^MI<4QF52u@23^vJ32)#?R{v_}DantbfDl^;yT&c{L zC!#vTDU~@B_!wWRh1?JFCV1=y3*XjGnyZX+&#KRr@VI$W-mMh6;;aex9;2+Je)>B} zV&K_I2;?~XRLuHV<%xL#&<#UIJPHpUvylzAaR?)2uD%s)f&+t%#n0vsn_ zB2eJ|$x?o{awiva4=~$pw5^}^VbWy2x-?-pkW%_JB6|m#gZy}Ue#r)`sdxg0EGVUak`cKX9_As1vVJiqBHp?ld@D-gYyGv@q; zsq*S+*0|`Ep-QRH2_r1rfkn^4DQa7O6V*+MjN|a8zd1|`5SEk;0QFi`K{*;lKU1L42?`1g)l81t z&M6YOlQhNfHhhR5S%=W3fMRd59sF(S;({e8k+GURv}fuBu>%9c(Pxw*0ql% z(%9~JYPW~-3Vb_=ZAZIa5J$~d?@vcEMJ*_9*;jV(rGAFEu2C5b_yM)JpB(v+gVvWh z#%7df5dwQO_&76PuD=SRoOSO0LEb=SSPKN*8-73Uv~Gme`&?5P`5rRCOc&S(C_cXe z+NW_4|9femV6K>uGK?bjYeK^u0CCAzcIl{1!%V=_+Xf$LTi-M4)x*mh?R?(bie?+= z>c|rguTEeDJyxi;$J^qw?EoQP3#0Vk5eg4o;RWxEp-$!in3jKaNtvtfQ&aWsyN4Y) z-FBK&yt-L!#m(-DjNyA@%*=mAm5V>A(JB;A=?fSk=;A(>Czf|Y_m39#r=BVzHymTt zor@lG1bp}Kvwk1KmXY3M=XMD>5L+tOI3Kvivw`&vea>|I_l?W_IrYs-jf7TKI4cgH zk$?KhT7x$H%0FfhKwxY>>b0&qt=T_%65?xD=v9!26UUHb-&0Qt``nIV-A1WsmC&)g 
ztxri>x_`*glVGCwATt!jy<~LK6`sJbnnB-KqtV+v!~bmf6^w?Z zLD&9y=FP4#+^{I$mXr@~)n@z)xwn7P3Hw~f8ShOb0jwg~WOjWTiFZAkbVik_E+MceZO1eK(<;4==Q>1wp#>!C7Qy2jUl-#cMP)8>af;94r{etf)_;BuYUSnw0FS$j6HAg(dFtS@#az$NceBFzXHV%AN& zv#@6O{O$G1{&E4l_u#L9D#qqo0(dFD_~#je&d>PAkOn{3Ta@RsP13j4OSO==3^^HE z8&q4wwI^%3tbF``czf%psQSL`+n^f}5kWen1*E$hq(eerC~1%`MN;WTQW~YZN9jho zL%N3=8s`0Rp4W9=*Ym9V{p-EgyB5pEV(ppP?mhb#-|umJj+fcxbdNJ=h z@BCe^tb4ok*3f4hd!+ddo7LC>72!6;J;+h4h@Y?3NKW9%p3v({I7!^mC=@~KblA~0 zxA-+5zn%S$)XxC*5C^34`Lb`+xw@5Fj5-bPS2dk_I@;1!HZY$a7)f%Xi33hju7GF| z4%rrIH-7SG9lT%z-)XLqMhm0*0!Q^N5PI|(#nC@mR1}%V@wxDc6Y+0^NwZy7gaKS= z7X#}FIf{Q78l`52;Z07VSIhpqS5JJR5k1$tF=hN7N}Ff==W4_+--fWty6lGBJJ36W zY4Hz>WW3}f2a>_9k;0T$AU@vm=Ud|KUQ*Q5O!TuZyT+fF64%oADr5Pg&HSY!C98tKN&W1vOZ>x~TjlNU}NE9ybbV@U&am{{%DfAF95%J$Yfk zuhpTLNS)*~X}2{aHB8k}rOf|+b2)v@NvPq2HK=jv%u1VMs)pngpe`to>`Z<8k#f&~ z0k|`UH&Bt1S>|BW`d*vcblp{5A0M(tE6hnozHPqD z@%xsI_|@EW#kY60mZf*;Qt;q#il3oM{LQyP1xeKbUWRYQ6|boA0-r{O`ph@LHOIO) z*xt=4f*jXkf30nfr@a9NzHK+~*NkaDiC2e=MEgQK>#s+wA(=aE3xi_*Xj8!oGMATF zJ|Ptr+I$a|PnKGC>Mz?_O~~t2wn;wnEWe|Wl}-60V6`zYcjH_-WqWJAD!lwAKAzFm zzzD+t5f^lqk3Qd`c>@jX9L|#A+V;z|5!eoVYr3VD3pq_qzC7#C z+*8T8%?^8ZlP*lu^*FYpm!!+x=Qpj;YN5r5$i%9?AVK3AI;&P`f@c_=FFv=$gi6cv zV=_#l8@fp0TON;1a==KX_sJG<-nd;&hgMzc9CRdR^rmK5SQ)kKe|+o z$2dLQVpVq(Ok*oQJ^ygji3KqEAux3_UA&X#MrJlJ12OiT`hm0KoW6H+O*>eHAJ3e8XvX(sz+g@aC=R9T5l1F*`WuRm$ z{>iY^+NGLqA6Z=TUM`kSlBsg=;)8zl>8I-~g8@hBMW!`8Y%_*H6X7iPNo+hP0UVZ_a*CooK{oee8%X- zYC9-w4Y7d537npLF@x7pZRK$6Z0LA>a~&=M051vH6pS<|!VW)Qhllqku}jI z?PD=Btscyq9s!i(E39^ckLK<0$$2UwE~`8MJG?LC)Ju0mSrelX-oL>AIv@o)w=8VL z_a$R-$!7PCTlze|;0v&+#-PBT6_$nZb%6Y>yWG)BfwBd+XfMOae{EOfA~^@v244;J zpef@Ur$xZ!CMZIg)O?%0u%iTS-7^pdg@WgkGOuEnbE!)wc4Gjxs|%4>6g|8CkvLj2 zgEsrPsbrm#@e)_FR!v z_*`l67+W!d`nNj!^IspH4CwVHu%yg;N%zdcn(q>6$IrkMxbzC2=Dyl~;*n`Eu|JKg zQ&7FNW}&|3PM9fS%k?E^^Amw!&{WK|rF^OI74__Q-5)7NZi5!@LhplNfv?--_>=+* zf|Miz5d64ufvZM2BP$AjCvnJlmDMrUY+L<@x9p2w-p2RP#pbX`?7Z1RCBYk=C@@s7>;ScaHu$)k%eSO^m@;MKuC~bX@lK12 z6=|#`LkN^2wEoBA3)NuR@g-AQ{L`W5|* 
z!)?2hE^^Bo{A>O#y3s2G#KZRCsf5*7ddNy1w)hjVk=w-3@K_iWp|^)ig~Cxq;fk^Q zu!{N;D@#IHyGb$>3wZ!ofku0{Z=CY2>CsT|HAeiqL3XM^iw&U+K|SaP_oeZhf& zEidr<0muRr=8>vBM4^Jdg6sp)?8MP=aXF_`rnSdjXiFI^7FUYrPROY$9}3cuBS55R zqt|Tl$Md%!dMWR8H##^YY9rlOle2E!`G()p(B~s`%s_A;W60{H;>k$j6oM}U5DC{yw$u=|Q z)U=ZfTi3fMomv%q_!~E)duP3|DovR(VHIM-1m5?vzQ9@#@-tOi6!~)qX`kx_tFw&Q zfMrf6Kbvkv{%9K0PVZGeE_+Ai4TTLyJA6RXP-(AV1E~aaJ}}#7SLd)%M9g7^cTtK7 z-fxdo{&;3k2+q5GE^u=Go6Xjgo7|t)bV8-<5KEo9L6Axe^~vq)!wXP@m16 z$?Uy>e7{*stbaydKom#wXZ~zK!*Qo-OGLg4Cv9zCfLwO8rBd853DsOuT3Eq+&6#x> z6o3=|E&w}<6{zJlkTrLgYjau*MBSxx-Z5frW|~x`sB+~yd-XO*hk*CCKb=#-4-X#w zJvsD`_(oB>;Ff^hJx2_Ga(`o)NVlk=TfYIL z^V#ctTRm2wsvxDM*nnSG7HhlSzva0&YN6;FJ8P5+!VXIme{4yfd;i40hn6KGi%eQ? zP)`dpE%t{htFRJFb-qW4V15e0n7$t%di29y*v+;*KJm&oLVC$EXe3u5jZSiz6CEAB zq@~5xj7O8mf=}-r#uLUIfh^xETi@R5sr#P0Q;tHtM8l88WgHdZ%G^*YTi=$ zaJOphTo7q+EMMDi47H*C6UgG`^b}CTBxGLQ)tBN+b3ASdKL)k{I9j#NF^IB^%_uxU_*2p6b%ULHmUY)$MU>3VRUvAWa0SX|r4#e^Bfb(>Z8tB@#n z_laC^iv?ipjR{bpT6R@g;09>oB98BSZ2l41p~9J_NcUYwul|;PPBJ@)jcQRWl@r-5 z!p^?p$j3S5;d2D3s4?*GU$EYISYc0ba9Xi*Kjkqp0%Z?H{U$hH!D>Q++#2Phkl|XRv;-eW*uAT@9a4x!c<%+pu-rn|FcP z9>M6z#y$QJs0h<#i`)WJQWOh2=yefRHhT=6J_|BVRe0jlpoe2OkZvolET7Qf?d!Ff zp`t9D`yfiRTQHs2=EZ!KmBeSft+UO=H1y?%Xdjmj5rV1vb>VN(#PFsUyI%*gjpVs( z4`=BHT0};K4r345Jf&C=^QKDza3Qygz6|mN@U-^~rp>yTp0xN_RVLkUp~O>B=rwtl(bu7KJI1%%7<|sOD&IgGwiy}0>^c#(xh&qsTO(UvDID@ zXF^UUE$_c=JRy2!LLN-;NPqrn7qg2@-L5Yo*PI}g(=E^Lz|7RqwP)`pv>J0cA>DE; z8^8z~5+t~fMY|1oOs349`Q0Q|%59!;OK=d0tf?oeGrpZWt*>c_FUp4K@UlQ<>3&eN0X6>QCPWy$0z?WBa@_s-;Vx z9~ey>SZ4G$KKNE#zW%Yy6|_AX26bj{Ai;C5aQj1wE}4_#hw4LcpGq5c{-x9EME>UF zZAQ3{S0vv302qUZe-eJV7xDuy6j@xF9m!;u*%&o~p4T`!i1~x{Wbv>8-|igso1xx# zrm@0x{t#aW+VzTCg-0Kq5AFlD3LW-8GeDrx(C7t#i#f5~?UYtF(>52@YmYINS0(~@%mWzko{POAki^ch#=(`#8-gDTIgIX*c?qVY5v1VKnl z6CY=#iGIU7uu*!(rGB5tzhk};*mX5tXv zW`*KAyk`~aaiO+0TRLfY_^AwkW*!kUCWYZS{NAbOj*kK(Rzt%#{oeM3VH?XhWZMr# z@6x9ng0K41g#>cztTzNFFZ{WcMKWPgV#AZZ5#2SmZ>q8CEz$+>GN!5{yasI>n4+H> zzOIn8YDVk{fcKhXn4N>SIK}<|PU#=hybKaE_XH>yyx5+K&z5TEe;fO*DYIiY@HM_f 
zkpt&m40cpS{-WJ<#W1{&z!P_lb22 z49wUhhB?Idn6yH){}|hB@$wuB6~Ucb<5BMmricE-xvxZ*IYmX6dctW#cqU5x2$D75 zQ4h_)eW5b5fTK-hSVP(#{A03Y#Pkvm1=W8lKCRtlUedj-6R1sovCZ64@S~JNQki}{ zXPl!U*hn-|8DsVo6lh%UE|JNMg~`euprdScDY3nyqf%j45(D^*HH*S&8EF8s-3&j%TD||8e6Y{(w^$`!qYB36il2&Vg2G&|!Gb{vrFt)8HNsK#^;UGst^gYDi63R;p7zhmfBzc4IX) zOB@uMJ0~gxmtt_7lYN5Xz-$G%D_6ZUZ1Gpw&j9Et7y_fO$m`VIBr8Ly@b;}&512Gp zMO*;Cj+2k6ab;+n^Y+gu62sZ1FvFGXgfFWHkmU2la7`2L#YrjSa^O18HSi3MxRL_} z3qLCqIJ2MW(5JjwDW#V*o%=adtX&&@i*sI%)=fN$u51y!_@v)$Wt4G%Rnhg`?Eo0q zy)ioP_qJt6H=BjSK8n>ocU?(h+pn+v`Dl>dBT+&=*6xb z=;(3n%z#a{%jl+Qq#(o1CC&-7ZKJWAatl(y=6V6v{2UL-F*5Eg`j<25oCH%uWy>=> zjHE_KZxCCt36O8k=F^KJke%98K&f5I7VI&UP8;~jWnk9;e|Hx|1xM=<{fPB4!PRC_ zDl&OB0G(S1B_eiu&Bj~W#JVRxCf9kxuH^c*MCAF5o9mi^xLqTtt`IMYo9*WjjNGn0 z+!_oXJW;^-cub@-z4ZRo%On;NAQpPkzG*g6nD9(*Vu!G{wH{tMr$8l+N6mh^; zsbILNf)=6kkWwOB=HZ5jy6_LJ+!@an#Md8oaX|CYhskzxR^`-VmGVmDPv(gpJKJB4 zhi$g63@Y|#9SW5XT<~Q9pp)xXrGJhjpWk(pF~djLZ~?KTv?mj0Cg5f>3eN`Rpy%Rg zXQ?WdUG0;fR47b}ESk6Ctf3)Cye#*ASyISEK94RtcuBZo_V>7dr)KKg(C}ELB{?S&YhQ2H-+TC(imr z0+0ul`jn~0GL#R513b{wA4SmQBnRwIRU1G#IlgD|k3)#O_QHv{m?)N;TiK;t&alLj zS69bi3yt=X!UUMF70d8{QMxaODC4^J;9-S*4vTwmG32X8f)SGUp+dC%VMw0NnDIy= zR8)`XQzpvvBR*e``Ae=#L(zW8n$`ennG-&|Om;rKYR`JZ6zg)#n0uJ;2A^5(Vv^n% z^vB8PC3U;cF4~ISEN`Ou9oT%2gkgv?L(=n6xf+T*`)&sktT zMoZijXSZ|Oyo|JVR=jOG4dEqHr6Z8L94eV_)ST)p?}xlygW{O+_bTG}cAk%ZJLaOA3Y_!KWwK7rhpof{)L}5h+sW$1b6ASM;8N8(WK`)~=TFOgBYRRdT zQsc1FSACi8YAYZQqJ}h!+DhL0kBT4!XMJkE>w66-y<@tQ5AxuQCmN+iiW~K^LJZ8P zk-DNrC#Q;22pk}&T~D$9B~4o6{8MXeeX!C!lWNF~bIIhI9M<~bcRKal!>vV_2^_V1 z{Q}@4-d$il=MdjPD`~pcyOTP>We-kx6NlVeGQ(Kw-3wUF%PBa|m+`knsLFzx2ced8 z1)*~ed&La4ZUNRikX`sfC8S5ZdJnC>$6S#Qbdp)rTJ$}VFGkgiE^L&h2S0JE$SuR1 zZ^KMd_cQJEEg;c1QApGjKq9&%9wM@+Xg<%`n{kcWG4RSUsKA>BO}}|>@kVU8(DW~x z(&@X}K4wtLr&^~PqC-C6HnT*-Z0^{w^QqsTf^iBQ(4n}CI9FaP@Y*euKcHScrf^Mm z(-^jNfu93-uByTr=b>fnU;5u)yD!r&{|fWtnRiIBTW@u1X${wI-wKI@5G}`{J?ehd zPN%M1h#qv#oTXI3d}z~dc(C}eGUyO1ELR`)DRN;#{SnY5Wty_$se7FkYM;oC7JW_} z?6iOo+#Ezd9J%&}!zFo6VH4P_*Fv?i1VE83RNKvHF`CgG*0uE#2wsX4h@F$ZNw5+s 
z4NM6V8icb-QH(ZXWk*&#D2US2}$tZzo>K7 zgG==2L&~QzgEJErY68#_1uRzv=m~Ey-U*2a|0N_sst{On%YN-p z!xr%BLe_{7iNG;HmbsOp?vO(}dhJWLAiiz1&*!*{kFRWgOJ1Hm&soM(JBQ6CdbTrB z$coi<4OcWpghZLTji#VcA@zc;{(|#Q+LLsywFHSk{r&s?<_ol7nDEE~W^3UW)_1J< z(fIty*!`znC43wWs(v9|{$lQ(gJdo1`O~8=F;CH`xRE3whDa4;P5}(!rMo{Nm0ptz z{N+nY#&q~0DW?svrS8}J8H!=ZM~$aHmOHD4fcjYY=6-L8Yr5K&FJAvJnSbAwzQioN zdMA^6-L9>1LllG~$nWXFHRifXv{e*)A_glQ*EE^CSc{gpMc3;G(;DT&-&I4trF64` zLTF!-a+JR%w zB$6o!+1KpvuHB3w=lGzM?Cn^1r}uk!yUbYUw`_Pv4~<8wBzbICap0B+?cz;8!=H0{ zb3Mq7%7yn4g;4eL00$ODAHD6( zjaz4r^H5VBrWI01^V_N5?1gyHxn!}OtxCMhm_A#)$|RFIzl)nZ_u7?8zpA*#ToqilMh=|=^!-|crFtz*RyY(%Bo0Wn0?6=g)za}U3 zb8m^gy&0c0{EeL5MSfFyYQ1pvE7*Q3D5{b&&f@FR_6M7vZi{dtTj-rI9C?H6;iXZQ zE>+9bEz^L_0PFjb@oMLxIEvFi@6O|nWE&DKR$ImP;>TJWO&nq@Q)Jiqc&C8=3;11)3WzHNtu3ygTthJU>mWp@~&M> z@k_*iKI^7=AEO)_i7-;|TC#U&4+ICojOUDnCo81hcwtajw=jXMAPbk)MxX#2-{te`Jy>sPi_f93nw~XyR z$Jqb{ZMM5|@Cl4Zr;DMJsrM2<62_pzZiYU49)$fI^dPyKKV@khd*L|nD1G-4sA~%Z za18Z?fQX}>>I^;(ho#rgV~JCy-y>QTRCe?U1df*#hAfK?_XQACpl?B0J9i>8-D10A zb(m5VlLFeAj&vEcV@G`2caoVOt6!MN$*Y*nfyQ|iW$LEVN@8Nv&O)-|+&ztX?Mae1 zc8hym6O3*^Lozbt1f2**oJ8-j4!!WX`YiJp|=Z z7`Gq@pr}hK;_}>Qpze=qqt^5JXc?3O#}%0^{K^o62Vzqa(wr4Rl*%MPAamAy{EA7Z z46hi74ENJz3QYT3pX)+D^b>}R3ZEZ5CzLU8>2PWIVNumc?&@`4z)c3LQPDT`@>5Eg z?{-Prga>T6xfZJ)5}Hj6}#x%F|IwIIo%FE##XzeBwd-ts*^o+=iT^skfPWW_lUxBxTzWo$ZsPx z*ZFvpZSV!1CW-)OoprrtA6)WfI+ltO8cvc$h^ukYk;=@7d1D zuu^NMVBmlqup8NC;1Mkrh0fjeQsUx>FefsFW~@l}4fIR)L%i!;naVsC2tXouUhf19 zb#`m{aSOUW$99O#0s@ZivqitX?dHasALT33g}(`S?Goo>(X_h45wh!Rzj1~*RiTCw z%^Pj#!4Qzq!zhfr9X+qPoA6A;xJr2re+$q?MW6B5<(VN?;$U3eSYv&eA3s&ws+`*t zC1lGKs+EsaJM#>T$O?dUBwICEx1`)Dv+L(*gM;9nmIgW&&O^bOn!L6 zzcvuyIZkzz43z3uiyM^!?d?9t>Qk@_6&~r?{NS5t`Go(05FUZL@#}BmQq@Kftmij}s7}HW z>q{+DH=V4Iiog+~GNZ8_;Gv-bmGGEJ!!u<4BzN3rFC`JD)9r_L%hwX}^$Bc7s$fI% z4p@p;0U<BaY=6O%W!dx{q2$7X3MC7S4ppDSRKNT z)|d(hRABrb_%nC{lME@x*xZ3N@v6|flB+Sm_*e&WlpIh@l^K5eMt zVQ$gH+XvBY4e0DV64C{Yu=6jSVcld>VX;r;rEf`&IhR?9!@nJXs=^=&8|Ko*X^Lf~ zH;;?MH}9IJP0n=e4U?7=s76qgwsN(YLo7bgcR?o!Ka5;&@jv3zIlJO%u`Kw}>%bs0 
ze$^E@R6r_DoGMt>0xksKOrKkJ(l2=RjQ|x ztScY1Q2sAhE3S-Ux?pk+@oAH(anC~6egUXxFg_C45@D>hO^Hw|vUD`6fi^deqJ8=f z+B&+KvMl@0579}#e5@F!kthm)2rOB7Y5RVCKCNzt^7c z$<>HFNH+ds%yD-~QJpsJs2EeDxv$xuOYNzPN5c zmw^xaQ((Rwbozl8f65@_CFXt7*6L$qAm}jv6XhvG^?R_Vru8r%ID2IA4M7&B6mWm- z%#3Nx)2@ZyelTsoB>e2?WB!9FTj*AsucbI3PfO+`xFD+E_Jrw@X_}0y(bT30qsjAZ zn;wJ=lu-3onDtpp4?P)pBloL7{PxF1<{(2C(5KfAT}J#6W$tvNJkNR_Lx@vuZK9TC zP6bJ%EhC8M_GpEZ!;)r2Ws!$A8UfvcvNw8SOJ<7ekr%*@7I0EA8Addiay-DSg|1{)@7)qd`NDy4aecgx?sR5f25Wer3C1i>DmwIX3SMWQ?5qogvPU@p=gi zA$)@u+CWIFpEbRy^{jrzh(^g( z#iP4aH!1coxM4^uHp4HOTENv#9VDhdbq%3-TByNp7-%-%c&3W~qzf%J8T$~EcxYsn zu-^^z)WzP76%?L@eFzG3*?q4E^)23=sqXV}4JKRs5>*5Ycxp{Lj`am{&Q?>I&Q;rj|V3)Y{RP=k|4Pk29$wK_CW2 zhSfn#8So0%`FC!9g%3Ca9L6yHMFMmoZb%&Ea7UN6@M5S ztl~5_%M8Ic-~0osl&GHmP|dEv;EW_!iNW4C#80IBzM|Wm1Hk(x(={wy&Qy*0%gf#m z1YJ8kP4prVwxfB?RW3Jwe*XG&$rn+YmodzZm&zXMeb&rlHdi;y;j#@G_#RRgRwJIF zyE9|27WE>5Tbj>tjxG7D4EHepha8}w!(n5;@U%~X0No_ZiTjj^nW8}Qgq=zVUe z2~y%!k9wKU>uw1x(y!P5Twh@~oK+mhrY-gz>|mB6Ocgg{MGX!F(UxC0+kBGIcxHx zJ~3e`z-%AxO2ww}$}Vkb+~Vqymt9wlu%~?e_TPrVf1HEHPobz7?T;Bw%MgpeKQ7Js zfU;8z!CuaHpDI2e;DnRG01Z!y@~K!+U{ap)9?A&V9Rgcxsp9#GDPaR;HzgCff8m`I zEm8%04n07&R0a0&D{^Euv(PY!e~a?-MmazXVe~EBDH&id3rPb=3^R~s_>~V+RqYcn zbGr^Sj1=iA5L(zSRLQm6o?=edn>NxoN66mO2MI)F0CB61{AEtNZ@n1K$9wBGx~{~y zJzHxZiI@qWcOG{h8lRY#N%B6PPXyS4M}&-kav;^9(AnD5A&WxN*wRT0`5=*NU|2AddE?e zVoZ)^$AEWdCC#)oip0sF7rM{a)0@OtAUQVL8$WQGHVZjiM!vlMshXX}|6VhiJoySw zOU*BYmYO;Xa&=l<(u8AGBRZoz&p&35X|>dg%}&sjO~JTUFmbZvO!~3+%#oCR682}? 
zzdsa$BJnl^E$USKv@Wf&MgeGNMt*3E5GobFsnx5TAAZ7Hp$PkC=L3}d&I3QWP1iMa z>o&EGy!c`=vuZ&=u;rWA-yq?76^_T!$=rKgabfNo4|{LBhmi&>wV`$rLaam|pF)Sy z1QY@d&=Bt$J#7|iVt2kYLN3D?7xEQbTIZZ(d0t>-O|mxm^w~;%6D-HhbC49Z-cb z5zs?#xmC<>Uk%&{iS>QGlzyeP2)A005y^_Mj06P3sFD4JMwh*<898cjYQyANzY4lv zOlp%@WmvD4D7khbWBSEgar$cD(=SF&#kp!N8W4yXgUzjGfovn4_0q+M{y>=svT>X6 z@Mjm7B3VYeOF)Eo(rvq6>00bLQZ7M6_{-;NUi*>zHJ_PMjiO{9r@5k1p~$v%_Y(dL z5k^3uRb$>zbTkmVyt@>Z42IyHY)5XeZd^W`yj-6RWjr51i+C*-Cg+tM$@->764}e= z66<$SCMojn&c?tvPHeGbM=#+_2`kwvjYyOHX2&@`_&}vP?ijI8e1EJFFG-Wx&J#j3 zGXD3XoVUiDyz&j9jjZCP5o+{TOfYtFY7L_R{?)0@<9L@-n&T@RJ1yTgFmRfv$HcIbxpW+hgoYZJv1AWU#l*odVEC4e1e z7z$ROVH!G^=EaQp>m+6?CO3eIbw@bK$QU;VZ$_;^p*Qsp>h9X3Pmz!j4H<9au6HgU z$6WXz8`xv}j@A^`0NsW0S|T&>o=wjPE#p1~W+tl150N>i{QgufB>>*Yrs5M#RnJq@ zB}_s3#A^@olNn!7wP^=B5#|(KQ8qsIf6P%u`C6WBg&fa@MnW@J{_qYRF|aN@Q2}iE z-z_ofV89z+_U<(4bFld$AahR6m6rGsAeW`X@0 zBUV^QNL>7ngtUZrB#!35)dB&r@0aVp>{bF(8QgcNj-$qK2Zt8S6vD$>;hgv2(dMzNZ~4h}xC?wBR# z7<<>FL?G_!uy|8rGXg~=L&&2vGiv^5xN=$3c%e6Y2!c}~#|hyKwx`~|hFeIBzqk{U z#O_`an-l{5L>}6;W#!?aroPXf`bX_0p%HRa;6k6K2iYz~yN8%v-Pa`o^1H+@Dz>rJL=E&p#|4*inzX=HFtwrR~dOs=HXb6e=jx*mKZvwzHT!# zgD?#uPbqTTs6HWWOo@}t>vVZn$-51RylA#Pai3pCW>Y^1%&q#xR9dG+PgB}-nsrrL zIz+i3Q9$}U?X*@>5Ierj^5m_V(io}EReo{e4z<$iRO0g@7puF z+tJNrQ|9|VhcSVSqB(Q;*Ff4f1ur+9QQ4U&%zvQNi6R+__6VPj>O0B|pHJ*%-oPEV zZtuYthAM07_W;bpd(@nH^=H%N-#nqP)PPY`G*mJ)M`4+;<+2o5`q0HcR3rq6gZ%uy zfq?o%&R_yv_|63)-sdlBP_)8@5#Fr_g}KAw-SWD}Mvu5s(D{7B(Zl<9hQ2%>MPKj3GbqMx!-ho5Mvue##&3z3h#@ zPrSkL1M|T+Bvn#2QGM{OtJu>H{86f&lDUY$Ip6Nn$wE^iA@L{WbhZd@u@riNj34ja zhGaOJEOzA}5HZ406f%}HV$Zz-S+}hRGZCYW+_vEy*~P~Tg%U-42(!_V0B! 
zHRLw&FQE}A%?bYtz62Hzoq>PLkN)D&tUhtdK49CM*87`E^Z)Yi>bw8E{C_;e1MNRl zwEtSP{Bx`{S?j;foLmw03aZM$FNUf zqEi!7 z%+a_9{g06^!tb9`{O7v=*Du`<|AUMBuRs5X_i}{ypD+2p@7sSL7$z9-ANbsVUiP0u zsBw+|D}MZcc#Cd{|Ieo&c^8S0e~sV@Ff207NiRG z(8sPhw2JY?r#f>q{-2{|kJTTe!$-6<931ihJa<$2_g%|Tfk=XcbXUdGz4;wKTGOMA zAvVXgKDp&X8aZX*Ex+-9U$d3nx2CFq#j?9l&j~p+0^V-5I)GBUvc2g0*CAo4l!Aha z%Cuj0=gQ|1z0;<5*EMXBosZ@E$7=>dkXcOI(_{CF%=DkaDb_Tj%whGxdAXuA%$c@zKV|sDidgb|8ri7ag3UJt~Rr* z^vX0BMtlNJ;^&Er2&22J<%UBKOWynoHyHMs*W)PX66f1n;eXzK<8zTYA-zJT85PgJ zz7H$#X-J)Yj(Mo(IgWG*-fJ)PY&|dLNIXG~rhx~Tfn$C_K12!ja z8-4n&NOHi312?z357f0KB_(C+uH+Q^gKpS#4Lq^x^c!kz8Nz#IxhYD~6QNxNK|2AF+p4nA)GxQ4M3W#(I%^ z%kDR9>(%Et>rP*eGf%G~Qlg0pn6z(K(Ch1+<#Ywsw5RtBLrY$3Z)+7(49m?P?_R$$ zYVjTcmDa6Ew@jb|&;g!M22|YjIlWJR-H1*EBQoZb7;2eE;wkn4qSJLc2xvv*UqhA? zL%rC!cGFqg(TNBwQK9iwEI*gE!oXLh9J5+6jw<^%3>CJY`)5;wpZGGt6PSX}q9@j* z){Cc>{SI4I%V(>dtZGly4#!A!TLus&WI1l7zG~UhEV|9_0^7T^6XfH+cV~gwpqjz3a!!cWZOlZG(x_yR~QcY2QP?A?XRH=z8>Boj#% zy*GPYM*a7kc_{LekMOp-*~sv^uX^N%x$wniipMQcrRa&-(K+X2uM{nY{6_+M+Qk;B zqa(Lto3{6?wDayZDgOE*ZmdFiJ&);DW}J&~K@ zm1(&;;=EYyD>Yl_Y@?F;t^oDAWttp6g7UGwu5Lcr6L@{;VzulC2EgLnMj|(nH0Q28 z$QPh;$gxX9IqG8*>ATa07mm^?02#8?*h^fIIB=^S$qJYzg-+|Re!LN1-89tY#wKsO zQGCOKPEVbq_$6iSBlPsihX2!zuUOaF> z00_lcoV`X_4Gl4bz`q7A>M!qYsm9x+Vzj(pw8@+#e1Z8&RX8tvcJ!6yA-kG&KAN*O zHEXq2>FtAF81=0@X4H_q1Mtn(*hT}hQw$(VMgwkY8scA5IaGvs1u&|lS=U<4uXh;z zZn|U(pCrGq7J*8Xc-d6-UzkPoUzsH+UR+{M8x-eP6=@iL*9K<$76jKGs}@4l!O_ud zR0xN1PZgau=iSPT8pEfv%aG{LE<1AZM`pAoV9FWXOfrU9;tKWx3wB-eQcu@e5elk?X-zuQ4fqS%v$qTR#$ssD@?mw3$;r>Y80v| zJz>380+a0@zrPa6iTJdM5sZds1=od|Rt#l+5rS@`14q3Y-dJbHqeLA0wc;b+)rAiRc7N3fpOeiVqHe2EZBc%w0BL7=R(^9_8k@C-=ZUaIe*rqKgq4!3U?z%R}hH{MjRzDA0rmFN;7(d zSWF;$IptKg!Sb4HJ-t0`6*hB5s|%3{mqrdoqf2@o=Pi9X%J>LOhxd(&9xHSnP9w$$ zO!kqLeeJab94jb~)hzDO(R^f|8+zd)v25#Hm)W_gNS$A<+aj0|_TL<>nws96&1;hR zBJNvhL8`sa>SZ_XkE<_wITVI@oVa?}2O``G!PKy0#8G7T`z$x^lN$?yJPzc$ZhbsD zczmW3A$&dCW+ejaHQ(b==`}J;Nw2WxZJ2#{GzT}xReT%sb##Ai?iXlilKGg8;4XHT zLT{*OKPN`{T%E~*sK&Jzv9>wu7a 
zUFm}AMrRIvUH(BTwZ*gKbf$ER){O}FbbR~gO=*_5ypw*KJh>CO5`sA~|;I{}Juf4?urW+r>MYzFE;->0K7xGtlQT>Yk zC~uR;`e^T&ow?(TW?m{ihgpy+H4VE0ALg=NSaf(-U)8AxllftIbRnB{3RgN$$3vX_ zo9+qs+$+QGwW+yu?iKhX*9=^R7URt;b^MGcj9V5STBh0yMi!1i!&OkG4i@dtk*rIbPqOWtrC1f^v5Z77HNLaT~)W-hQ1M};wqj8NL zmuY7_O5RtFT)_^fjwSkDD&g9Skj(LkSM&~?rCT?GKl&~-8fOMIpD|%p*U+gJ#*W8b z?e^*yHP=g~oULDx_EqgIJ*Ax|*{(|cxIowU_a;AfV;S0kO}G(s^m=**_AGyvkgvwlivLPJL=En?g;}I_vv8n^WoS| z0TCB+g_{TWW|BC3t9%}ut^V?}esf|mX8(JH{t?%X({RSCeS0mYfz2I82e9i8sr7IuJjJ?8$ zztT@Qs@4AP`#LEvw^;3aSEKH^zve=g$c{^GbJDlmp*73OYUl^mL%s8}o>OkG3FhGu zm$e0?{&Fho6fCo1hDyJUom%At>q?K7dHd-;EOX|F>RPKug#p3-E)HHF$6-{@`Axa7 zLr03BCoK`|wmlpI_s85T6s`Si_h6GAGamg{d=Np?iRc&Vx1X_!=QorJH0^nO4709< z)>X^QSq|x**;iZJiG0;&jlO6%932Tg9n)3Mz({ zqJ$V?p3*~=Qq(+CQ;eZz5>eF98e)zTp(sJjX%bVCFW&e1uJ^pZzrWx5w|Dm5SUF5=pBXzBhF1gR5EX=wTItnv6p0#F^Wzz)K}&r`KI&{r!;Y z5F~T&ju_f9<=wA|lt}*IzWbqj-(CQL!j0i6ym7p{Y6LzeT8@npi>f^Ags^jdG4%JY z;6a65?GEO~Q!DP?9&T^F#}@B-cbo&Zr7gvAP;x(L^@#h{iWL&Yn;oDksxDS95~kWM zZ)=086Ki=_D~G0wE0q)Evryi%-X;5AFZj!vHIVBS^D5^c@loUhcS#GxJAz4F%1<|6 z03Hw)-qi&$S*!N{wVY1yWzcpH2M%D434^d@m!Ej3RqltTJk+j>lnuIb-9J7o5qJyg!8=dnRe{w%nLcQ>I?#hF zq*@(GNSR@rwx5y{i-w-=Cm1v?;V6X}bs`?1npQ+< zk(VdMjh40ycW+1EPB6|WA5^u4lWVN<8_8iY7BVK-7)jdV0l7jJXRS%q5AA4=^V+4% zPdCd7@vhPsJy(C{Uon8K+zRT_cy|JTnOAO~dYdjz2&bcyCO#}6@wi2+u_Z5lmtJBc zHUJI(D!`*kZBT%@U1+=M0I00gNXFH+3SJ>}i*@nWWZP%naJSkmrtiMy5>UeV00~}N zP>Xnwa(u$9r+|j$fP;OL45wvnjFJ$RKy`gvm+t7YCIcn=`KzBz*K?EG{F_gi+C!k= zqc7B(S^s$GF=%26D&kb1Ey{)_yr%Ysk>H zC6^huL2CnwuhKJh-G;C$34=KzdU+@`2(a7khv5g)>u>9FD5l{Ch<;fKz9%GWY zD6GyBqs}?oRef4Q*oV@{QwCt&?KHYAk91i18fQ1S<276}mWhlV0-}D_F$$?1b>H9Y z2qbw5M-J5A{`oBUZhn@6tq|%50tjhwMf*P((Qix@nQ0@HsU}|rOs(r4j!kAMVt>^N zBGmf?f!OzD>HCi`*g;1)p7AksbUaUObN(B;V4+ZF4(~FY2%i-oH2lWo%6a2`HkMaK zlyF6*tM{tNNnV0y&!&XNeI&PGbx%))ty$|#e>->lwkAh7j zLEbVtU)1u^~JoL}!kM=I7_X7dmtP!m5X`9t(@4 z-D)ACk`Vtb=@OY7uy0E_S68>_S|<3)eO`g`SHSGW4pRlC|hOB zKyqApWi4E&hi|L!roVNc5wQQrv8f4&6@4C1Z0}So8ah2-MV;YZe!w-gsUnP&b^Cr7 
zA4+}6d_Mvmm-BPX2TF926OvA{yZtEh*~?e2Rte9lOuC$5P9-bXWYorA34}k?@~3=c zhG#fPnwA1SZ~!%~C%x)mMjcbEGC9Y+I((l^xGBAHX4ibYhrKrsy$!|@87r!L72oZuhaaeve51~SdCoKE5(^%i ztn>y!AZ007FFy<3K?<(Cjo|vR7xY5`TI-UjgPt$?DAV8Q*OQ#(e7KXG*{{BX7!IpG z$k`kz_~K)zS4Ag&t|0H`XG!OyeAbfap*SaIKP;ir9-Nc(ssglxk26SShn0opHLPfd zlVt>pA&Uc;mFa}>c{N2-scITWjQPy)68g?hb@4H$ar1Rh@1Itr+u5wzMltYN7j}lh zYBIG=AHpT9Fz}UCe7*fAw|~UDK);A4T9eTL%q?v=7P#*@ZdHP=jP?2X{$N^8smd{5 zWsyjgs?`x7y6?pR*XuKNLM5^IFlm7zqvVGw{+1r?R)wz4w6`C#SwY*~kkEJ`k+B%;=t>14CY_rNF>2*Gdi%;Pe-)^-|&7unb0smTQG)fhxa$Qy=+oT_e|=r6a#tnMOf)Pl9eb zJXtg;(LdYtTg(kdN1pZQ8vu!VBbegkti-|_N?Uu+(bd$o?q?)?QS`L_orv%%FXgdW zn}xc|n+!u}$=}+thK+66kM&&B(1RxL1r;sh^J-=7Ac*v`hH8;4bR(AD3gt zQYmI|Z&DIK`e8*V485lpR?bl|o1)0yPLM(qQETx~!=nJv(3SWu3ov-$-5O(1ZPgORLel zXIhJhT#uP|rE8oSSLiG(f004;DIUG3*MEu7Tw}55S3SIjni3u7WxhN{$hJMwZj!dZ zK(;CH=NoFJ(U3($M8->(w`1w;h2edDe@R8%a>rS|?2-AK9p)2>#?J7qh}Tjw;!y7?3%4)tK5N1jKm5&kdqY{(Ni@RW)^%H#izQ*B!Rbx*@JiJ-zY6(qEvW_#GgLPB)C0f zfqialsON<3sEFucX@5?7**jB$C_c+@NDxQs`s_!l zg;;-40zpSje{TA(aO{VNy&~Zf<5?PehO1v}0Pwec)MDNa+Vp)3CqA zF@Ie*mQOu&iyw1Nu+VUM7s}=WibK@?NvJ~ z5VM7;p9jIHd-7}gMbUC91DqJ-5S(-^{RK=b<)VU~4ugjgV%X;Shd-L$gd@=RA z2w!nqdd{A|MfnTM6!=inp}fi*DrI*|)$!;-AAY+3nJGN^LIHnz27J(YJmqDmg0e0A zctg_H#n+xkZ_jy8&D0DO+}mDn2TqrMN~=;|2?pnfcv}L2`Rnm2y)NC++e!$Q*XkZl zZNVnGQ>oMb2#(gCZr((8MgMwmsL5C3-D7WeS~51O+xr$@nzl_Yc+R3INlZ>4NVjgJ&ukKy-EA7^L&DQrS!V`X$R2hPS4 zG6iY#Dy~Qn@)aUm@2`$3`&MiMVOzgTsJgy??5mZ^7OE@Z(><-Q!vWl^G#iNg9qAHK zeETh@`4{IX-&M<_Z@C}7z6uPNsaVz574tI;Mjz1QmNe)kU^$i`N*lW$Bb`u4* z>_waVib^jF=}g$^WfT(_yHgZ=6l%S;_!+;7f)0Za30uXIm}$!hiuD5jEmPF5z{+JE z6*=#h8S9v-=^mqb^w#k8kRY{qzeHyMl7@UF6el2W?VXo?*mZFTi7*Jx&o8esZ*-|% zvB%JVBgQu+t8cEt*!f${3f$ibDq(EeZ_e9QE6yF~ zo_OuG#JLN6)x!q)CJqj4{<5out5275^N}anYm1f*C=#|0b=umeHp6I8z&&d-#K0{u zG$JAb;AUBdtvZ5X84;y9%&?+y3nXz29$J1U`q{UqOa0e`&C(sw-NZhhy`NqHpPL7l z73B1sF(GhY0b4ICCRTQA_O`p!t;=^f)zXjESHRuTiOPW$WIR!YhnKf3QSoj0syf{U zcK_4-33Ya)8Zq-M&p75Cy37>~fSdFe7X;-7UA(rtB1z0yV6sVj-aP&r%MRQ8vjsal 
z?S*;ZWR0DA@N~7;(w51?hp)%iF(Sm_weivhUn00d4X5jWg=zMO2}b*bdC_AO^6}%x zzrH8XQSDT;z~u1ZIM>Qca)AcSXqR%s2H9P1&(v7)ghgAl7KE8aJZa;7|I~oX07jxL za^XqlUQ?P<9P<*E8I6T}SZtuAjqD3w^#C-n+-P3CPK0Com*DE{x5re(_f zzYdx7EK%Ll^_iHneRA?Y(!qH$}g#U zV8wezkggnO9t6A4Ljkgvk9kPL?O(19oH})i!t)w@g|R}pa2$+lKEFCYPY*uF(B5kD qxXb9xBIT?K=t&P>GyUlQlp$PE)FtIS<~?&_nWvftPpTh35C0#(2vGC@ literal 0 HcmV?d00001 From e9153b82bbf4d13c479d191b924bfe66cceba33d Mon Sep 17 00:00:00 2001 From: "Ed Lee @ Intel" <16417837+edlee123@users.noreply.github.com> Date: Tue, 20 May 2025 01:15:46 -0500 Subject: [PATCH 064/217] Updated SearchQnA to use nginx like ChatQnA (#1769) Signed-off-by: Ed Lee <16417837+edlee123@users.noreply.github.com> --- SearchQnA/README.md | 98 +++++++------------ .../docker_compose/amd/gpu/rocm/compose.yaml | 20 +++- .../amd/gpu/rocm/compose_vllm.yaml | 21 +++- .../intel/cpu/xeon/compose.yaml | 21 +++- .../intel/hpu/gaudi/compose.yaml | 20 +++- SearchQnA/docker_image_build/build.yaml | 6 ++ SearchQnA/tests/test_compose_on_gaudi.sh | 2 +- SearchQnA/tests/test_compose_on_rocm.sh | 2 +- SearchQnA/tests/test_compose_on_xeon.sh | 2 +- SearchQnA/tests/test_compose_vllm_on_rocm.sh | 2 +- SearchQnA/ui/svelte/.env | 2 +- SearchQnA/ui/svelte/playwright.config.ts | 2 +- 12 files changed, 123 insertions(+), 75 deletions(-) diff --git a/SearchQnA/README.md b/SearchQnA/README.md index e47efa31f8..d8a084fb21 100644 --- a/SearchQnA/README.md +++ b/SearchQnA/README.md @@ -30,66 +30,38 @@ The architecture of the SearchQnA Application is illustrated below: The SearchQnA example is implemented using the component-level microservices defined in [GenAIComps](https://github.com/opea-project/GenAIComps). The flow chart below shows the information flow between different microservices for this example. ```mermaid ---- -config: - flowchart: - nodeSpacing: 400 - rankSpacing: 100 - curve: linear - themeVariables: - fontSize: 50px ---- +%% Orange are microservices from third parties that are 'wrapped' as OPEA components. 
flowchart LR - %% Colors %% - classDef blue fill:#ADD8E6,stroke:#ADD8E6,stroke-width:2px,fill-opacity:0.5 - classDef orange fill:#FBAA60,stroke:#ADD8E6,stroke-width:2px,fill-opacity:0.5 - classDef orchid fill:#C26DBC,stroke:#ADD8E6,stroke-width:2px,fill-opacity:0.5 - classDef invisible fill:transparent,stroke:transparent; - style SearchQnA-MegaService stroke:#000000 - - %% Subgraphs %% - subgraph SearchQnA-MegaService["SearchQnA MegaService "] - direction LR - EM([Embedding MicroService]):::blue - RET([Web Retrieval MicroService]):::blue - RER([Rerank MicroService]):::blue - LLM([LLM MicroService]):::blue - end - subgraph UserInterface[" User Interface "] - direction LR - a([User Input Query]):::orchid - UI([UI server
]):::orchid - end - - - - TEI_RER{{Reranking service
}} - TEI_EM{{Embedding service
}} - VDB{{Vector DB

}} - R_RET{{Web Retriever service
}} - LLM_gen{{LLM Service
}} - GW([SearchQnA GateWay
]):::orange - - %% Questions interaction - direction LR - a[User Input Query] --> UI - UI --> GW - GW <==> SearchQnA-MegaService - EM ==> RET - RET ==> RER - RER ==> LLM - - %% Embedding service flow - direction LR - EM <-.-> TEI_EM - RET <-.-> R_RET - RER <-.-> TEI_RER - LLM <-.-> LLM_gen - + User["User"] --> Nginx["Nginx
searchqna-nginx-server"] + Nginx --> UI["UI
searchqna-ui-server"] & Gateway & User + UI --> Nginx + Gateway --> Nginx & Embedding + Embedding --> Retriever + Retriever --> Reranker + Reranker --> LLM + LLM --> Gateway + LLM <-.-> TGI_Service["LLM
tgi-service"] + Embedding <-.-> TEI_Embedding["TEI Embedding
tei-embedding-server"] + Reranker <-.-> TEI_Reranker["TEI Reranker
tei-reranking-server"] + + TEI_Embedding:::ext + TEI_Reranker:::ext + TGI_Service:::ext + + subgraph MegaService["MegaService"] + LLM["LLM
llm-textgen-server"] + Reranker["Reranker
reranking-tei-server"] + Retriever["Retriever
web-retriever-server"] + Embedding["Embedding
embedding-server"] + end + subgraph Backend["searchqna-backend-server"] direction TB - %% Vector DB interaction - R_RET <-.-> VDB - + MegaService + Gateway["Backend Endpoint"] + end + classDef default fill:#fff,stroke:#000,color:#000 + classDef ext fill:#f9cb9c,stroke:#000,color:#000 + style MegaService margin-top:20px,margin-bottom:20px ``` This SearchQnA use case performs Search-augmented Question Answering across multiple platforms. Currently, we provide the example for Intel® Gaudi® 2 and Intel® Xeon® Scalable Processors, and we invite contributions from other hardware vendors to expand OPEA ecosystem. @@ -98,8 +70,8 @@ This SearchQnA use case performs Search-augmented Question Answering across mult The table below lists the available deployment options and their implementation details for different hardware platforms. -| Category | Deployment Option | Description | -| ---------------------- | ---------------------- | -------------------------------------------------------------- | -| On-premise Deployments | Docker Compose (Xeon) | [DocSum deployment on Xeon](./docker_compose/intel/cpu/xeon) | -| | Docker Compose (Gaudi) | [DocSum deployment on Gaudi](./docker_compose/intel/hpu/gaudi) | -| | Docker Compose (ROCm) | [DocSum deployment on AMD ROCm](./docker_compose/amd/gpu/rocm) | +| Category | Deployment Option | Description | +| ---------------------- | ---------------------- | --------------------------------------------------------------------------- | +| On-premise Deployments | Docker Compose (Xeon) | [SearchQnA deployment on Xeon](./docker_compose/intel/cpu/xeon/README.md) | +| | Docker Compose (Gaudi) | [SearchQnA deployment on Gaudi](./docker_compose/intel/hpu/gaudi/README.md) | +| | Docker Compose (ROCm) | [SearchQnA deployment on AMD ROCm](./docker_compose/amd/gpu/rocm/README.md) | diff --git a/SearchQnA/docker_compose/amd/gpu/rocm/compose.yaml b/SearchQnA/docker_compose/amd/gpu/rocm/compose.yaml index a9c81e04ea..12abd84986 100644 --- 
a/SearchQnA/docker_compose/amd/gpu/rocm/compose.yaml +++ b/SearchQnA/docker_compose/amd/gpu/rocm/compose.yaml @@ -170,7 +170,25 @@ services: no_proxy: ${no_proxy} https_proxy: ${https_proxy} http_proxy: ${http_proxy} - BACKEND_BASE_URL: ${SEARCH_BACKEND_SERVICE_ENDPOINT} + ipc: host + restart: always + search-nginx-server: + image: ${REGISTRY:-opea}/nginx:${TAG:-latest} + container_name: search-nginx-server + depends_on: + - search-backend-server + - search-ui-server + ports: + - "${NGINX_PORT:-80}:80" + environment: + - no_proxy=${no_proxy} + - https_proxy=${https_proxy} + - http_proxy=${http_proxy} + - FRONTEND_SERVICE_IP=search-ui-server + - FRONTEND_SERVICE_PORT=5173 + - BACKEND_SERVICE_NAME=search + - BACKEND_SERVICE_IP=search-backend-server + - BACKEND_SERVICE_PORT=8888 ipc: host restart: always diff --git a/SearchQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml b/SearchQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml index e05304c418..b81c01955d 100644 --- a/SearchQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml +++ b/SearchQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml @@ -176,10 +176,27 @@ services: no_proxy: ${no_proxy} https_proxy: ${https_proxy} http_proxy: ${http_proxy} - BACKEND_BASE_URL: ${SEARCH_BACKEND_SERVICE_ENDPOINT} ipc: host restart: always - + search-nginx-server: + image: ${REGISTRY:-opea}/nginx:${TAG:-latest} + container_name: search-nginx-server + depends_on: + - search-backend-server + - search-ui-server + ports: + - "${NGINX_PORT:-80}:80" + environment: + - no_proxy=${no_proxy} + - https_proxy=${https_proxy} + - http_proxy=${http_proxy} + - FRONTEND_SERVICE_IP=search-ui-server + - FRONTEND_SERVICE_PORT=5173 + - BACKEND_SERVICE_NAME=search + - BACKEND_SERVICE_IP=search-backend-server + - BACKEND_SERVICE_PORT=8888 + ipc: host + restart: always networks: default: driver: bridge diff --git a/SearchQnA/docker_compose/intel/cpu/xeon/compose.yaml b/SearchQnA/docker_compose/intel/cpu/xeon/compose.yaml index 5fb644a848..4503a645bb 100644 
--- a/SearchQnA/docker_compose/intel/cpu/xeon/compose.yaml +++ b/SearchQnA/docker_compose/intel/cpu/xeon/compose.yaml @@ -168,10 +168,27 @@ services: - no_proxy=${no_proxy} - https_proxy=${https_proxy} - http_proxy=${http_proxy} - - BACKEND_BASE_URL=${BACKEND_SERVICE_ENDPOINT} ipc: host restart: always - + searchqna-xeon-nginx-server: + image: ${REGISTRY:-opea}/nginx:${TAG:-latest} + container_name: searchqna-xeon-nginx-server + depends_on: + - searchqna-xeon-backend-server + - searchqna-xeon-ui-server + ports: + - "${NGINX_PORT:-80}:80" + environment: + - no_proxy=${no_proxy} + - https_proxy=${https_proxy} + - http_proxy=${http_proxy} + - FRONTEND_SERVICE_IP=searchqna-xeon-ui-server + - FRONTEND_SERVICE_PORT=5173 + - BACKEND_SERVICE_NAME=searchqna + - BACKEND_SERVICE_IP=searchqna-xeon-backend-server + - BACKEND_SERVICE_PORT=8888 + ipc: host + restart: always networks: default: diff --git a/SearchQnA/docker_compose/intel/hpu/gaudi/compose.yaml b/SearchQnA/docker_compose/intel/hpu/gaudi/compose.yaml index be8ca676da..5ff29a5d7a 100644 --- a/SearchQnA/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/SearchQnA/docker_compose/intel/hpu/gaudi/compose.yaml @@ -187,7 +187,25 @@ services: - no_proxy=${no_proxy} - https_proxy=${https_proxy} - http_proxy=${http_proxy} - - BACKEND_BASE_URL=${BACKEND_SERVICE_ENDPOINT} + ipc: host + restart: always + searchqna-gaudi-nginx-server: + image: ${REGISTRY:-opea}/nginx:${TAG:-latest} + container_name: searchqna-gaudi-nginx-server + depends_on: + - searchqna-gaudi-backend-server + - searchqna-gaudi-ui-server + ports: + - "${NGINX_PORT:-80}:80" + environment: + - no_proxy=${no_proxy} + - https_proxy=${https_proxy} + - http_proxy=${http_proxy} + - FRONTEND_SERVICE_IP=searchqna-gaudi-ui-server + - FRONTEND_SERVICE_PORT=5173 + - BACKEND_SERVICE_NAME=searchqna + - BACKEND_SERVICE_IP=searchqna-gaudi-backend-server + - BACKEND_SERVICE_PORT=8888 ipc: host restart: always diff --git a/SearchQnA/docker_image_build/build.yaml 
b/SearchQnA/docker_image_build/build.yaml index bb622dd0c1..ece9f88a58 100644 --- a/SearchQnA/docker_image_build/build.yaml +++ b/SearchQnA/docker_image_build/build.yaml @@ -46,3 +46,9 @@ services: context: GenAIComps dockerfile: comps/third_parties/vllm/src/Dockerfile.amd_gpu image: ${REGISTRY:-opea}/vllm-rocm:${TAG:-latest} + nginx: + build: + context: GenAIComps + dockerfile: comps/third_parties/nginx/src/Dockerfile + extends: searchqna + image: ${REGISTRY:-opea}/nginx:${TAG:-latest} diff --git a/SearchQnA/tests/test_compose_on_gaudi.sh b/SearchQnA/tests/test_compose_on_gaudi.sh index 0e4952fd4b..6cd2e6ebe8 100644 --- a/SearchQnA/tests/test_compose_on_gaudi.sh +++ b/SearchQnA/tests/test_compose_on_gaudi.sh @@ -32,7 +32,7 @@ function build_docker_images() { git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git echo "Build all the images with --no-cache, check docker_image_build.log for details..." - service_list="searchqna searchqna-ui embedding web-retriever reranking llm-textgen" + service_list="searchqna searchqna-ui embedding web-retriever reranking llm-textgen nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 diff --git a/SearchQnA/tests/test_compose_on_rocm.sh b/SearchQnA/tests/test_compose_on_rocm.sh index a822ff1823..a5fbb2b996 100644 --- a/SearchQnA/tests/test_compose_on_rocm.sh +++ b/SearchQnA/tests/test_compose_on_rocm.sh @@ -20,7 +20,7 @@ function build_docker_images() { git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." 
- service_list="searchqna searchqna-ui embedding web-retriever reranking llm-textgen" + service_list="searchqna searchqna-ui embedding web-retriever reranking llm-textgen nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 diff --git a/SearchQnA/tests/test_compose_on_xeon.sh b/SearchQnA/tests/test_compose_on_xeon.sh index 408048060c..fb5cfaa469 100644 --- a/SearchQnA/tests/test_compose_on_xeon.sh +++ b/SearchQnA/tests/test_compose_on_xeon.sh @@ -32,7 +32,7 @@ function build_docker_images() { git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git echo "Build all the images with --no-cache, check docker_image_build.log for details..." - service_list="searchqna searchqna-ui embedding web-retriever reranking llm-textgen" + service_list="searchqna searchqna-ui embedding web-retriever reranking llm-textgen nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 diff --git a/SearchQnA/tests/test_compose_vllm_on_rocm.sh b/SearchQnA/tests/test_compose_vllm_on_rocm.sh index 92de3f9e00..7a20a6a6f2 100644 --- a/SearchQnA/tests/test_compose_vllm_on_rocm.sh +++ b/SearchQnA/tests/test_compose_vllm_on_rocm.sh @@ -20,7 +20,7 @@ function build_docker_images() { git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." 
- service_list="searchqna searchqna-ui embedding web-retriever reranking llm-textgen vllm-rocm" + service_list="searchqna searchqna-ui embedding web-retriever reranking llm-textgen vllm-rocm nginx" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 diff --git a/SearchQnA/ui/svelte/.env b/SearchQnA/ui/svelte/.env index 49f7b9dd51..e32ebc172f 100644 --- a/SearchQnA/ui/svelte/.env +++ b/SearchQnA/ui/svelte/.env @@ -1 +1 @@ -BACKEND_BASE_URL = 'http://backend_address:3008/v1/searchqna' +BACKEND_BASE_URL = '/v1/searchqna' diff --git a/SearchQnA/ui/svelte/playwright.config.ts b/SearchQnA/ui/svelte/playwright.config.ts index 66692b6ca9..a412c3c6fe 100644 --- a/SearchQnA/ui/svelte/playwright.config.ts +++ b/SearchQnA/ui/svelte/playwright.config.ts @@ -38,7 +38,7 @@ export default defineConfig({ /* Maximum time each action such as `click()` can take. Defaults to 0 (no limit). */ actionTimeout: 0, /* Base URL to use in actions like `await page.goto('/')`. */ - baseURL: "http://localhost:5173", + baseURL: "http://localhost:80", /* Collect trace when retrying the failed test. 
See https://playwright.dev/docs/trace-viewer */ trace: "on-first-retry", From 26cb5317661f9de794d27b90450f9b0588db33f7 Mon Sep 17 00:00:00 2001 From: Ying Hu Date: Tue, 20 May 2025 14:29:23 +0800 Subject: [PATCH 065/217] Update README.md of model/port change (#1969) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .../docker_compose/intel/cpu/xeon/README.md | 45 +++++++++++------- .../docker_compose/intel/hpu/gaudi/README.md | 47 ++++++++++++------- .../docker_compose/intel/cpu/xeon/README.md | 21 ++++++++- 3 files changed, 76 insertions(+), 37 deletions(-) diff --git a/CodeGen/docker_compose/intel/cpu/xeon/README.md b/CodeGen/docker_compose/intel/cpu/xeon/README.md index 2b0cb398f3..24835c8592 100644 --- a/CodeGen/docker_compose/intel/cpu/xeon/README.md +++ b/CodeGen/docker_compose/intel/cpu/xeon/README.md @@ -52,18 +52,29 @@ This uses the default vLLM-based deployment profile (`codegen-xeon-vllm`). ```bash # Replace with your host's external IP address (do not use localhost or 127.0.0.1) - export HOST_IP="your_external_ip_address" + export host_ip="your_external_ip_address" # Replace with your Hugging Face Hub API token export HUGGINGFACEHUB_API_TOKEN="your_huggingface_token" # Optional: Configure proxy if needed # export http_proxy="your_http_proxy" # export https_proxy="your_https_proxy" - # export no_proxy="localhost,127.0.0.1,${HOST_IP}" # Add other hosts if necessary + # export no_proxy="localhost,127.0.0.1,${host_ip}" # Add other hosts if necessary source ../../../set_env.sh ``` - _Note: The compose file might read additional variables from a `.env` file or expect them defined elsewhere. Ensure all required variables like ports (`LLM_SERVICE_PORT`, `MEGA_SERVICE_PORT`, etc.) are set if not using defaults from the compose file._ + _Note: The compose file might read additional variables from set_env.sh. 
Ensure all required variables like ports (`LLM_SERVICE_PORT`, `MEGA_SERVICE_PORT`, etc.) are set if not using defaults from the compose file._ + like + + ``` + export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-32B-Instruct" + ``` + + can be changed to small model if needed + + ``` + export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" + ``` 2. **Start Services (vLLM Profile):** @@ -91,7 +102,7 @@ The `compose.yaml` file uses Docker Compose profiles to select the LLM serving b - **Services Deployed:** `codegen-tgi-server`, `codegen-llm-server`, `codegen-tei-embedding-server`, `codegen-retriever-server`, `redis-vector-db`, `codegen-dataprep-server`, `codegen-backend-server`, `codegen-gradio-ui-server`. - **To Run:** ```bash - # Ensure environment variables (HOST_IP, HUGGINGFACEHUB_API_TOKEN) are set + # Ensure environment variables (host_ip, HUGGINGFACEHUB_API_TOKEN) are set docker compose --profile codegen-xeon-tgi up -d ``` @@ -103,14 +114,14 @@ Key parameters are configured via environment variables set before running `dock | Environment Variable | Description | Default (Set Externally) | | :-------------------------------------- | :------------------------------------------------------------------------------------------------------------------ | :----------------------------------------------------------------------------------------------- | -| `HOST_IP` | External IP address of the host machine. **Required.** | `your_external_ip_address` | +| `host_ip` | External IP address of the host machine. **Required.** | `your_external_ip_address` | | `HUGGINGFACEHUB_API_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | | `LLM_MODEL_ID` | Hugging Face model ID for the CodeGen LLM (used by TGI/vLLM service). Configured within `compose.yaml` environment. | `Qwen/Qwen2.5-Coder-7B-Instruct` | | `EMBEDDING_MODEL_ID` | Hugging Face model ID for the embedding model (used by TEI service). Configured within `compose.yaml` environment. 
| `BAAI/bge-base-en-v1.5` | | `LLM_ENDPOINT` | Internal URL for the LLM serving endpoint (used by `codegen-llm-server`). Configured in `compose.yaml`. | `http://codegen-tgi-server:80/generate` or `http://codegen-vllm-server:8000/v1/chat/completions` | | `TEI_EMBEDDING_ENDPOINT` | Internal URL for the Embedding service. Configured in `compose.yaml`. | `http://codegen-tei-embedding-server:80/embed` | | `DATAPREP_ENDPOINT` | Internal URL for the Data Preparation service. Configured in `compose.yaml`. | `http://codegen-dataprep-server:80/dataprep` | -| `BACKEND_SERVICE_ENDPOINT` | External URL for the CodeGen Gateway (MegaService). Derived from `HOST_IP` and port `7778`. | `http://${HOST_IP}:7778/v1/codegen` | +| `BACKEND_SERVICE_ENDPOINT` | External URL for the CodeGen Gateway (MegaService). Derived from `host_ip` and port `7778`. | `http://${host_ip}:7778/v1/codegen` | | `*_PORT` (Internal) | Internal container ports (e.g., `80`, `6379`). Defined in `compose.yaml`. | N/A | | `http_proxy` / `https_proxy`/`no_proxy` | Network proxy settings (if required). | `""` | @@ -150,23 +161,23 @@ Check logs for specific services: `docker compose logs ` ### Run Validation Script/Commands -Use `curl` commands to test the main service endpoints. Ensure `HOST_IP` is correctly set in your environment. +Use `curl` commands to test the main service endpoints. Ensure `host_ip` is correctly set in your environment. -1. **Validate LLM Serving Endpoint (Example for vLLM on default port 8000 internally, exposed differently):** +1. 
**Validate LLM Serving Endpoint (Example for vLLM on default port 9000 internally, exposed differently):** ```bash # This command structure targets the OpenAI-compatible vLLM endpoint - curl http://${HOST_IP}:8000/v1/chat/completions \ + curl http://${host_ip}:9000/v1/chat/completions \ -X POST \ -H 'Content-Type: application/json' \ - -d '{"model": "Qwen/Qwen2.5-Coder-7B-Instruct", "messages": [{"role": "user", "content": "Implement a basic Python class"}], "max_tokens":32}' + -d '{"model": "Qwen/Qwen2.5-Coder-32B-Instruct", "messages": [{"role": "user", "content": "Implement a basic Python class"}], "max_tokens":32}' ``` - **Expected Output:** A JSON response with generated code in `choices[0].message.content`. 2. **Validate CodeGen Gateway (MegaService on default port 7778):** ```bash - curl http://${HOST_IP}:7778/v1/codegen \ + curl http://${host_ip}:7778/v1/codegen \ -H "Content-Type: application/json" \ -d '{"messages": "Write a Python function that adds two numbers."}' ``` @@ -179,8 +190,8 @@ Multiple UI options can be configured via the `compose.yaml`. ### Gradio UI (Default) Access the default Gradio UI by navigating to: -`http://{HOST_IP}:8080` -_(Port `8080` is the default host mapping for `codegen-gradio-ui-server`)_ +`http://{host_ip}:5173` +_(Port `5173` is the default host mapping for `codegen-gradio-ui-server`)_ ![Gradio UI - Code Generation](../../../../assets/img/codegen_gradio_ui_main.png) ![Gradio UI - Resource Management](../../../../assets/img/codegen_gradio_ui_dataprep.png) @@ -189,7 +200,7 @@ _(Port `8080` is the default host mapping for `codegen-gradio-ui-server`)_ 1. Modify `compose.yaml`: Comment out the `codegen-gradio-ui-server` service and uncomment/add the `codegen-xeon-ui-server` (Svelte) service definition, ensuring the port mapping is correct (e.g., `"- 5173:5173"`). 2. Restart Docker Compose: `docker compose --profile up -d` -3. Access: `http://{HOST_IP}:5173` (or the host port you mapped). +3. 
Access: `http://{host_ip}:5173` (or the host port you mapped). ![Svelte UI Init](../../../../assets/img/codeGen_ui_init.jpg) @@ -197,7 +208,7 @@ _(Port `8080` is the default host mapping for `codegen-gradio-ui-server`)_ 1. Modify `compose.yaml`: Comment out the default UI service and uncomment/add the `codegen-xeon-react-ui-server` definition, ensuring correct port mapping (e.g., `"- 5174:80"`). 2. Restart Docker Compose: `docker compose --profile up -d` -3. Access: `http://{HOST_IP}:5174` (or the host port you mapped). +3. Access: `http://{host_ip}:5174` (or the host port you mapped). ![React UI](../../../../assets/img/codegen_react.png) @@ -207,7 +218,7 @@ Users can interact with the backend service using the `Neural Copilot` VS Code e 1. **Install:** Find and install `Neural Copilot` from the VS Code Marketplace. ![Install Copilot](../../../../assets/img/codegen_copilot.png) -2. **Configure:** Set the "Service URL" in the extension settings to your CodeGen backend endpoint: `http://${HOST_IP}:7778/v1/codegen` (use the correct port if changed). +2. **Configure:** Set the "Service URL" in the extension settings to your CodeGen backend endpoint: `http://${host_ip}:7778/v1/codegen` (use the correct port if changed). ![Configure Endpoint](../../../../assets/img/codegen_endpoint.png) 3. **Usage:** - **Inline Suggestion:** Type a comment describing the code you want (e.g., `# Python function to read a file`) and wait for suggestions. @@ -218,7 +229,7 @@ Users can interact with the backend service using the `Neural Copilot` VS Code e ## Troubleshooting - **Model Download Issues:** Check `HUGGINGFACEHUB_API_TOKEN`. Ensure internet connectivity or correct proxy settings. Check logs of `tgi-service`/`vllm-service` and `tei-embedding-server`. Gated models need prior Hugging Face access. -- **Connection Errors:** Verify `HOST_IP` is correct and accessible. Check `docker ps` for port mappings. Ensure `no_proxy` includes `HOST_IP` if using a proxy. 
Check logs of the service failing to connect (e.g., `codegen-backend-server` logs if it can't reach `codegen-llm-server`). +- **Connection Errors:** Verify `host_ip` is correct and accessible. Check `docker ps` for port mappings. Ensure `no_proxy` includes `host_ip` if using a proxy. Check logs of the service failing to connect (e.g., `codegen-backend-server` logs if it can't reach `codegen-llm-server`). - **"Container name is in use"**: Stop existing containers (`docker compose down`) or change `container_name` in `compose.yaml`. - **Resource Issues:** CodeGen models can be memory-intensive. Monitor host RAM usage. Increase Docker resources if needed. diff --git a/CodeGen/docker_compose/intel/hpu/gaudi/README.md b/CodeGen/docker_compose/intel/hpu/gaudi/README.md index 1b640a428b..75366d3492 100644 --- a/CodeGen/docker_compose/intel/hpu/gaudi/README.md +++ b/CodeGen/docker_compose/intel/hpu/gaudi/README.md @@ -53,18 +53,29 @@ This uses the default vLLM-based deployment profile (`codegen-gaudi-vllm`). ```bash # Replace with your host's external IP address (do not use localhost or 127.0.0.1) - export HOST_IP="your_external_ip_address" + export host_ip="your_external_ip_address" # Replace with your Hugging Face Hub API token export HUGGINGFACEHUB_API_TOKEN="your_huggingface_token" # Optional: Configure proxy if needed # export http_proxy="your_http_proxy" # export https_proxy="your_https_proxy" - # export no_proxy="localhost,127.0.0.1,${HOST_IP}" # Add other hosts if necessary + # export no_proxy="localhost,127.0.0.1,${host_ip}" # Add other hosts if necessary source ../../../set_env.sh ``` - _Note: Ensure all required variables like ports (`LLM_SERVICE_PORT`, `MEGA_SERVICE_PORT`, etc.) are set if not using defaults from the compose file._ + _Note: The compose file might read additional variables from set_env.sh. Ensure all required variables like ports (`LLM_SERVICE_PORT`, `MEGA_SERVICE_PORT`, etc.) 
are set if not using defaults from the compose file._ + like + + ``` + export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-32B-Instruct" + ``` + + can be changed to small model if needed + + ``` + export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" + ``` 2. **Start Services (vLLM Profile):** @@ -94,7 +105,7 @@ The `compose.yaml` file uses Docker Compose profiles to select the LLM serving b - **Other Services:** Same CPU-based services as the vLLM profile. - **To Run:** ```bash - # Ensure environment variables (HOST_IP, HUGGINGFACEHUB_API_TOKEN) are set + # Ensure environment variables (host_ip, HUGGINGFACEHUB_API_TOKEN) are set docker compose --profile codegen-gaudi-tgi up -d ``` @@ -106,14 +117,14 @@ Key parameters are configured via environment variables set before running `dock | Environment Variable | Description | Default (Set Externally) | | :-------------------------------------- | :------------------------------------------------------------------------------------------------------------------ | :----------------------------------------------------------------------------------------------- | -| `HOST_IP` | External IP address of the host machine. **Required.** | `your_external_ip_address` | +| `host_ip` | External IP address of the host machine. **Required.** | `your_external_ip_address` | | `HUGGINGFACEHUB_API_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | -| `LLM_MODEL_ID` | Hugging Face model ID for the CodeGen LLM (used by TGI/vLLM service). Configured within `compose.yaml` environment. | `Qwen/Qwen2.5-Coder-7B-Instruct` | +| `LLM_MODEL_ID` | Hugging Face model ID for the CodeGen LLM (used by TGI/vLLM service). Configured within `compose.yaml` environment. | `Qwen/Qwen2.5-Coder-32B-Instruct` | | `EMBEDDING_MODEL_ID` | Hugging Face model ID for the embedding model (used by TEI service). Configured within `compose.yaml` environment. 
| `BAAI/bge-base-en-v1.5` | | `LLM_ENDPOINT` | Internal URL for the LLM serving endpoint (used by `codegen-llm-server`). Configured in `compose.yaml`. | `http://codegen-tgi-server:80/generate` or `http://codegen-vllm-server:8000/v1/chat/completions` | | `TEI_EMBEDDING_ENDPOINT` | Internal URL for the Embedding service. Configured in `compose.yaml`. | `http://codegen-tei-embedding-server:80/embed` | | `DATAPREP_ENDPOINT` | Internal URL for the Data Preparation service. Configured in `compose.yaml`. | `http://codegen-dataprep-server:80/dataprep` | -| `BACKEND_SERVICE_ENDPOINT` | External URL for the CodeGen Gateway (MegaService). Derived from `HOST_IP` and port `7778`. | `http://${HOST_IP}:7778/v1/codegen` | +| `BACKEND_SERVICE_ENDPOINT` | External URL for the CodeGen Gateway (MegaService). Derived from `host_ip` and port `7778`. | `http://${host_ip}:7778/v1/codegen` | | `*_PORT` (Internal) | Internal container ports (e.g., `80`, `6379`). Defined in `compose.yaml`. | N/A | | `http_proxy` / `https_proxy`/`no_proxy` | Network proxy settings (if required). | `""` | @@ -170,21 +181,21 @@ Check logs: `docker compose logs `. Pay attention to `vllm-gaudi-s ### Run Validation Script/Commands -Use `curl` commands targeting the main service endpoints. Ensure `HOST_IP` is correctly set. +Use `curl` commands targeting the main service endpoints. Ensure `host_ip` is correctly set. -1. **Validate LLM Serving Endpoint (Example for vLLM on default port 8000 internally, exposed differently):** +1. 
**Validate LLM Serving Endpoint (Example for vLLM on default port 9000 internally, exposed differently):** ```bash # This command structure targets the OpenAI-compatible vLLM endpoint - curl http://${HOST_IP}:8000/v1/chat/completions \ + curl http://${host_ip}:9000/v1/chat/completions \ -X POST \ -H 'Content-Type: application/json' \ - -d '{"model": "Qwen/Qwen2.5-Coder-7B-Instruct", "messages": [{"role": "user", "content": "Implement a basic Python class"}], "max_tokens":32}' + -d '{"model": "Qwen/Qwen2.5-Coder-32B-Instruct", "messages": [{"role": "user", "content": "Implement a basic Python class"}], "max_tokens":32}' ``` 2. **Validate CodeGen Gateway (MegaService, default host port 7778):** ```bash - curl http://${HOST_IP}:7778/v1/codegen \ + curl http://${host_ip}:7778/v1/codegen \ -H "Content-Type: application/json" \ -d '{"messages": "Implement a sorting algorithm in Python."}' ``` @@ -197,8 +208,8 @@ UI options are similar to the Xeon deployment. ### Gradio UI (Default) Access the default Gradio UI: -`http://{HOST_IP}:8080` -_(Port `8080` is the default host mapping)_ +`http://{host_ip}:5173` +_(Port `5173` is the default host mapping)_ ![Gradio UI](../../../../assets/img/codegen_gradio_ui_main.png) @@ -206,17 +217,17 @@ _(Port `8080` is the default host mapping)_ 1. Modify `compose.yaml`: Swap Gradio service for Svelte (`codegen-gaudi-ui-server`), check port map (e.g., `5173:5173`). 2. Restart: `docker compose --profile up -d` -3. Access: `http://{HOST_IP}:5173` +3. Access: `http://{host_ip}:5173` ### React UI (Optional) 1. Modify `compose.yaml`: Swap Gradio service for React (`codegen-gaudi-react-ui-server`), check port map (e.g., `5174:80`). 2. Restart: `docker compose --profile up -d` -3. Access: `http://{HOST_IP}:5174` +3. Access: `http://{host_ip}:5174` ### VS Code Extension (Optional) -Use the `Neural Copilot` extension configured with the CodeGen backend URL: `http://${HOST_IP}:7778/v1/codegen`. (See Xeon README for detailed setup screenshots). 
+Use the `Neural Copilot` extension configured with the CodeGen backend URL: `http://${host_ip}:7778/v1/codegen`. (See Xeon README for detailed setup screenshots). ## Troubleshooting @@ -226,7 +237,7 @@ Use the `Neural Copilot` extension configured with the CodeGen backend URL: `htt - Verify `runtime: habana` and volume mounts in `compose.yaml`. - Gaudi initialization can take significant time and memory. Monitor resource usage. - **Model Download Issues:** Check `HUGGINGFACEHUB_API_TOKEN`, internet access, proxy settings. Check LLM service logs. -- **Connection Errors:** Verify `HOST_IP`, ports, and proxy settings. Use `docker ps` and check service logs. +- **Connection Errors:** Verify `host_ip`, ports, and proxy settings. Use `docker ps` and check service logs. ## Stopping the Application diff --git a/DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md b/DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md index 9f20546a3d..5d0ff79475 100644 --- a/DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md +++ b/DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md @@ -2,7 +2,11 @@ DocRetriever are the most widely adopted use case for leveraging the different methodologies to match user query against a set of free-text records. DocRetriever is essential to RAG system, which bridges the knowledge gap by dynamically fetching relevant information from external sources, ensuring that responses generated remain factual and current. The core of this architecture are vector databases, which are instrumental in enabling efficient and semantic retrieval of information. These databases store data as vectors, allowing RAG to swiftly access the most pertinent documents or data points based on semantic similarity. -## 1. Build Images for necessary microservices. (Optional after docker image release) +\_Note: + +As the related docker images were published to Docker Hub, you can ignore the below step 1 and 2, quick start from step 3. + +## 1. 
Build Images for necessary microservices. (Optional) - Embedding TEI Image @@ -30,7 +34,7 @@ DocRetriever are the most widely adopted use case for leveraging the different m docker build -t opea/dataprep:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/dataprep/src/Dockerfile . ``` -## 2. Build Images for MegaService +## 2. Build Images for MegaService (Optional) ```bash cd .. @@ -44,6 +48,19 @@ docker build --no-cache -t opea/doc-index-retriever:latest --build-arg https_pro ```bash export host_ip="YOUR IP ADDR" export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token} +``` + +Set environment variables by + +``` +cd GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon +source set_env.sh +``` + +Note: set_env.sh will help to set all required variables. Please ensure all required variables like ports (LLM_SERVICE_PORT, MEGA_SERVICE_PORT, etc.) are set if not using defaults from the compose file. +or Set environment variables manually + +``` export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" export RERANK_MODEL_ID="BAAI/bge-reranker-base" export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006" From c70b021689a38ad1187eb7a3708cb57972524c20 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Wed, 21 May 2025 12:58:07 +0800 Subject: [PATCH 066/217] Integrate CodeGen set_env to ut scripts. 
(#1976) Signed-off-by: ZePan110 Co-authored-by: Ying Hu Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .../docker_compose/amd/gpu/rocm/set_env.sh | 8 +- .../amd/gpu/rocm/set_env_vllm.sh | 8 +- .../docker_compose/intel/cpu/xeon/README.md | 113 ++++++++---------- .../docker_compose/intel/hpu/gaudi/README.md | 75 +++++------- CodeGen/docker_compose/intel/set_env.sh | 51 ++++++++ CodeGen/docker_compose/set_env.sh | 50 -------- CodeGen/tests/README.md | 33 +++++ CodeGen/tests/test_compose_on_gaudi.sh | 34 +----- CodeGen/tests/test_compose_on_rocm.sh | 13 +- CodeGen/tests/test_compose_on_xeon.sh | 30 +---- CodeGen/tests/test_compose_vllm_on_rocm.sh | 13 +- 11 files changed, 177 insertions(+), 251 deletions(-) create mode 100644 CodeGen/docker_compose/intel/set_env.sh delete mode 100644 CodeGen/docker_compose/set_env.sh create mode 100644 CodeGen/tests/README.md diff --git a/CodeGen/docker_compose/amd/gpu/rocm/set_env.sh b/CodeGen/docker_compose/amd/gpu/rocm/set_env.sh index 117f81667f..afaa29b341 100644 --- a/CodeGen/docker_compose/amd/gpu/rocm/set_env.sh +++ b/CodeGen/docker_compose/amd/gpu/rocm/set_env.sh @@ -5,8 +5,8 @@ # SPDX-License-Identifier: Apache-2.0 ### The IP address or domain name of the server on which the application is running -export HOST_IP='' -export EXTERNAL_HOST_IP='' +export HOST_IP=${ip_address} +export EXTERNAL_HOST_IP=${ip_address} ### The port of the TGI service. 
On this port, the TGI service will accept connections export CODEGEN_TGI_SERVICE_PORT=8028 @@ -27,7 +27,7 @@ export CODEGEN_TGI_LLM_ENDPOINT="http://${HOST_IP}:${CODEGEN_TGI_SERVICE_PORT}" export CODEGEN_MEGA_SERVICE_HOST_IP=${HOST_IP} ### The port for CodeGen backend service -export CODEGEN_BACKEND_SERVICE_PORT=18150 +export CODEGEN_BACKEND_SERVICE_PORT=7778 ### The URL of CodeGen backend service, used by the frontend service export CODEGEN_BACKEND_SERVICE_URL="http://${EXTERNAL_HOST_IP}:${CODEGEN_BACKEND_SERVICE_PORT}/v1/codegen" @@ -36,4 +36,4 @@ export CODEGEN_BACKEND_SERVICE_URL="http://${EXTERNAL_HOST_IP}:${CODEGEN_BACKEND export CODEGEN_LLM_SERVICE_HOST_IP=${HOST_IP} ### The CodeGen service UI port -export CODEGEN_UI_SERVICE_PORT=18151 +export CODEGEN_UI_SERVICE_PORT=5173 diff --git a/CodeGen/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/CodeGen/docker_compose/amd/gpu/rocm/set_env_vllm.sh index 52d69da19e..475191539a 100644 --- a/CodeGen/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/CodeGen/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -5,8 +5,8 @@ # SPDX-License-Identifier: Apache-2.0 ### The IP address or domain name of the server on which the application is running -export HOST_IP='' -export EXTERNAL_HOST_IP='' +export HOST_IP=${ip_address} +export EXTERNAL_HOST_IP=${ip_address} ### The port of the vLLM service. 
On this port, the TGI service will accept connections export CODEGEN_VLLM_SERVICE_PORT=8028 @@ -25,7 +25,7 @@ export CODEGEN_LLM_SERVICE_PORT=9000 export CODEGEN_MEGA_SERVICE_HOST_IP=${HOST_IP} ### The port for CodeGen backend service -export CODEGEN_BACKEND_SERVICE_PORT=18150 +export CODEGEN_BACKEND_SERVICE_PORT=7778 ### The URL of CodeGen backend service, used by the frontend service export CODEGEN_BACKEND_SERVICE_URL="http://${EXTERNAL_HOST_IP}:${CODEGEN_BACKEND_SERVICE_PORT}/v1/codegen" @@ -34,4 +34,4 @@ export CODEGEN_BACKEND_SERVICE_URL="http://${EXTERNAL_HOST_IP}:${CODEGEN_BACKEND export CODEGEN_LLM_SERVICE_HOST_IP=${HOST_IP} ### The CodeGen service UI port -export CODEGEN_UI_SERVICE_PORT=18151 +export CODEGEN_UI_SERVICE_PORT=5173 diff --git a/CodeGen/docker_compose/intel/cpu/xeon/README.md b/CodeGen/docker_compose/intel/cpu/xeon/README.md index 24835c8592..57eda8f821 100644 --- a/CodeGen/docker_compose/intel/cpu/xeon/README.md +++ b/CodeGen/docker_compose/intel/cpu/xeon/README.md @@ -6,22 +6,10 @@ This README provides instructions for deploying the CodeGen application using Do - [Overview](#overview) - [Prerequisites](#prerequisites) -- [Quick Start](#quick-start) -- [Available Deployment Options](#available-deployment-options) - - [Default: vLLM-based Deployment (`--profile codegen-xeon-vllm`)](#default-vllm-based-deployment---profile-codegen-xeon-vllm) - - [TGI-based Deployment (`--profile codegen-xeon-tgi`)](#tgi-based-deployment---profile-codegen-xeon-tgi) -- [Configuration Parameters](#configuration-parameters) - - [Environment Variables](#environment-variables) - - [Compose Profiles](#compose-profiles) +- [Quick Start Deployment](#quick-start-deployment) - [Building Custom Images (Optional)](#building-custom-images-optional) - [Validate Services](#validate-services) - - [Check Container Status](#check-container-status) - - [Run Validation Script/Commands](#run-validation-scriptcommands) - [Accessing the User Interface 
(UI)](#accessing-the-user-interface-ui) - - [Gradio UI (Default)](#gradio-ui-default) - - [Svelte UI (Optional)](#svelte-ui-optional) - - [React UI (Optional)](#react-ui-optional) - - [VS Code Extension (Optional)](#vs-code-extension-optional) - [Troubleshooting](#troubleshooting) - [Stopping the Application](#stopping-the-application) - [Next Steps](#next-steps) @@ -43,38 +31,37 @@ This guide focuses on running the pre-configured CodeGen service using Docker Co cd GenAIExamples/CodeGen/docker_compose/intel/cpu/xeon ``` -## Quick Start +## Quick Start Deployment This uses the default vLLM-based deployment profile (`codegen-xeon-vllm`). 1. **Configure Environment:** Set required environment variables in your shell: - ```bash - # Replace with your host's external IP address (do not use localhost or 127.0.0.1) - export host_ip="your_external_ip_address" - # Replace with your Hugging Face Hub API token - export HUGGINGFACEHUB_API_TOKEN="your_huggingface_token" - - # Optional: Configure proxy if needed - # export http_proxy="your_http_proxy" - # export https_proxy="your_https_proxy" - # export no_proxy="localhost,127.0.0.1,${host_ip}" # Add other hosts if necessary - source ../../../set_env.sh - ``` + ```bash + # Replace with your host's external IP address (do not use localhost or 127.0.0.1) + export HOST_IP="your_external_ip_address" + # Replace with your Hugging Face Hub API token + export HUGGINGFACEHUB_API_TOKEN="your_huggingface_token" - _Note: The compose file might read additional variables from set_env.sh. Ensure all required variables like ports (`LLM_SERVICE_PORT`, `MEGA_SERVICE_PORT`, etc.) 
are set if not using defaults from the compose file._ - like + # Optional: Configure proxy if needed + # export http_proxy="your_http_proxy" + # export https_proxy="your_https_proxy" + # export no_proxy="localhost,127.0.0.1,${HOST_IP}" # Add other hosts if necessary + source ../../set_env.sh + ``` - ``` - export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-32B-Instruct" - ``` + _Note: The compose file might read additional variables from set_env.sh. Ensure all required variables like ports (`LLM_SERVICE_PORT`, `MEGA_SERVICE_PORT`, etc.) are set if not using defaults from the compose file._ - can be changed to small model if needed + For instance, edit the set_env.sh to change the LLM model - ``` - export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" - ``` + ``` + export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" + ``` + can be changed to other model if needed + ``` + export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-32B-Instruct" + ``` 2. **Start Services (vLLM Profile):** @@ -85,45 +72,45 @@ This uses the default vLLM-based deployment profile (`codegen-xeon-vllm`). 3. **Validate:** Wait several minutes for models to download (especially the first time) and services to initialize. Check container logs (`docker compose logs -f `) or proceed to the validation steps below. -## Available Deployment Options +### Available Deployment Options The `compose.yaml` file uses Docker Compose profiles to select the LLM serving backend. -### Default: vLLM-based Deployment (`--profile codegen-xeon-vllm`) +#### Default: vLLM-based Deployment (`--profile codegen-xeon-vllm`) - **Profile:** `codegen-xeon-vllm` - **Description:** Uses vLLM optimized for Intel CPUs as the LLM serving engine. This is the default profile used in the Quick Start. - **Services Deployed:** `codegen-vllm-server`, `codegen-llm-server`, `codegen-tei-embedding-server`, `codegen-retriever-server`, `redis-vector-db`, `codegen-dataprep-server`, `codegen-backend-server`, `codegen-gradio-ui-server`. 
-### TGI-based Deployment (`--profile codegen-xeon-tgi`) +#### TGI-based Deployment (`--profile codegen-xeon-tgi`) - **Profile:** `codegen-xeon-tgi` - **Description:** Uses Hugging Face Text Generation Inference (TGI) optimized for Intel CPUs as the LLM serving engine. - **Services Deployed:** `codegen-tgi-server`, `codegen-llm-server`, `codegen-tei-embedding-server`, `codegen-retriever-server`, `redis-vector-db`, `codegen-dataprep-server`, `codegen-backend-server`, `codegen-gradio-ui-server`. - **To Run:** ```bash - # Ensure environment variables (host_ip, HUGGINGFACEHUB_API_TOKEN) are set + # Ensure environment variables (HOST_IP, HUGGINGFACEHUB_API_TOKEN) are set docker compose --profile codegen-xeon-tgi up -d ``` -## Configuration Parameters +### Configuration Parameters -### Environment Variables +#### Environment Variables Key parameters are configured via environment variables set before running `docker compose up`. -| Environment Variable | Description | Default (Set Externally) | -| :-------------------------------------- | :------------------------------------------------------------------------------------------------------------------ | :----------------------------------------------------------------------------------------------- | -| `host_ip` | External IP address of the host machine. **Required.** | `your_external_ip_address` | -| `HUGGINGFACEHUB_API_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | -| `LLM_MODEL_ID` | Hugging Face model ID for the CodeGen LLM (used by TGI/vLLM service). Configured within `compose.yaml` environment. | `Qwen/Qwen2.5-Coder-7B-Instruct` | -| `EMBEDDING_MODEL_ID` | Hugging Face model ID for the embedding model (used by TEI service). Configured within `compose.yaml` environment. | `BAAI/bge-base-en-v1.5` | -| `LLM_ENDPOINT` | Internal URL for the LLM serving endpoint (used by `codegen-llm-server`). Configured in `compose.yaml`. 
| `http://codegen-tgi-server:80/generate` or `http://codegen-vllm-server:8000/v1/chat/completions` | -| `TEI_EMBEDDING_ENDPOINT` | Internal URL for the Embedding service. Configured in `compose.yaml`. | `http://codegen-tei-embedding-server:80/embed` | -| `DATAPREP_ENDPOINT` | Internal URL for the Data Preparation service. Configured in `compose.yaml`. | `http://codegen-dataprep-server:80/dataprep` | -| `BACKEND_SERVICE_ENDPOINT` | External URL for the CodeGen Gateway (MegaService). Derived from `host_ip` and port `7778`. | `http://${host_ip}:7778/v1/codegen` | -| `*_PORT` (Internal) | Internal container ports (e.g., `80`, `6379`). Defined in `compose.yaml`. | N/A | -| `http_proxy` / `https_proxy`/`no_proxy` | Network proxy settings (if required). | `""` | +| Environment Variable | Description | Default (Set Externally) | +| :-------------------------------------- | :------------------------------------------------------------------------------------------------------------------ | :--------------------------------------------- | ------------------------------------ | +| `HOST_IP` | External IP address of the host machine. **Required.** | `your_external_ip_address` | +| `HUGGINGFACEHUB_API_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | +| `LLM_MODEL_ID` | Hugging Face model ID for the CodeGen LLM (used by TGI/vLLM service). Configured within `compose.yaml` environment. | `Qwen/Qwen2.5-Coder-7B-Instruct` | +| `EMBEDDING_MODEL_ID` | Hugging Face model ID for the embedding model (used by TEI service). Configured within `compose.yaml` environment. | `BAAI/bge-base-en-v1.5` | +| `LLM_ENDPOINT` | Internal URL for the LLM serving endpoint (used by `codegen-llm-server`). Configured in `compose.yaml`. | `http://codegen-vllm | tgi-server:9000/v1/chat/completions` | +| `TEI_EMBEDDING_ENDPOINT` | Internal URL for the Embedding service. Configured in `compose.yaml`. 
| `http://codegen-tei-embedding-server:80/embed` | +| `DATAPREP_ENDPOINT` | Internal URL for the Data Preparation service. Configured in `compose.yaml`. | `http://codegen-dataprep-server:80/dataprep` | +| `BACKEND_SERVICE_ENDPOINT` | External URL for the CodeGen Gateway (MegaService). Derived from `HOST_IP` and port `7778`. | `http://${HOST_IP}:7778/v1/codegen` | +| `*_PORT` (Internal) | Internal container ports (e.g., `80`, `6379`). Defined in `compose.yaml`. | N/A | +| `http_proxy` / `https_proxy`/`no_proxy` | Network proxy settings (if required). | `""` | Most of these parameters are in `set_env.sh`, you can either modify this file or overwrite the env variables by setting them. @@ -131,7 +118,7 @@ Most of these parameters are in `set_env.sh`, you can either modify this file or source CodeGen/docker_compose/set_env.sh ``` -### Compose Profiles +#### Compose Profiles Docker Compose profiles (`codegen-xeon-vllm`, `codegen-xeon-tgi`) control which LLM serving backend (vLLM or TGI) and its associated dependencies are started. Only one profile should typically be active. @@ -161,23 +148,23 @@ Check logs for specific services: `docker compose logs ` ### Run Validation Script/Commands -Use `curl` commands to test the main service endpoints. Ensure `host_ip` is correctly set in your environment. +Use `curl` commands to test the main service endpoints. Ensure `HOST_IP` is correctly set in your environment. 1. 
**Validate LLM Serving Endpoint (Example for vLLM on default port 9000 internally, exposed differently):** ```bash # This command structure targets the OpenAI-compatible vLLM endpoint - curl http://${host_ip}:9000/v1/chat/completions \ + curl http://${HOST_IP}:9000/v1/chat/completions \ -X POST \ -H 'Content-Type: application/json' \ - -d '{"model": "Qwen/Qwen2.5-Coder-32B-Instruct", "messages": [{"role": "user", "content": "Implement a basic Python class"}], "max_tokens":32}' + -d '{"model": "Qwen/Qwen2.5-Coder-7B-Instruct", "messages": [{"role": "user", "content": "Implement a basic Python class"}], "max_tokens":32}' ``` - **Expected Output:** A JSON response with generated code in `choices[0].message.content`. 2. **Validate CodeGen Gateway (MegaService on default port 7778):** ```bash - curl http://${host_ip}:7778/v1/codegen \ + curl http://${HOST_IP}:7778/v1/codegen \ -H "Content-Type: application/json" \ -d '{"messages": "Write a Python function that adds two numbers."}' ``` @@ -190,7 +177,7 @@ Multiple UI options can be configured via the `compose.yaml`. ### Gradio UI (Default) Access the default Gradio UI by navigating to: -`http://{host_ip}:5173` +`http://{HOST_IP}:5173` _(Port `5173` is the default host mapping for `codegen-gradio-ui-server`)_ ![Gradio UI - Code Generation](../../../../assets/img/codegen_gradio_ui_main.png) @@ -200,7 +187,7 @@ _(Port `5173` is the default host mapping for `codegen-gradio-ui-server`)_ 1. Modify `compose.yaml`: Comment out the `codegen-gradio-ui-server` service and uncomment/add the `codegen-xeon-ui-server` (Svelte) service definition, ensuring the port mapping is correct (e.g., `"- 5173:5173"`). 2. Restart Docker Compose: `docker compose --profile up -d` -3. Access: `http://{host_ip}:5173` (or the host port you mapped). +3. Access: `http://{HOST_IP}:5173` (or the host port you mapped). 
![Svelte UI Init](../../../../assets/img/codeGen_ui_init.jpg) @@ -208,7 +195,7 @@ _(Port `5173` is the default host mapping for `codegen-gradio-ui-server`)_ 1. Modify `compose.yaml`: Comment out the default UI service and uncomment/add the `codegen-xeon-react-ui-server` definition, ensuring correct port mapping (e.g., `"- 5174:80"`). 2. Restart Docker Compose: `docker compose --profile up -d` -3. Access: `http://{host_ip}:5174` (or the host port you mapped). +3. Access: `http://{HOST_IP}:5174` (or the host port you mapped). ![React UI](../../../../assets/img/codegen_react.png) @@ -218,7 +205,7 @@ Users can interact with the backend service using the `Neural Copilot` VS Code e 1. **Install:** Find and install `Neural Copilot` from the VS Code Marketplace. ![Install Copilot](../../../../assets/img/codegen_copilot.png) -2. **Configure:** Set the "Service URL" in the extension settings to your CodeGen backend endpoint: `http://${host_ip}:7778/v1/codegen` (use the correct port if changed). +2. **Configure:** Set the "Service URL" in the extension settings to your CodeGen backend endpoint: `http://${HOST_IP}:7778/v1/codegen` (use the correct port if changed). ![Configure Endpoint](../../../../assets/img/codegen_endpoint.png) 3. **Usage:** - **Inline Suggestion:** Type a comment describing the code you want (e.g., `# Python function to read a file`) and wait for suggestions. @@ -229,7 +216,7 @@ Users can interact with the backend service using the `Neural Copilot` VS Code e ## Troubleshooting - **Model Download Issues:** Check `HUGGINGFACEHUB_API_TOKEN`. Ensure internet connectivity or correct proxy settings. Check logs of `tgi-service`/`vllm-service` and `tei-embedding-server`. Gated models need prior Hugging Face access. -- **Connection Errors:** Verify `host_ip` is correct and accessible. Check `docker ps` for port mappings. Ensure `no_proxy` includes `host_ip` if using a proxy. 
Check logs of the service failing to connect (e.g., `codegen-backend-server` logs if it can't reach `codegen-llm-server`). +- **Connection Errors:** Verify `HOST_IP` is correct and accessible. Check `docker ps` for port mappings. Ensure `no_proxy` includes `HOST_IP` if using a proxy. Check logs of the service failing to connect (e.g., `codegen-backend-server` logs if it can't reach `codegen-llm-server`). - **"Container name is in use"**: Stop existing containers (`docker compose down`) or change `container_name` in `compose.yaml`. - **Resource Issues:** CodeGen models can be memory-intensive. Monitor host RAM usage. Increase Docker resources if needed. diff --git a/CodeGen/docker_compose/intel/hpu/gaudi/README.md b/CodeGen/docker_compose/intel/hpu/gaudi/README.md index 75366d3492..e94ccbf30d 100644 --- a/CodeGen/docker_compose/intel/hpu/gaudi/README.md +++ b/CodeGen/docker_compose/intel/hpu/gaudi/README.md @@ -6,23 +6,10 @@ This README provides instructions for deploying the CodeGen application using Do - [Overview](#overview) - [Prerequisites](#prerequisites) -- [Quick Start](#quick-start) -- [Available Deployment Options](#available-deployment-options) - - [Default: vLLM-based Deployment (`--profile codegen-gaudi-vllm`)](#default-vllm-based-deployment---profile-codegen-gaudi-vllm) - - [TGI-based Deployment (`--profile codegen-gaudi-tgi`)](#tgi-based-deployment---profile-codegen-gaudi-tgi) -- [Configuration Parameters](#configuration-parameters) - - [Environment Variables](#environment-variables) - - [Compose Profiles](#compose-profiles) - - [Docker Compose Gaudi Configuration](#docker-compose-gaudi-configuration) +- [Quick Start Deployment](#quick-start-deployment) - [Building Custom Images (Optional)](#building-custom-images-optional) - [Validate Services](#validate-services) - - [Check Container Status](#check-container-status) - - [Run Validation Script/Commands](#run-validation-scriptcommands) - [Accessing the User Interface 
(UI)](#accessing-the-user-interface-ui) - - [Gradio UI (Default)](#gradio-ui-default) - - [Svelte UI (Optional)](#svelte-ui-optional) - - [React UI (Optional)](#react-ui-optional) - - [VS Code Extension (Optional)](#vs-code-extension-optional) - [Troubleshooting](#troubleshooting) - [Stopping the Application](#stopping-the-application) - [Next Steps](#next-steps) @@ -44,7 +31,7 @@ This guide focuses on running the pre-configured CodeGen service using Docker Co cd GenAIExamples/CodeGen/docker_compose/intel/hpu/gaudi ``` -## Quick Start +## Quick Start Deployment This uses the default vLLM-based deployment profile (`codegen-gaudi-vllm`). @@ -53,28 +40,28 @@ This uses the default vLLM-based deployment profile (`codegen-gaudi-vllm`). ```bash # Replace with your host's external IP address (do not use localhost or 127.0.0.1) - export host_ip="your_external_ip_address" + export HOST_IP="your_external_ip_address" # Replace with your Hugging Face Hub API token export HUGGINGFACEHUB_API_TOKEN="your_huggingface_token" # Optional: Configure proxy if needed # export http_proxy="your_http_proxy" # export https_proxy="your_https_proxy" - # export no_proxy="localhost,127.0.0.1,${host_ip}" # Add other hosts if necessary - source ../../../set_env.sh + # export no_proxy="localhost,127.0.0.1,${HOST_IP}" # Add other hosts if necessary + source ../../set_env.sh ``` _Note: The compose file might read additional variables from set_env.sh. Ensure all required variables like ports (`LLM_SERVICE_PORT`, `MEGA_SERVICE_PORT`, etc.) are set if not using defaults from the compose file._ - like + For instance, edit the set_env.sh to change the LLM model ``` - export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-32B-Instruct" + export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" ``` - can be changed to small model if needed + can be changed to other model if needed ``` - export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" + export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-32B-Instruct" ``` 2. 
**Start Services (vLLM Profile):** @@ -105,7 +92,7 @@ The `compose.yaml` file uses Docker Compose profiles to select the LLM serving b - **Other Services:** Same CPU-based services as the vLLM profile. - **To Run:** ```bash - # Ensure environment variables (host_ip, HUGGINGFACEHUB_API_TOKEN) are set + # Ensure environment variables (HOST_IP, HUGGINGFACEHUB_API_TOKEN) are set docker compose --profile codegen-gaudi-tgi up -d ``` @@ -115,18 +102,18 @@ The `compose.yaml` file uses Docker Compose profiles to select the LLM serving b Key parameters are configured via environment variables set before running `docker compose up`. -| Environment Variable | Description | Default (Set Externally) | -| :-------------------------------------- | :------------------------------------------------------------------------------------------------------------------ | :----------------------------------------------------------------------------------------------- | -| `host_ip` | External IP address of the host machine. **Required.** | `your_external_ip_address` | -| `HUGGINGFACEHUB_API_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | -| `LLM_MODEL_ID` | Hugging Face model ID for the CodeGen LLM (used by TGI/vLLM service). Configured within `compose.yaml` environment. | `Qwen/Qwen2.5-Coder-32B-Instruct` | -| `EMBEDDING_MODEL_ID` | Hugging Face model ID for the embedding model (used by TEI service). Configured within `compose.yaml` environment. | `BAAI/bge-base-en-v1.5` | -| `LLM_ENDPOINT` | Internal URL for the LLM serving endpoint (used by `codegen-llm-server`). Configured in `compose.yaml`. | `http://codegen-tgi-server:80/generate` or `http://codegen-vllm-server:8000/v1/chat/completions` | -| `TEI_EMBEDDING_ENDPOINT` | Internal URL for the Embedding service. Configured in `compose.yaml`. | `http://codegen-tei-embedding-server:80/embed` | -| `DATAPREP_ENDPOINT` | Internal URL for the Data Preparation service. 
Configured in `compose.yaml`. | `http://codegen-dataprep-server:80/dataprep` | -| `BACKEND_SERVICE_ENDPOINT` | External URL for the CodeGen Gateway (MegaService). Derived from `host_ip` and port `7778`. | `http://${host_ip}:7778/v1/codegen` | -| `*_PORT` (Internal) | Internal container ports (e.g., `80`, `6379`). Defined in `compose.yaml`. | N/A | -| `http_proxy` / `https_proxy`/`no_proxy` | Network proxy settings (if required). | `""` | +| Environment Variable | Description | Default (Set Externally) | +| :-------------------------------------- | :------------------------------------------------------------------------------------------------------------------ | :--------------------------------------------- | ------------------------------------ | +| `HOST_IP` | External IP address of the host machine. **Required.** | `your_external_ip_address` | +| `HUGGINGFACEHUB_API_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | +| `LLM_MODEL_ID` | Hugging Face model ID for the CodeGen LLM (used by TGI/vLLM service). Configured within `compose.yaml` environment. | `Qwen/Qwen2.5-Coder-7B-Instruct` | +| `EMBEDDING_MODEL_ID` | Hugging Face model ID for the embedding model (used by TEI service). Configured within `compose.yaml` environment. | `BAAI/bge-base-en-v1.5` | +| `LLM_ENDPOINT` | Internal URL for the LLM serving endpoint (used by `llm-codegen-vllm-server`). Configured in `compose.yaml`. | http://codegen-vllm | tgi-server:9000/v1/chat/completions` | +| `TEI_EMBEDDING_ENDPOINT` | Internal URL for the Embedding service. Configured in `compose.yaml`. | `http://codegen-tei-embedding-server:80/embed` | +| `DATAPREP_ENDPOINT` | Internal URL for the Data Preparation service. Configured in `compose.yaml`. | `http://codegen-dataprep-server:80/dataprep` | +| `BACKEND_SERVICE_ENDPOINT` | External URL for the CodeGen Gateway (MegaService). Derived from `HOST_IP` and port `7778`. 
| `http://${HOST_IP}:7778/v1/codegen` | +| `*_PORT` (Internal) | Internal container ports (e.g., `80`, `6379`). Defined in `compose.yaml`. | N/A | +| `http_proxy` / `https_proxy`/`no_proxy` | Network proxy settings (if required). | `""` | Most of these parameters are in `set_env.sh`, you can either modify this file or overwrite the env variables by setting them. @@ -181,21 +168,21 @@ Check logs: `docker compose logs `. Pay attention to `vllm-gaudi-s ### Run Validation Script/Commands -Use `curl` commands targeting the main service endpoints. Ensure `host_ip` is correctly set. +Use `curl` commands targeting the main service endpoints. Ensure `HOST_IP` is correctly set. 1. **Validate LLM Serving Endpoint (Example for vLLM on default port 9000 internally, exposed differently):** ```bash # This command structure targets the OpenAI-compatible vLLM endpoint - curl http://${host_ip}:9000/v1/chat/completions \ + curl http://${HOST_IP}:9000/v1/chat/completions \ -X POST \ -H 'Content-Type: application/json' \ - -d '{"model": "Qwen/Qwen2.5-Coder-32B-Instruct", "messages": [{"role": "user", "content": "Implement a basic Python class"}], "max_tokens":32}' + -d '{"model": "Qwen/Qwen2.5-Coder-7B-Instruct", "messages": [{"role": "user", "content": "Implement a basic Python class"}], "max_tokens":32}' ``` 2. **Validate CodeGen Gateway (MegaService, default host port 7778):** ```bash - curl http://${host_ip}:7778/v1/codegen \ + curl http://${HOST_IP}:7778/v1/codegen \ -H "Content-Type: application/json" \ -d '{"messages": "Implement a sorting algorithm in Python."}' ``` @@ -208,7 +195,7 @@ UI options are similar to the Xeon deployment. ### Gradio UI (Default) Access the default Gradio UI: -`http://{host_ip}:5173` +`http://{HOST_IP}:5173` _(Port `5173` is the default host mapping)_ ![Gradio UI](../../../../assets/img/codegen_gradio_ui_main.png) @@ -217,17 +204,17 @@ _(Port `5173` is the default host mapping)_ 1. 
Modify `compose.yaml`: Swap Gradio service for Svelte (`codegen-gaudi-ui-server`), check port map (e.g., `5173:5173`). 2. Restart: `docker compose --profile up -d` -3. Access: `http://{host_ip}:5173` +3. Access: `http://{HOST_IP}:5173` ### React UI (Optional) 1. Modify `compose.yaml`: Swap Gradio service for React (`codegen-gaudi-react-ui-server`), check port map (e.g., `5174:80`). 2. Restart: `docker compose --profile up -d` -3. Access: `http://{host_ip}:5174` +3. Access: `http://{HOST_IP}:5174` ### VS Code Extension (Optional) -Use the `Neural Copilot` extension configured with the CodeGen backend URL: `http://${host_ip}:7778/v1/codegen`. (See Xeon README for detailed setup screenshots). +Use the `Neural Copilot` extension configured with the CodeGen backend URL: `http://${HOST_IP}:7778/v1/codegen`. (See Xeon README for detailed setup screenshots). ## Troubleshooting @@ -237,7 +224,7 @@ Use the `Neural Copilot` extension configured with the CodeGen backend URL: `htt - Verify `runtime: habana` and volume mounts in `compose.yaml`. - Gaudi initialization can take significant time and memory. Monitor resource usage. - **Model Download Issues:** Check `HUGGINGFACEHUB_API_TOKEN`, internet access, proxy settings. Check LLM service logs. -- **Connection Errors:** Verify `host_ip`, ports, and proxy settings. Use `docker ps` and check service logs. +- **Connection Errors:** Verify `HOST_IP`, ports, and proxy settings. Use `docker ps` and check service logs. 
## Stopping the Application diff --git a/CodeGen/docker_compose/intel/set_env.sh b/CodeGen/docker_compose/intel/set_env.sh new file mode 100644 index 0000000000..ea48c198bb --- /dev/null +++ b/CodeGen/docker_compose/intel/set_env.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash + +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 +pushd "../../" > /dev/null +source .set_env.sh +popd > /dev/null + +export HOST_IP=$(hostname -I | awk '{print $1}') +export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +if [ -z "${HUGGINGFACEHUB_API_TOKEN}" ]; then + echo "Error: HUGGINGFACEHUB_API_TOKEN is not set. Please set HUGGINGFACEHUB_API_TOKEN" +fi + +if [ -z "${HOST_IP}" ]; then + echo "Error: HOST_IP is not set. Please set HOST_IP first." +fi + +export no_proxy=${no_proxy},${HOST_IP} +export http_proxy=${http_proxy} +export https_proxy=${https_proxy} + +export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" +export LLM_SERVICE_PORT=9000 +export LLM_ENDPOINT="http://${HOST_IP}:8028" +export LLM_SERVICE_HOST_IP=${HOST_IP} +export TGI_LLM_ENDPOINT="http://${HOST_IP}:8028" + +export MEGA_SERVICE_PORT=7778 +export MEGA_SERVICE_HOST_IP=${HOST_IP} +export BACKEND_SERVICE_ENDPOINT="http://${HOST_IP}:7778/v1/codegen" + +export REDIS_DB_PORT=6379 +export REDIS_INSIGHTS_PORT=8001 +export REDIS_RETRIEVER_PORT=7000 +export REDIS_URL="redis://${HOST_IP}:${REDIS_DB_PORT}" +export RETRIEVAL_SERVICE_HOST_IP=${HOST_IP} +export RETRIEVER_COMPONENT_NAME="OPEA_RETRIEVER_REDIS" +export INDEX_NAME="CodeGen" + +export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" +export EMBEDDER_PORT=6000 +export TEI_EMBEDDER_PORT=8090 +export TEI_EMBEDDING_HOST_IP=${HOST_IP} +export TEI_EMBEDDING_ENDPOINT="http://${HOST_IP}:${TEI_EMBEDDER_PORT}" + +export DATAPREP_REDIS_PORT=6007 +export DATAPREP_ENDPOINT="http://${HOST_IP}:${DATAPREP_REDIS_PORT}/v1/dataprep" +export LOGFLAG=false +export MODEL_CACHE=${model_cache:-"./data"} +export NUM_CARDS=1 diff --git a/CodeGen/docker_compose/set_env.sh 
b/CodeGen/docker_compose/set_env.sh deleted file mode 100644 index bde459adde..0000000000 --- a/CodeGen/docker_compose/set_env.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env bash - -# Copyright (C) 2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 -pushd "../../" > /dev/null -source .set_env.sh -popd > /dev/null - -export host_ip=$(hostname -I | awk '{print $1}') -if [ -z "${HUGGINGFACEHUB_API_TOKEN}" ]; then - echo "Error: HUGGINGFACEHUB_API_TOKEN is not set. Please set HUGGINGFACEHUB_API_TOKEN" -fi - -if [ -z "${host_ip}" ]; then - echo "Error: host_ip is not set. Please set host_ip first." -fi - -export no_proxy=${no_proxy},${host_ip} -export http_proxy=${http_proxy} -export https_proxy=${https_proxy} - -export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-32B-Instruct" -export LLM_SERVICE_PORT=9000 -export LLM_ENDPOINT="http://${host_ip}:8028" -export LLM_SERVICE_HOST_IP=${host_ip} -export TGI_LLM_ENDPOINT="http://${host_ip}:8028" - -export MEGA_SERVICE_PORT=7778 -export MEGA_SERVICE_HOST_IP=${host_ip} -export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:7778/v1/codegen" - -export REDIS_DB_PORT=6379 -export REDIS_INSIGHTS_PORT=8001 -export REDIS_RETRIEVER_PORT=7000 -export REDIS_URL="redis://${host_ip}:${REDIS_DB_PORT}" -export RETRIEVAL_SERVICE_HOST_IP=${host_ip} -export RETRIEVER_COMPONENT_NAME="OPEA_RETRIEVER_REDIS" -export INDEX_NAME="CodeGen" - -export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" -export EMBEDDER_PORT=6000 -export TEI_EMBEDDER_PORT=8090 -export TEI_EMBEDDING_HOST_IP=${host_ip} -export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:${TEI_EMBEDDER_PORT}" - -export DATAPREP_REDIS_PORT=6007 -export DATAPREP_ENDPOINT="http://${host_ip}:${DATAPREP_REDIS_PORT}/v1/dataprep" -export LOGFLAG=false -export MODEL_CACHE="./data" -export NUM_CARDS=1 diff --git a/CodeGen/tests/README.md b/CodeGen/tests/README.md new file mode 100644 index 0000000000..4909899be7 --- /dev/null +++ b/CodeGen/tests/README.md @@ -0,0 +1,33 @@ +# CodeGen E2E test scripts + +## Set 
the required environment variable + +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +## Run test + +On Intel Xeon with TGI: + +```bash +bash test_compose_on_xeon.sh +``` + +On Intel Gaudi with TGI: + +```bash +bash test_compose_on_gaudi.sh +``` + +On AMD ROCm with TGI: + +```bash +bash test_compose_on_rocm.sh +``` + +On AMD ROCm with vLLM: + +```bash +bash test_compose_vllm_on_rocm.sh +``` diff --git a/CodeGen/tests/test_compose_on_gaudi.sh b/CodeGen/tests/test_compose_on_gaudi.sh index 413ce53808..87acfbaa5a 100644 --- a/CodeGen/tests/test_compose_on_gaudi.sh +++ b/CodeGen/tests/test_compose_on_gaudi.sh @@ -10,21 +10,11 @@ echo "TAG=IMAGE_TAG=${IMAGE_TAG}" export REGISTRY=${IMAGE_REPO} export TAG=${IMAGE_TAG} export MODEL_CACHE=${model_cache:-"./data"} -export REDIS_DB_PORT=6379 -export REDIS_INSIGHTS_PORT=8001 -export REDIS_RETRIEVER_PORT=7000 -export EMBEDDER_PORT=6000 -export TEI_EMBEDDER_PORT=8090 -export DATAPREP_REDIS_PORT=6007 WORKPATH=$(dirname "$PWD") LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') - -export http_proxy=${http_proxy} -export https_proxy=${https_proxy} -export no_proxy=${no_proxy},${ip_address} - +source $WORKPATH/docker_compose/intel/set_env.sh function build_docker_images() { opea_branch=${opea_branch:-"main"} @@ -54,28 +44,6 @@ function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" - export LLM_ENDPOINT="http://${ip_address}:8028" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export MEGA_SERVICE_PORT=7778 - export MEGA_SERVICE_HOST_IP=${ip_address} - export LLM_SERVICE_HOST_IP=${ip_address} - export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:${MEGA_SERVICE_PORT}/v1/codegen" - export NUM_CARDS=1 - export host_ip=${ip_address} - - export REDIS_URL="redis://${host_ip}:${REDIS_DB_PORT}" - export RETRIEVAL_SERVICE_HOST_IP=${host_ip} - export RETRIEVER_COMPONENT_NAME="OPEA_RETRIEVER_REDIS" - export 
INDEX_NAME="CodeGen" - - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export TEI_EMBEDDING_HOST_IP=${host_ip} - export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:${TEI_EMBEDDER_PORT}" - export DATAPREP_ENDPOINT="http://${host_ip}:${DATAPREP_REDIS_PORT}/v1/dataprep" - - export INDEX_NAME="CodeGen" - # Start Docker Containers docker compose --profile ${compose_profile} up -d | tee ${LOG_PATH}/start_services_with_compose.log diff --git a/CodeGen/tests/test_compose_on_rocm.sh b/CodeGen/tests/test_compose_on_rocm.sh index 94f006e358..173a0538fa 100644 --- a/CodeGen/tests/test_compose_on_rocm.sh +++ b/CodeGen/tests/test_compose_on_rocm.sh @@ -35,18 +35,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/amd/gpu/rocm/ - - export CODEGEN_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" - export CODEGEN_TGI_SERVICE_PORT=8028 - export CODEGEN_TGI_LLM_ENDPOINT="http://${ip_address}:${CODEGEN_TGI_SERVICE_PORT}" - export CODEGEN_LLM_SERVICE_PORT=9000 - export CODEGEN_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export CODEGEN_MEGA_SERVICE_HOST_IP=${ip_address} - export CODEGEN_LLM_SERVICE_HOST_IP=${ip_address} - export CODEGEN_BACKEND_SERVICE_PORT=7778 - export CODEGEN_BACKEND_SERVICE_URL="http://${ip_address}:${CODEGEN_BACKEND_SERVICE_PORT}/v1/codegen" - export CODEGEN_UI_SERVICE_PORT=5173 - export HOST_IP=${ip_address} + source set_env.sh sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env diff --git a/CodeGen/tests/test_compose_on_xeon.sh b/CodeGen/tests/test_compose_on_xeon.sh index 4aaa180ec3..a50e5f0a7e 100644 --- a/CodeGen/tests/test_compose_on_xeon.sh +++ b/CodeGen/tests/test_compose_on_xeon.sh @@ -10,20 +10,11 @@ echo "TAG=IMAGE_TAG=${IMAGE_TAG}" export REGISTRY=${IMAGE_REPO} export TAG=${IMAGE_TAG} export MODEL_CACHE=${model_cache:-"./data"} -export REDIS_DB_PORT=6379 -export REDIS_INSIGHTS_PORT=8001 -export REDIS_RETRIEVER_PORT=7000 -export EMBEDDER_PORT=6000 -export TEI_EMBEDDER_PORT=8090 -export 
DATAPREP_REDIS_PORT=6007 WORKPATH=$(dirname "$PWD") LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') - -export http_proxy=${http_proxy} -export https_proxy=${https_proxy} -export no_proxy=${no_proxy},${ip_address} +source $WORKPATH/docker_compose/intel/set_env.sh function build_docker_images() { opea_branch=${opea_branch:-"main"} @@ -56,25 +47,6 @@ function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon/ - export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" - export LLM_ENDPOINT="http://${ip_address}:8028" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export MEGA_SERVICE_PORT=7778 - export MEGA_SERVICE_HOST_IP=${ip_address} - export LLM_SERVICE_HOST_IP=${ip_address} - export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:${MEGA_SERVICE_PORT}/v1/codegen" - export host_ip=${ip_address} - - export REDIS_URL="redis://${host_ip}:${REDIS_DB_PORT}" - export RETRIEVAL_SERVICE_HOST_IP=${host_ip} - export RETRIEVER_COMPONENT_NAME="OPEA_RETRIEVER_REDIS" - export INDEX_NAME="CodeGen" - - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export TEI_EMBEDDING_HOST_IP=${host_ip} - export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:${TEI_EMBEDDER_PORT}" - export DATAPREP_ENDPOINT="http://${host_ip}:${DATAPREP_REDIS_PORT}/v1/dataprep" - # Start Docker Containers docker compose --profile ${compose_profile} up -d > ${LOG_PATH}/start_services_with_compose.log diff --git a/CodeGen/tests/test_compose_vllm_on_rocm.sh b/CodeGen/tests/test_compose_vllm_on_rocm.sh index 1d78f2a0d7..33fef0b279 100644 --- a/CodeGen/tests/test_compose_vllm_on_rocm.sh +++ b/CodeGen/tests/test_compose_vllm_on_rocm.sh @@ -34,18 +34,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/amd/gpu/rocm/ - - export CODEGEN_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" - export CODEGEN_VLLM_SERVICE_PORT=8028 - export CODEGEN_VLLM_ENDPOINT="http://${ip_address}:${CODEGEN_VLLM_SERVICE_PORT}" - export 
CODEGEN_LLM_SERVICE_PORT=9000 - export CODEGEN_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export CODEGEN_MEGA_SERVICE_HOST_IP=${ip_address} - export CODEGEN_LLM_SERVICE_HOST_IP=${ip_address} - export CODEGEN_BACKEND_SERVICE_PORT=7778 - export CODEGEN_BACKEND_SERVICE_URL="http://${ip_address}:${CODEGEN_BACKEND_SERVICE_PORT}/v1/codegen" - export CODEGEN_UI_SERVICE_PORT=5173 - export HOST_IP=${ip_address} + source set_env_vllm.sh sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env From 94d483014e3f897ee4aebd2f566e0fea7dcaa888 Mon Sep 17 00:00:00 2001 From: Jean Yu Date: Thu, 22 May 2025 00:27:18 -0500 Subject: [PATCH 067/217] Add HybridRAG as a new application in the GenAIExamples (#1968) Signed-off-by: jeanyu-habana Signed-off-by: siddhivelankar23 Signed-off-by: Ruslan Rin --- HybridRAG/Dockerfile | 10 + HybridRAG/README.md | 98 ++++ HybridRAG/README_NOTICE.md | 3 + HybridRAG/assets/img/chat_ui_init.png | Bin 0 -> 15118 bytes HybridRAG/assets/img/chat_ui_response.png | Bin 0 -> 76033 bytes HybridRAG/assets/img/chat_ui_upload.png | Bin 0 -> 87990 bytes .../img/hybridrag_retriever_architecture.png | Bin 0 -> 99359 bytes .../docker_compose/intel/hpu/gaudi/README.md | 161 ++++++ .../intel/hpu/gaudi/compose.yaml | 240 +++++++++ .../docker_compose/intel/hpu/gaudi/set_env.sh | 56 +++ HybridRAG/docker_image_build/build.yaml | 51 ++ HybridRAG/hybridrag.py | 464 ++++++++++++++++++ HybridRAG/tests/data/Acne_Vulgaris.txt | 1 + HybridRAG/tests/data/Diabetes.txt | 1 + HybridRAG/tests/test_compose_on_gaudi.sh | 259 ++++++++++ HybridRAG/ui/docker/Dockerfile | 26 + HybridRAG/ui/svelte/.editorconfig | 10 + HybridRAG/ui/svelte/.env | 7 + HybridRAG/ui/svelte/.eslintignore | 13 + HybridRAG/ui/svelte/.eslintrc.cjs | 34 ++ HybridRAG/ui/svelte/.prettierignore | 13 + HybridRAG/ui/svelte/.prettierrc | 1 + HybridRAG/ui/svelte/README.md | 42 ++ HybridRAG/ui/svelte/package.json | 60 +++ HybridRAG/ui/svelte/playwright.config.ts | 87 ++++ 
HybridRAG/ui/svelte/postcss.config.cjs | 27 + HybridRAG/ui/svelte/src/app.d.ts | 19 + HybridRAG/ui/svelte/src/app.html | 28 ++ HybridRAG/ui/svelte/src/app.postcss | 86 ++++ .../DocManagement/LinkfolderIcon.svelte | 36 ++ .../lib/assets/DocManagement/fileIcon.svelte | 30 ++ .../assets/DocManagement/folderIcon.svelte | 30 ++ .../lib/assets/avatar/svelte/Delete.svelte | 30 ++ .../lib/assets/chat/svelte/Assistant.svelte | 44 ++ .../assets/chat/svelte/PaperAirplane.svelte | 68 +++ .../assets/chat/svelte/PersonOutlined.svelte | 26 + .../src/lib/assets/layout/css/driver.css | 94 ++++ .../src/lib/assets/upload/deleteIcon.svelte | 22 + .../lib/assets/upload/loading-button.svelte | 25 + .../svelte/src/lib/assets/upload/next.svelte | 31 ++ .../src/lib/assets/upload/no-file.svelte | 37 ++ .../src/lib/assets/upload/previous.svelte | 31 ++ .../svelte/src/lib/assets/voice/svg/paste.svg | 1 + .../src/lib/assets/voice/svg/uploadFile.svg | 1 + .../src/lib/modules/chat/ChatMessage.svelte | 70 +++ .../src/lib/modules/chat/MessageAvatar.svelte | 30 ++ .../src/lib/modules/chat/MessageTimer.svelte | 67 +++ .../src/lib/modules/frame/Layout.svelte | 48 ++ .../ui/svelte/src/lib/network/chat/Network.ts | 41 ++ .../svelte/src/lib/network/upload/Network.ts | 82 ++++ HybridRAG/ui/svelte/src/lib/shared/Utils.ts | 54 ++ .../lib/shared/components/chat/gallery.svelte | 156 ++++++ .../components/doc_management/docCard.svelte | 150 ++++++ .../treeView/svelte-tree.svelte | 35 ++ .../treeView/tree-branch.svelte | 46 ++ .../doc_management/treeView/tree-node.svelte | 111 +++++ .../shared/components/loading/Loading.svelte | 48 ++ .../shared/components/loading/Spinner.svelte | 68 +++ .../components/scrollbar/Scrollbar.svelte | 48 ++ .../components/upload/PasteKnowledge.svelte | 52 ++ .../components/upload/upload-knowledge.svelte | 49 ++ .../components/upload/uploadFile.svelte | 184 +++++++ .../src/lib/shared/constant/Interface.ts | 47 ++ .../src/lib/shared/stores/common/Store.ts | 41 ++ 
HybridRAG/ui/svelte/src/routes/+layout.svelte | 48 ++ HybridRAG/ui/svelte/src/routes/+page.svelte | 318 ++++++++++++ HybridRAG/ui/svelte/src/routes/+page.ts | 26 + HybridRAG/ui/svelte/static/favicon.png | Bin 0 -> 70954 bytes HybridRAG/ui/svelte/svelte.config.js | 38 ++ HybridRAG/ui/svelte/tailwind.config.cjs | 43 ++ HybridRAG/ui/svelte/tests/chatQnA.spec.ts | 82 ++++ HybridRAG/ui/svelte/tests/test_file.txt | 104 ++++ HybridRAG/ui/svelte/tsconfig.json | 16 + HybridRAG/ui/svelte/vite.config.ts | 25 + pyproject.toml | 2 +- 75 files changed, 4431 insertions(+), 1 deletion(-) create mode 100644 HybridRAG/Dockerfile create mode 100644 HybridRAG/README.md create mode 100644 HybridRAG/README_NOTICE.md create mode 100644 HybridRAG/assets/img/chat_ui_init.png create mode 100644 HybridRAG/assets/img/chat_ui_response.png create mode 100644 HybridRAG/assets/img/chat_ui_upload.png create mode 100644 HybridRAG/assets/img/hybridrag_retriever_architecture.png create mode 100644 HybridRAG/docker_compose/intel/hpu/gaudi/README.md create mode 100644 HybridRAG/docker_compose/intel/hpu/gaudi/compose.yaml create mode 100644 HybridRAG/docker_compose/intel/hpu/gaudi/set_env.sh create mode 100644 HybridRAG/docker_image_build/build.yaml create mode 100644 HybridRAG/hybridrag.py create mode 100644 HybridRAG/tests/data/Acne_Vulgaris.txt create mode 100644 HybridRAG/tests/data/Diabetes.txt create mode 100755 HybridRAG/tests/test_compose_on_gaudi.sh create mode 100644 HybridRAG/ui/docker/Dockerfile create mode 100644 HybridRAG/ui/svelte/.editorconfig create mode 100644 HybridRAG/ui/svelte/.env create mode 100644 HybridRAG/ui/svelte/.eslintignore create mode 100644 HybridRAG/ui/svelte/.eslintrc.cjs create mode 100644 HybridRAG/ui/svelte/.prettierignore create mode 100644 HybridRAG/ui/svelte/.prettierrc create mode 100644 HybridRAG/ui/svelte/README.md create mode 100644 HybridRAG/ui/svelte/package.json create mode 100644 HybridRAG/ui/svelte/playwright.config.ts create mode 100644 
HybridRAG/ui/svelte/postcss.config.cjs create mode 100644 HybridRAG/ui/svelte/src/app.d.ts create mode 100644 HybridRAG/ui/svelte/src/app.html create mode 100644 HybridRAG/ui/svelte/src/app.postcss create mode 100644 HybridRAG/ui/svelte/src/lib/assets/DocManagement/LinkfolderIcon.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/assets/DocManagement/fileIcon.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/assets/DocManagement/folderIcon.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/assets/avatar/svelte/Delete.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/assets/chat/svelte/Assistant.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/assets/chat/svelte/PaperAirplane.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/assets/chat/svelte/PersonOutlined.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/assets/layout/css/driver.css create mode 100644 HybridRAG/ui/svelte/src/lib/assets/upload/deleteIcon.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/assets/upload/loading-button.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/assets/upload/next.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/assets/upload/no-file.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/assets/upload/previous.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/assets/voice/svg/paste.svg create mode 100644 HybridRAG/ui/svelte/src/lib/assets/voice/svg/uploadFile.svg create mode 100644 HybridRAG/ui/svelte/src/lib/modules/chat/ChatMessage.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/modules/chat/MessageAvatar.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/modules/chat/MessageTimer.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/modules/frame/Layout.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/network/chat/Network.ts create mode 100644 HybridRAG/ui/svelte/src/lib/network/upload/Network.ts create mode 100644 HybridRAG/ui/svelte/src/lib/shared/Utils.ts create mode 100644 
HybridRAG/ui/svelte/src/lib/shared/components/chat/gallery.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/shared/components/doc_management/docCard.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/shared/components/doc_management/treeView/svelte-tree.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/shared/components/doc_management/treeView/tree-branch.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/shared/components/doc_management/treeView/tree-node.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/shared/components/loading/Loading.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/shared/components/loading/Spinner.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/shared/components/scrollbar/Scrollbar.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/shared/components/upload/PasteKnowledge.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/shared/components/upload/upload-knowledge.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/shared/components/upload/uploadFile.svelte create mode 100644 HybridRAG/ui/svelte/src/lib/shared/constant/Interface.ts create mode 100644 HybridRAG/ui/svelte/src/lib/shared/stores/common/Store.ts create mode 100644 HybridRAG/ui/svelte/src/routes/+layout.svelte create mode 100644 HybridRAG/ui/svelte/src/routes/+page.svelte create mode 100644 HybridRAG/ui/svelte/src/routes/+page.ts create mode 100644 HybridRAG/ui/svelte/static/favicon.png create mode 100644 HybridRAG/ui/svelte/svelte.config.js create mode 100644 HybridRAG/ui/svelte/tailwind.config.cjs create mode 100644 HybridRAG/ui/svelte/tests/chatQnA.spec.ts create mode 100644 HybridRAG/ui/svelte/tests/test_file.txt create mode 100644 HybridRAG/ui/svelte/tsconfig.json create mode 100644 HybridRAG/ui/svelte/vite.config.ts diff --git a/HybridRAG/Dockerfile b/HybridRAG/Dockerfile new file mode 100644 index 0000000000..782160dccd --- /dev/null +++ b/HybridRAG/Dockerfile @@ -0,0 +1,10 @@ +# Copyright (C) 2025 Intel Corporation +# 
SPDX-License-Identifier: Apache-2.0 + +ARG BASE_TAG=latest +FROM opea/comps-base:$BASE_TAG + +COPY ./hybridrag.py $HOME/hybridrag.py + +ENTRYPOINT ["python", "hybridrag.py"] + diff --git a/HybridRAG/README.md b/HybridRAG/README.md new file mode 100644 index 0000000000..6cdb5733f8 --- /dev/null +++ b/HybridRAG/README.md @@ -0,0 +1,98 @@ +# HybridRAG Application + +Enterprise AI systems require solutions that handle both structured data (databases, transactions, CSVs, JSON) and unstructured data (documents, images, audio). While traditional VectorRAG excels at semantic search across documents, it struggles with complex queries requiring global context or relationship-aware reasoning. HybridRAG application addresses these gaps by combining GraphRAG (knowledge graph-based retrieval) and VectorRAG (vector database retrieval) for enhanced accuracy and contextual relevance. + +## Table of contents + +1. [Architecture](#architecture) +2. [Deployment](#deployment) + +## Architecture + +The HybridRAG application is a customizable end-to-end workflow that leverages the capabilities of LLMs and RAG efficiently. HybridRAG architecture is shown below: + +![architecture](./assets/img/hybridrag_retriever_architecture.png) + +This application is modular as it leverages each component as a microservice(as defined in [GenAIComps](https://github.com/opea-project/GenAIComps)) that can scale independently. It comprises data preparation, embedding, retrieval, reranker(optional) and LLM microservices. All these microservices are stitched together by the HybridRAG megaservice that orchestrates the data through these microservices. The flow chart below shows the information flow between different microservices for this example. 
+ +```mermaid +--- +config: + flowchart: + nodeSpacing: 400 + rankSpacing: 100 + curve: linear + themeVariables: + fontSize: 50px +--- +flowchart LR + %% Colors %% + classDef blue fill:#ADD8E6,stroke:#ADD8E6,stroke-width:2px,fill-opacity:0.5 + classDef orange fill:#FBAA60,stroke:#ADD8E6,stroke-width:2px,fill-opacity:0.5 + classDef orchid fill:#C26DBC,stroke:#ADD8E6,stroke-width:2px,fill-opacity:0.5 + classDef invisible fill:transparent,stroke:transparent; + style HybridRAG-MegaService stroke:#000000 + + %% Subgraphs %% + subgraph HybridRAG-MegaService["HybridRAG MegaService "] + direction LR + EM([Embedding MicroService]):::blue + RET([Retrieval MicroService]):::blue + RER([Rerank MicroService]):::blue + LLM([LLM MicroService]):::blue + direction LR + T2C([Text2Cypher MicroService]):::blue + LLM([LLM MicroService]):::blue + end + subgraph UserInterface[" User Interface "] + direction LR + a([User Input Query]):::orchid + UI([UI server
]):::orchid + end + + + + TEI_RER{{Reranking service
}} + TEI_EM{{Embedding service
}} + VDB{{Vector DB

}} + GDB{{Graph DB

}} + R_RET{{Retriever service
}} + DP([Data Preparation MicroService]):::blue + S2G([Struct2Graph MicroService]):::blue + LLM_gen{{LLM Service
}} + GW([HybridRAG GateWay
]):::orange + + %% Questions interaction + direction LR + a[User Input Query] --> UI + UI --> GW + GW <==> HybridRAG-MegaService + EM ==> RET + RET ==> RER + RER ==> LLM + direction LR + T2C ==> LLM + + + %% Embedding service flow + direction LR + EM <-.-> TEI_EM + RET <-.-> R_RET + RER <-.-> TEI_RER + LLM <-.-> LLM_gen + + direction TB + %% Vector DB interaction + R_RET <-.->|d|VDB + DP <-.->|d|VDB + + direction TB + %% Graph DB interaction + T2C <-.->|d|GDB + S2G <-.->|d|GDB + +``` + +## Deployment + +[HybridRAG deployment on Intel Gaudi](./docker_compose/intel/hpu/gaudi/README.md) diff --git a/HybridRAG/README_NOTICE.md b/HybridRAG/README_NOTICE.md new file mode 100644 index 0000000000..6874c1bf59 --- /dev/null +++ b/HybridRAG/README_NOTICE.md @@ -0,0 +1,3 @@ +# Notice for FFmpeg: + +FFmpeg is an open source project licensed under LGPL and GPL. See https://www.ffmpeg.org/legal.html. You are solely responsible for determining if your use of FFmpeg requires any additional licenses. Intel is not responsible for obtaining any such licenses, nor liable for any licensing fees due, in connection with your use of FFmpeg. 
diff --git a/HybridRAG/assets/img/chat_ui_init.png b/HybridRAG/assets/img/chat_ui_init.png new file mode 100644 index 0000000000000000000000000000000000000000..392228d5ae6d29bc42bd4d5c7416589c3368cf25 GIT binary patch literal 15118 zcmeHu`BzhCw?8ef(n^8$TBU$MEwvO8P!W(R(W*rml~H7f6aksXgdrqRQK&LRWm2Xn z5Rfr4CCot)kXdFz2oVAV2oRExgv{UQeeZYOzu>OEYx&d3InO%h?44)t&$BGFk8ClpjUrVo~F%RHmWXz_nTs-d_H@8JX6u9~o?l9yp zs%L2?y;vyO^ZlOi=OqBH+27{P+L#jF%wWNj8u)x2zUKgQ!38 zmb$?s>y3dEW&e^|BJ*L)jGFMtCYdhrmBi1N{`edD>HOR`j2}K7zc~u{Jbv_@)u+>+ z@9%x{>3IKp&99_~JKry9eLDUAkAKe1PekWA+t&*7TH^eTXBwo&-=sHg*Vv_c!`i~G zQRcvE(U!`yqu_q_XLlFm)5F6n`CECarjQ92)tiuq!BhIdlr*FOguv83)Zz;O{{vp|9HAXm3=O@c>EDIaKk>P4ou$M0rLN+-^)rARhpVyC=PWyY>$WqV&} zb6{(G6a(hVFUxY2dcM!M5-V6-8$ff@`j`1J5Xw7c7dKDCK)$|1JA!auSKGd&{Y+-2 zMAdlOcEe2AI}Lx@0^y8T*+iYvmd>b`U=^V+;9=b8Q4O6bu`Se%5~Mk^%X3tjqgljz z?_J{aW(cETpn5jQI)8xTjj8i_Q&9ORJWZ4NzzYDYf63v?KeBJX>F8mfc#WN=o1*+( z$6u?d>234aZX=7+tX_5D@6ceoEhzA)^kv4)TW?(K27MRK7XmD+`*sI2_IS9A*2UBm z1aouS1JQVkm+X)zn7V}NETrjli@R_9#Zklz9RdCo(o^i|P$F_Y@hag}F~4>r1r>+; zHAA07-~`jcjyM^wza$OSMEj@dCHRYdr<|;o@hQ~ltE{U%iil?&D~sB{<$On^huxQx zk>NxlM_|!GD+4un;RqV0ud_8)L&Nr5NAYft4q9>e<(jvYJNXdnKtiR$!7%uio5_(( z+QV-6+W{-i*HT+N>Oon3S@jKNY5ce8ha4J18LLIveR@IVg-*tyBhY4XetuK7??}w3k(DNj5QSr-yyIyVR7uXi zUlbL3Woq_moq4;Vsgtu=W8?DBTkm{COBtL~5U_#meCS7>zAos9G(V1fkTf{>=XW}r zMUFRIfVeXLf~!LdWZ)9z(W+|>7iLiA0j!D=^aW+8ae0p({bqU^3S(~uTievYq}jkx z#S11{fp{#OMGXcJsqKbJq_G^UWz7f)o}&&}H^5-xwb3!eB!vrd8c%8K-QshJzvAj$ z-0W^QS@K>yq;E^YMRK>lhigUw4X+9tIS)s0P^SNW#2=wE^+H2;1Xy6kaq< z$kR+ygSvoOHra)yi>-BNT+T}QZMG5DHQXfPTHyLaCAZQt!`8Z>>8x>i3RRlMABPrF}ee^4{M<+1^~XaKXUaK}WROf>+o}$#pOlf1Hq87E$)`q=rAHIo*Y2 z6Kng?z3vm*^HZlS{jD+Z#Ph?uk$MA^HYO8z)p&7woj&{s{}b4-wxurHP!h7TnO8A9 z=xeQG;KZ*<2^bAo5s?VQGUuUG6unrA6Coy7EgD}5)9Qw?d5rg)$$KqQ1Z?bKnk?{E z3YzkPhz%{vx%xrsC<)@k$xl~9NS0cO8lLPox)LD;WW3A|xd}Q3czS`I4ZMA3a9eGXh-V`#zR5bXkg^CH%?Vy z>Q2p+_(R@~*TCS&)Wz0y?qmpbcBvq+FfSj=wwxrG8kWO+_8uR^*>Xz98JS_2{;*lL zOEvWNpTiK@9^aHh)IxW&mjnpTWvY7Z6(U;d_ZE{m9%@hnSlLS=*4rR2m?LOU7l!vs 
zIMcns^c##P7|{wwm$=iXeG|H*)nbSV>ebTQFXw=Y1YfZtIgrkV_r5B=chtAcIRs|A zTs)UYEKk$ZS>rx10E9dA#Pw?D38BvvzV9jdJ0O-$RmgUqXfdSm-QS0}4L zghtLWY%c+68~g-ln2E{1J|0RnU(NP3_47;}+uUjb2fC?l_or_RM0CHXLnk>7XgWgT8}xK5Q9-vy(b7e3?e`Qg8G_kO zjx}zI#EcdaSxSZ(k}+U_c>S&Lymp99jywnk74D*20NS(Mzmdzfq=VUdnS|!>G~IdA(m?blxSNxtAYU0^QBTVyOaf2)?RC`|f(R@pOh;AE?29WU z$xTOFIXZgy_MV%ZP67+kIx{yHNJYVWI?T^DT{=Wbv8!-zqmZam&S40lE${JCmelOi z#qshr&20&z3kwR;Y`N=?f4xlv60sr6h3Rc|^f6eg=k@WejnUosy;fg1pM8&#G0!}_ z832OQJPr*;!~9<(f6(mD4uN?Ma?VB$@p$Udd&vV=l%U3kf9To7Y%is0Hhy*1 zz-4pM2B$oX32fPBERADbW`q+dIk=yyt3dOwDP9}8_-g<^`?=?v>6u}~v5o%TJZM?C zq+#fgmNj;0b!dFGp@rL-cMJ;gl17kOucaiGy%r1-4M!u0X5(#r8l9(B=M5D_Qwn_{ zTgX@drx)f>)6eF;>3k0w8N%v@ZCrpRTVH-nN zDEh|(SB!|=16B(8&d$@j3Z{fWN5r_xry+l)sWl{D1@~ax3teK5!Qxt4xj=i zqH49(Csv(Cf6T$XLb{}GIL-C{a2oCA=Irg1v;Nf4S^-h{?Brjpe|Rsu`M5M>Zpf7#F}3hv&DYhB+z#hso+zzg`&lChmD7S8d@Rw6fdG*BATl-+|K zAZpa0#2qXm={i|w_Ho4H@4d}UH_2?vwakE6iL?SK&)UeXMNo#*EtS9qI*kv>$CuwH z_gZ!=w$xaMwbHfeIel4`)kO(l^_ zXjAXDCE4d!nU4guBuD8OuRN`)Ir^_uXZMB|oh$gv1(s~2viFq3oAL+iXmkCIm}=mu4W1sk8K3t8ikr|>aDfBO?BhvII}(8>FypmZHm zGxs_^@-OSaw|!8rlg-2oYX1{3$@ zGi?+3DY4y`&^@pl-*I$)w+)2`p|=F zjsxKnVw;Vs8)=99AyIYguwN>juquxUpKD1juhh*`@bXSbV2!xP@p(Og(~y~_ z?UB4z%;DI{)IlkN&J{z%SYQbZauKN>3So(=!uEOwaws#yo-Kh+txE@M$cT>ZnoxtG z?Zu+t+lF|L_fI+>l;e&RG~HApYmyvuC&5abxyLm;@hiBOyuE=Ya9wc7sO@*84tgKE zP2x`#-n*A`7tATf79~I;bsYvz%n`J-xXacd;>wM-d|cnRRHgp#IxoiS!#37bm+2wm z1ZJO%%&bp@~7@-Xos8~ipm@!HE|SHvzqFBS_2R0}jZ&D9p96d>xD zQ{1)Gx=lT!=qQttrlyGz9Sw1MrK0=`^Ksm(#-8gWt~6 zjSL`Li1o`}(rPT+yIlL@VTI6mw=l?noniFuk3GG4!Gq-%W1Af#mwY<`qMYYSKyO2+ zc-*DRwZqf2Rjc#3lc!m6&&O&NLu%vV!yetr064zq+QiCxH`JyJ}FVz^%fQlIdH+6cgZ7+j6O%$)oIoNp?PJyMt|2vwuKSRPVz|(MkZD zHj&@70NiE+J}q^++>NrV?ZUH+?HzPV&2>j3ih!8gH-;Mmoy0St2emyNUCpXXf6qjr z@O@kZ6Q}D5SKL$IwCc>bZf^{!1&0^i82hVn^Xd584E!%(7ZY5$buYsp4Y+38&Zs}G!5eBesPqj5y-?@4#pXyoD@_$3 zr!p^Do$i8|X=c)~&AFzAK5rjo>nU&&Td5c9Q zYQiWz_;ylAl8T#uC#Sy~zB~F{*C6QF&v4$sjxd3>|I%8wSTty5tS%v77WO#my2gm@ 
z3D%ucX!FEug%3~nE-M~FwgvqWOutfr`Dp?}?lE!2&wk*;)l_Lzm*Ktf(n=D7Fqm9RLu}E1*>)`3oYAHF6oXE8ouQO}ZY&9m#=`ZYC05-F^F@ch!H<2r# zvwpn+bTzP>E;7CvA3MH}Yi}*G8L-My)R||xIaX~1%uG*B4H4S)>)V-?T`eRxFZ0NV zek__JB~P*I>1}WgzueVS)~)L)^Ar{K%~T z;lhEo@~j;FxKjS2_TfhHREcgl@2{@PpqgZ>{zB|LL&1cNAs1JbnUSViiX>YnskH8n&&P~tn#hQ1%~hOVBJ18xj{$SP9)A6zoS;`6jWeYReTA0#O=mY ze5#0_lM%!--XT^7m|V2|FDdCw>Eugu#Iz=SA{SJiB}LwJt!2&DW6yRs;9d8T+)5Fb zl4Uh(xTFQW3qU&s3}b{*R|LpLD$}};bjH6twC>i1=#bkc|6GqRMzw+VXUi1B7k--i z+~ZIB8-C9|r$3STJnJs>?E)_tES0mzt2e$sV!M86MQ?wAv*pMEgXC&+{n&o|3(L$?CxTM{po~eT`IH|S0H%q=cl({S7VN#+b&%( zmkOZ0FG1pq7)|@LOfo)kxk5C@@9N7M|JF+V%p#=&!VFS1Nj4uX5{=wN=(C{iA8sbDx8{C{Syy__+XX z7XNl_eEheg^$`()>ucX;;^%IB@N_=4@l>}GwBD8a;cB!b?QqgcNitS1jboq(lrBG8 z*;DFV?HnrbXnqDLARmF=s%@@d~n%QTq~hs67^%ijG{ z+1aSjEyXocr{uH`pLEJQ9jSZY2y)(@!nxY1&(Arm+UjUl06*OVeWJew{MSy=hU@7T z$i%Ye=%N#O&Pe--C(NZGzY9pq@>dSE z<@@QNC9Mx(gO%kACrzMp?>+SOM%d2MgyLe7!D<=&RSInyKM|7X9JyNmvM=ND&qtM8 zmlM8zZ3mfFDhJGcm~&tn2v^-kEPt6{$x1WaAcx#E=-$T_4!@nrPba(Oc3||)`MdW9 zlQqnRvK`Tuc5w*CkQE0YM`f=Ogs9p!32=%{Xu-F#QcM6v!))GWmFGF>gHSB)gAT}U z2^8hdAJY_zGbN0ng9bH2>@$n7cT$De?75HRq~FCdQJ*_cxS3KE`u@jdej>+R|7RJQ z*P|+NF}T9;*YSszFD0FJQ#9Y!lC8dXKf?9%o{Qk&T`TtOL4WS(prYUn9mpsF0tibQ zp)Y(Ne%hkxf-hy2v?`ok`XDXkx*9OBy)skf1^%v6>aUmoUcv@*PUVBF4cDT{8VKRN zJ~}3T_rW2WajC4*Ry5OUR0Ya;=#?MR;3tqyLQjbfI?I$^|66Qy_;p^N9D=iUk3VBl znx$xQ!YOK@&wvklE{lT?PtTYj%61+I=zIMI$?S^JOXPx+?NhNOq0kvtS6Rwdg6(3w zrkZIx0v<&1F}X#xWXzV}J=$*#CucdDoyE}Q_7}N9MqU7jB0ym*6)#>U2<7p->;`gm zMdAz2gC4c%*zKpLl!8I4LPDF3b#M`0lF?TrWX-YFChn&Ajfmfs>zkP(V$0`}<(svF zX{c&`4~CZ8{@jtO-y!!SQWQhDvptLrB=?XX!}xdQH2A)d6%QTo_PC(ASRQD}Oaq89 z(%qKJI%rw5*y8|fpUvPxh(I5o2qzezK(3I1MYqBkNs{(%kRaon$B~^vdZPAB^c}tF zPN6_s*$6^I^b)jVh&TtAHQ_Jlb zYk^`$s&X-eS@zo7^RkM-jaLf~v=56&Kg8>x8zcKOfqX_9S=i)5Ir>0beMa&=$rYam zTPFagjduZpEG?y3p;5SgA~P1@m2dh|@>WuGtqnpc%Q9AvS(20EhO_CT*pS5CYXepRjkT=bwEA@QV&HO*dH&8_$}7@0U=@|s@wfXgo&a4@TtoU&Ac zH!K=pPQ?3j=Kc01?N^WgFdaS!5ab7V4-Up4Dwufh*{fI2zWDf}N!JbI*}fwYmA(!XUAEN?~HudAeOy!=T&DjAdIjT_q`Ft=_|l)U$8C|4roZDY;Q-trHGw61lu 
zUPpLC8&(kzLH23+T+vo}L#|MqoqUjgB*7NbP9mw#{($fdz%+m>NXF&q?JMI;WwNYo z=xh3p4L;U;adjn%Myo1YUiMQCkBc}hOE^XKRb^+yXWn5{Oc`L!}I{zJlw zb}nbOuwq;I_4WsrQS6TeJcEe=J|v!7xe!Oiqc<~yFp1Aq$s7MMtiXW9| zn=r)@+E}yzp%{9tU#@_a&B9)4m6abH5uCFQ?3bLFn|d=dX>q3fl)9@BUBl2tPoj6; z))&`SL@qSQV~KH$IbT4ud;Wm0_4;tT5|Wt1aTn+oP$LhZ>jSyL=x!=#$>&m#m~B zRYjvA3=Ib`E@YEc7x`HO%zevBX4~mZ;f33$h_MV45PBg1)ZQe0w4z1p=3B$oQWND7 z(h5F7M&_evQHq+do%`O}hhlWs;v*LBLdPxr^{zAC&}ZPgDR{}2#3P&5a2HmTjobvE z?iTyxjvhX7;`l}@>74iCDbjdquv=}=WgUdJc`fKF2<5n1-EbgZB=>6P89eZfyunO> z(-@Y%?SAIgxuB|&)RD8ryz6AcM+v}Fu`A=7{K?Ye#=sDYyu52^mdUN1t{kf^sOz=W zhb@HM!?m^#I`RwXFYZ1ddNtZ&vQ!-<#pmTrRJJ@LZlSVVL$2fdoJe{PE(L_)EDY7- zLeA40Z>{tys7HSCSa|fogJSK?Hxf2YrxqYjYMl zuE@~;OOv3@-h56@T)U&Mg9g)7V>>AWCevlE^v$rTx_u8qXM<7!DL5v%cx1gRYn)c) zT9!pv^igxg7ddWZ1{xKrUweE-rwtnkwzi{Cj(T|zo1+`ZV_u3uYy;jLV2U(e;>Loi z8EdkdRtQtLnF^=6AOoX%xP-9E!XDXz~QLr*rX z6^ts-C*GvMt0H06!-J$?-Fh9g?HhaLH7eNh47xFfu@uuAsRwTk2{cAeTGAg+MsgA> zx5hhD@x0?;)0Ki#RLCygOjx@eqh*+-~HX9EY{kl2vOT1(F93vG|~`lOf`jt9Sa4A~o@e*MCC;0+4FFakkP3lvz~eAG=P( zpNzhJ0~f-u!5G()pCJ}^AVbiP&oyD93Pipq5r^T3CD@d{daM3Qnw%~1f!Z8QTS`<@m!E0isDNbm#&jGm8RWj?0ZAijQDd){nyp+uLCWI*F|${yK?PBIHGy02rR^XVf{TP7zLpZ< z{=wy!upAeM4xcbi;}e|cjRIQ5rj^qtxgBN#WDK}%KgSOa>#Jt9M9oHOs@znOz2Ct6 z#N}okCiND2EN4m{86E^g&7UOvu%P?7x#%@&wqI7$3gsO^_~4s@I0w1U&H7AvHkG_R zMAOkWH{z$*uRe1H^=p$weL`92<W4Ev%O{9h{u|AF0uUxMUIko<$2 zTfcBEPyS}+3x<8curC<)1;f5zm{j@t|3cS4|69j@ f7;e9l$TP^+&SNO?jD%gv&+hQ)_U6S_dGl7xt8h~=1a_n z4jnq9_Vkg?p+hH_4jnq`&2$8~lFYvK`_Q3xhtwWDeCdr^MZ^3|UZw9a&L6Y;RXQ4W z$MJ{O)z}Nem9)|O*TYzbL{GhHmU1Z6oKY1`dG;>g>KTfmdU;A#VUoEL$I(dCtEYM6 zduq2i)sIg8Bc(FSKKUf@M)Qq|T*DJdPj}*9K3Jf%XN62c2~%}M!Q@IZ$!{y#dZDig zmj>;tv%$C{>$qc{J_05M9L+vj>X7{j0>@{*V;}#XT@G(%J2-o%a_-{ayKiqEQTcoG z?BmyfT)@$s#dP}c!I{j3K!JmkLm^l196LDs^ZdWqV9HaK zZAt4&(3LyK1cJQ%=l%#PDpJSK>drdwRx^o}@Gul~H; zb#8*6C7EuM$?s#y2=yCKL{c_m-QPCOGa6mw&LRYKP?VVmadu*OU76%k6_dr=?LSix_f z)DNxL7D>##oDHe*ckgVB)}OK#zI|_&mj^U8Rz(_J5og%n?p2 zKbo3oV&FSl{nJ9CMzd}sq#DgVU)+yYq~wDh?*G>7--?I;LmrM468)37i4Q(6g(we* 
zjo(3?WGoHgzSTs*x3+O~&#j38>7jH>)!775Rc7ag0K557o1yeibHS{THB^0rKiP#( zk#Bytam=1F#)_%)o!B#V{rGFoNEpZKy*!Ps5l>f)^x+{)RNY@r&Elsm3>b5QeY!O9 zM}axKjh!U!@YGP+hcmHsoo%irTt@mX#`M-JhBrpKB)y{g`c}_ks&Xh7IG|xWMBD?r zk>w#YRaa}#;q!j-HdiM53jqP;=qbI85&0S=dqQ4*OSUorh1mFR#Y4&4fM?}2&_ESk zd!wp+>c34j4x}z9P!wUf8+{zFr~fMTx2~4DeT~C!ktTo1T>%fV`K)-3x&jAjf0du^ zYvolr>t0#t=4v%Op@;Xu=n!VK7y4pgaH&lLY~Tm`R||2IWi%}Kyy)#O#+Bw1gWc}P zbIOeho8Cw~IK|qB;nY3W1D!(5WG_9%cL>tYC%PI{EC?Id*egHIBH#mx23F=X4y<1C z$)B)JM!>Y_E~9OEbyGWo!>OBSgK)t?#Ei=69bhv`D%(s3I65&vK^WZ$0USvb`|_S% zvE9oK|9R0G#^hetRhmkz^Eu^Ei`fr~bpGwMVS!MK=&9;okbaR_9wS&|Kx=D-_LSW4 zPcpeINjP8f$gCDDT=L;6QI zUS+szjoN`JJ&r4p$V?i1B{7{$TZ5fbrsB2FCsIqNqkZ~JygQ3ZVbfSk7hb*Y`q_6Gr5XBK$9irv^=HXH!N5pMU<0_@C@txuPzh z%j=Yve|6GFzLG(Y90Pm$m_6A)+Em5CnA4QMaQU?d(-b$%a`P?j*b)zt0 zv+%aF<5WHL?bLc;e|0>;*<*THjFyThdfCv?(J?+Q6vWofw>C9wGp;$XW(q0oi_9pQ z5_ev+H+b#6`zMcAksyj6s*zA}$?-Ka_~tU%Rw`CU%^}>_)*+BqV58@K<-Kn4O^u-3 zf3dLKE;YHc5{EHDdyzdzLAss``vPCN^&5#Gzbyk+ypkB>j+9#i!xsmeWH!biDX8E2 ze#8#Or-_}tps(jHx_weHEoS956G085e#pFXZ;9fyFHV9?&lkE(eowgW>jgt1oeNF; z-eB}=_yW+iv{9bKd8;({>D&n65(KHQIC^VKAxrtGAccR)o@B~M_Ho}#aMBQ?x~`Um zNL^Ca#h+n#b%__R3RWglmSJ=G`Fnv&XLZG%(jI(mjJgqL?^qHL*!JS8n=N_DnZ zS{lh;NK98$Jul3)uRc#Bc@h33qGk?)G6aoIgQTbaSst*8=v#)*s@#_H!2T}vz(YmR zyYj6YFYkt2lyz)$SRk^9YmSJNt9tRnKI-9Ta_h_=^=1yr?c{(#Ug|L$hga}E`T^Dx zBN5`@FY*+$ew!-ari&w9PJn%@-^rN2xRKUnsNB=loRoIJzgw9u2@@SKVE>=HYSz40xMMj_PXI<}ZdBBi`E0bo!@bbpF zv_b}k`jBK%O4n>*=1Gt9K_Q)+-cBGCAgjHhY}vU)agJQvSNX*JyGK;|*nH`?W+V8K zp6!VIMYl03#YM{B3ghV03@A^TV-B7#yIoZcmR_bYonL&E>{VoqjV89gXJESL}UT4N=bzdYR&f@fB- z{I)W2PC#V)&$g5!Nm|T3Q!I#WR@<@I)R!h@>Tz?JI9$QQ2`*yHmH9V#t_aj2$_?Dz zZzi zS%aQHk|pAm3LBv8$9fNHd!ce@3=Zo%9|`G`k2s~R8yxHTu_pSuTv&HWD`=6rwDIjT z+b@68S}|6-XH*Wz!*-ysx@l$P17C*NLnR zX7E`q3zR7H^s?>c-ILIT-Ouwu-(*C{VjWVAN&at2*vbSs z+m%o910IV`YhYEppPk|E;(RyoYp>j-rEC@se2L4vx){mNyL7iPVWrw2+je4}7#UXU zF^vn}$^9V(cOmPc)v@bh!|ngLahWweP;gVO<7+I%y+*%(srdTr{!CZL8N*QQ!1u%q z9;XU^yx(rrnnl|f!)ems+Er(2+HP8TCjw4>UE!XrDiwdz#HRKpBx|Ep^{miNnt0C8 
zW9uWCzVgFECNYUV)4w>zN?kCh^Mbdp1<5F|GtSpqgT1pPc1YtDUKM=4eU_eiK6>gG zuIsRGK5rn5wFldm!u_h>G#uBzrXqwxxLXkGBdfi!UdW%Kt zvHO5R>2%cxR^A_wDGO~yc<=g5xVob5=RO73&$XUK1Ow?Mxf@9wY1 zkp0X!RGLIxOAmGZ0Ob#OTr7McvQ%)2-XYvJMyBmHO~x>t*1UB@9EYuO^->| z>Rh{#3=2g~VQ1(|GPT&Ja7CJCJL$5$t*|{z6h}}ue=vvzGGCy9kb>z}Rihfxgh7)? zBhAIUq#HG>08YPF)$8@;^1@iAC!>$GG014g{Xuf7ONd`57 z@-u(ANCf#Vuj|qYSRG2M6-Jl~>5j}$l%JM0l&iyti%B+muUFrI6{Yb{GJa3iTrxA= zeoVNaz(apM#u^vKl1rhB?@y{vms&=KR~^r|XLx@5d>!G02Yr{NBvn7{M?!yZH;$Sq z4(H)XVVt84eg~ly=`-Cf-3;uOY~jgCL7!tEFI&KC$~$)KZ6}wnk7+|4ZK;1O>N(P- zr%WiA3Ox4y=F6gy6sN8x>)uPVg{-FF-QtXm#mq}~;EG>1NtyE{#ZA`CGs+>p)R5Sz z$BU{6{Pek~{aNK{jwa{RehZb)F<{lW_@9C<%Tl&Y6SQ8~UULFt4enE?_7<0i?5(p& zrz9X@e@art`r&jUUY}0F{Gh_NeF(;Dk@ZFQb>26Bb{8=q78_OEk5jEK>FYN3!Qp?p z5|K^*j&slTSI(?7%z$@fQ!GkBrO@j`KeQ&wROM<=Q|Y6UNRW%41oZJ5D<=;a!$4mr zmY~PohuO5aL5*vv>SB!8PK5?5%^DF&C77t)N&XEX#G~hnHj2^bxc4K@oycKRkjrH# z&OK6<;J*_z^1d7i#foM!n(s&CD7Dd1-AMWZ$iLYWPYGfx)r1d?P*5qFNS7UQ;_wL` zy$0(hkRZBFp>?Cy_hhnvuT$|zI>*|LZstg;8_g!5&dj9IyH$rUUq|@p2Ili|Wc>V~ z=)W0|%|Un3e1npM5D>Yx8_*io6E(%VgXv{sAURXm?<|>0yCLlaCmKot&BFp8@TEir z*Kpr34)#JH#lfqYHuIQQBTOC2WA*sA4T?d_NhScR5f%)wQ&?VP)G!3?Yg# zzlX#Vfc#Y7q(P%&)zRO3{vX1oI>kXBhJo%D;rJ(Opp%LXYZ@-@FpudYWos=-&uk~h z#L?TdR?L*f22~f6sepK>^-pTm>)$kbKBs<4NwwnO*a(VC82gxK8`I=_`jC@tz#~4W^i_lD%{F!Kn{_71 z*~7d~mQdK_+(&Ov(s*f$V!P;;;Ye7ER+vZFxo$$AO^Fav_=~DPQAFVWxKSPpg6+fk z8ol-?{#zL%Ab)Cev{GiXKPbe7LQQe{dFy7lMf4iIL_lmS-b(8Ib7#UP$44&X20q@* z_b(4pt@z_yNM=X+I${jG1A{N_yI8zXKnkUJq@)g) z`G!&PlpWZ0IVfhCUuk>uAwyJu@=VN}SyO-d;9Dm=R}G&2^DZ$dYO-N^8qE{#s2JN% z+g+qc+=o6~i<-tkrHbhWm-*S1L;I_{a>pRYbq$%6LpN0G@%I{Q&FwzfXu%P$=E)%1>M7iYTX5N^S-q-o!K>^#aqAO9G{#=qM@9y60}H&3?L;03b3`%wC_ zWy=&F{3+#!MJ&Mcs^{SeOw8IjYX22B1jey5gE=379|W>8M$x(Q|U@ZzOd= z6>BiG5%@%2b-dqLo1Hyu*1v2{ z2roP%PVYg;dm^Lv(Z``Q)ZMEw-g&gcTCS_F)mzJ5@KT0_^|rq;rZw8Vz6aFf+pfKQ z!Y4e;UWch{XRwDsE|p!GKyLo|s`{Gg$LEGUe92@)4`!*sskN%*F99l4GZcqC{`R&! 
zbmJo}=FF1T0aQPv8z}$35~N9Ph80>vw3CvDg$~3xq*2Fl-KLvC)m;K6$AXDF7xri{ z{lxxE4!vDwL2T9xVTGEs$=7(w8Fa-|yA|Gxd-o@M<%giQYb#r!W+8eq$eDeLqQ zI$ZGZ-@-jm{bdK9j5kCrP7MI9R6c=ej4d^U0~D zW#ErpaZ-+Ug!?0FC*&(icX6IpxO{p4{ny@cJ9;fF!3eSh2$K4k?2G&P9QU&y$7N=q zD+f^v1X$QdrvXOZ+on&BtUyEb?5wmJXSFvFCjD@j=k-MQ^6W$V&NIzJDPCR(WsFc* zr-Gd6zd^K}<{a(lT>0c5zj#%&aPUnSx_UUB8qh|kj+X?wl7pA}kZKV}_CfsH!CmjZ ziBX=8hL8j#1nV~+Qd%3-5Mq55|8GGlY-b0X?qE}q9-^#YO`Uc(rtd(1H%flx{dciH zhep%T6NzcCgx*O0i{8`x0sW9(G>wvRUE$t`JMpLW=8lXI5!BK#=ve}Kq5{{sJ~uh@ zsiIt*(q4EQL>tiPl_EGJ^4HoFm6@i+N(XwLL!qnD2>NcuZ$X%kF_UZC$*G9$Qe!%! z4*5LfMfAk?E`K*%6KIVGrfw@o| zRgNxq;-w8B{}Mrj5PM3Vg^0ud&`P5|X7KII(p|^$L0&&8 zb8j%-3*M>c`k~d+8#k@_@k2OKlj01 z3r5Pt(Kjs_{B#dg>veuv{gx@GZ@7@udOleNy8RP9d@etHM+KL@+qM()XW(cwfc=7r zIPiLaXJvmg;cVlbZvVa^I#^+MJGd^{uD`JOtfI|ju*~jnKPbz!Hkk4F4_-&`;9opCAvFE?slJdy5SP( zfu`qxq7_jUu%(n%n{apWhVAaeuztbR%*(tW!mPgkZVSaE1HVP^-ZrqFF8}ylJ44t; z46azd5<>Q5;PAXGN@hM`)s2ruVsh8 zP%&}u{&+XJqVofYhA(A}TqV;Mom?lX7~8qWQS$kF{Qiyr0pLEX_TBUF6A~^1Do&ka zOw_Js4@D;~r;|MqJ3Sfg?c&18>FFHnUB}V#mvqSTr zrmW_VD~IN&Alp#84vGzXQDLkiwGWTq2x7~}nN>KN<;i&2{#if810wH*e7ARe92~^v z(z6$GpAc*m#Fkt)rbqidp&SH5xU~4zlq!u@FwVs6lDnNp9{~^I%&IpX7;me+g&Os1 zP70XhjwKYl)m(UwOz7jBS?h$j3F|lUaIh=aCYmULnKrOlICd?26HF(4HDz|6E?lac z*EhQsbdg&Lj+|o#C&aD{3BLl0ORi7)${je<#Q@9MNXqBims`?rmCz?f??_V@SD^eN z_N93~x%%*<>e-NpR5D|B0klOQ>t5=MJn!26d1%7N#2?@2lPuJDN0_W)6Y&$!em;P@ zYdwjTQ@DFIh(cf}>5s|I1>DrW9N^}C?b<>UWR_~o(o)nNq2xTZ@)!m}(iUg4@-w;5*QMZY_N6d3rp`0>DyoLMx^6V? 
z+Sr>ZuKi*u4`xEp^nR&Ds^c5AGu|4<$ynfn#aumB(`$5J?39182E%3aA>-<9=owIS z6czH+#GKG~W8b~yS2nH|VMq~GrH!PTr#mFTH_>|6S6tv*sMKj`39QJxK;96ZXcCCRA4b`JdZEAo;9FNpKjiHkLv?$(i`S?Cvwsgnx!0n6LlyF-hGYWTp`{Go`}e2L(` zUfO5%&>R?Dx11I_sWn*L4&QNe~qB4E8>eK>a?r8VFBqpb>7->H~S^j|OA>*j~=62h2s&jkv6 z{Wz(p%5=`xc`hh7(OnE2E8OBv!3X?VFC^HS!Q_V#GU{y+gQvf zrau%=`tgt7!%v1Mja&t0TQSxRjQlK2x)IMGioC&o(P+Djb-M&i?T^efNt2dG z%f6M%iMt4hJ}S#nGb@Aep}^3Pcr;Ar;SrHZ&UwF#__}PJo@#)9rUdf zhfF%)C}{!hHGJ~4>Es3#h52fj&tPeV!+SwQ3c%7f4y&PD_V&2A7Z>w%`6C@f1{-JiWS z77D-jYvhB)#Xpj&!7{T&hFpHsxcTmql*}G~*?dz2FZam83U^ANO%zWPLe zVd?W5>4+WLEHXHgYGJ?><72i>-Rw>T*S%9!-A83?%9mv}yNIRKk%hJ~4zu;%>vfs* zn*v$M*9F<9xm{%n8fz)r@_eiu2VwUqL`N&@>;8KWr~x~+Z_!ct=FZiAaLR%($glM} zwM7ANtbbdjp0@nNnfC}58vH3~n)$8I&Ytq~Ukvo_k%7pat=(X*(Jy_bIuD-O<_Z1_ z{Aad*6~gZO>-o1HfoLvG&n$o%USc}Uxe~#|H~w}QmEQz>4>99FeQvQ7O?kQ|2(^#-G59rbP|jW z!2Lu$&fFc}UD9HT1@ig*%|3KR>GyOP;AB1WkKB6jKl{A?d-jNF0UXhtdHU9H#gf)v zz%Gdr5^It{t374?50vts2$}f*MCBa%dKmKGsQ2xE3%VjdU;c2?S!h&fUcdlj=H(h} zU^m+}=4NIp-tS<3@4PJ^U5)X`Kdf75^YRNJCF-c6Uu5l7A#~kecCK0?esvf!$)C6M zK$EuAcl$t}Lr>~ z$i&=|qCPBw^~|A3(P3?j3;NV&jd!@MY6iAh4j#6a0;+fBzBb4*L>O%R))OF^2<4}% zF&rt33NUqD8^BRpwY4+)3*GAO|2i3={es{AD@Eimpay}-#DaFr9BpY9GbfJ{nn;n| zEk)$ywS<3ZN*P(84ilOR5{RO$HF@`wS;GzUBP?7H(9+M9OKz@2p}gCNvl#5GG&Sms z_{=~J#y{6K!QgG+HzG4KS(#~o{LjwT3xS~8Qf0sC4-4|Uc+QQ|7uzoMf)}o6v5oz@ zxjg;Fu&LvAXJSj2)`l=TB0o$R$pPL;+UaWl+XgCh|A-3koH17@g08u5|MLjEb6> zTn%EIIa3CEO2rCnEXrS%5p(v}SiPAX`|E&Fetl_QX_6uyVd=Lv*4<~G?=W^TDumFtk)1p zWI$Dd^6pO`sNPqga=|3<=uv1XL$6ZNZc}eEZe|4$$@?-KS-TT_`dgYnK&?pDEsM^b z!kx7UWhM#6@0l)^7OpA6jis7d*VSpX6SCYD!hSHAB**?>Ez7Dc$dgzKaa%iV5=6pe zg7`0hF{xeS{X3E~Ho#O$sG&)qHP5$H?U@Dw@H`g?SZDnJjUFyQ(JKx&K$(=p@W8n% z$z&nsV5POp0g}qY1tF-XDB^V-3!N{~dEuT6EESnfUWft61#cqn0Q^x}M!1aDTPm7y zA(xtEQ5sfftmQX&SVZF(o#U$=fOLl zcjzE3^5E0II{=W1`SP@T8;q_NkY|JECCmyh;5q>D)v3Ax@*P9JV0X$6V+3Uqe`i%6QH_1u8j}AITo7JM4 zKYw@>v78o=vNJMbjwNPqDaSz&J1<-ZCKgWM2aj_AFH=Q`YtGkf~jf> z_mpT>C-=vA(8YF9SuX7cd@~HXTGYi=4%VhGgCFRxR;ez4JHUks 
zPnmDa3envo3T;>f9^Jgg=iloFvI(z0{!0`v$YF&`g5d{lt68J%l1e_j0?Ag2xE{oY zI28`NtUaH>p!;pOJPs?8xE7Q|Q!AT2zOj@8=qvKPIqppvRoOLHHBIQzotpuqM(;(J z8Mq%U|14{SDp+PC*S$5~{XPtSeJ?}Cb;C|z!S8tw;=yGeb`VsRNkEDGhWI!%T#Vh^ zgh?QP^rd!BS1&B{J`e(a80>~KHC=&IzV7r{6^Oh#ShKt{fmym(A(NRhzym9Q1Tr$8 z&S$_FAakI{LUceip2J^{x&)a){l0qh5ipI69N;7{U)>OQ1Hm|?8A_f@2fOh4lGP)2 z@4O3IQsP?S35O-vI9=dz)IPvDuR$T1zgSC6W4)k2P{-xj!$rPNssI22m52~f9CY58wShcTtXn zc1J4S<$I~4j9cm{KPB)%|NIkF>2bTIG~JtQOd3n;%|<-~>c2W8CZ1UD z92cwVOkI9>#p7d02xV_CfHH~QCXvptvgXUnN*)gsXc1*&vP@f?Ynr-Y$ds2DRB+L} z?HcPPr(>MN`ix(9)hKN1U)h+B<$2W3k}3JqpX8INaJk-Ih4~Td?<)d_Y&LiO0v}wt zd{*^p%*@P85LtykSeKnczUeHVg7;iUj759YDS;?nc`xAn!^wTHFZpX}N$Zf-4GRm4 z`1p9`(6=(HZgq1JDk>@s5P}wDa4@H#fr4)!INRF(7;l?*Tbrn`LB-Aycc}pgIe9y6 z?XGzoj$h8>Cc5CM-kpoXO*)2#w|MlQU4+qol;nnvwX{6Cb5TT8^i`>bO7~s8)a2^l zU!K;MXxUuPEaSE7VhJn}l+Y5Z0XAb`U|?p(JR&Pp^VU+1wWEWlj8IKX0pGD-zt-2+ zg-aW=p&Rk~X54&yecj#akmbQo@*iI(eq!d};v!5`s6xw-NMvbY;r^X}e*E|Whr`Rt zToWQPn z!#jRT5t9##Z4$Wi-xcWJy?b{i-Nmm@7T5CWm8P-pT%D5d);Br%JaX?IEx^-$b;`iO z1o5QdWCSrc^@!bzN6T#XVaJm1IB5>?cE}itqgvz)GhV%Vg+`+%CMLcs+Y1#Mj~P2% z%^6v4Yip~nt}ZGPiT`MO>z?RmOZL}s_*qm=p}Dl)DRe=Mtj^w+PM-hEp2Fgid&6(VFu>;kOF#Hd zv^%=Ch094uym_HsRS+8@#e1UtRauq3!R*7F>*A<^DoqvnAjyobB-3o#xupyDnImE) z#3bIJrpLLD-O$ta@R!eyk?dGS;BOxL__}+1+~5k^w~~@-xP-T@t?kB4tE{YS%+qV^ zVI767oAcdOTp4&gKGg+2K_CNmg~4EeJur4@B25AXP)rkaUKm)}df}}S(*`3Wqiq6~ zKkU)3>9=<-78Mri0!+8QzMWlxLbe}dC{MFKn!L5uab}KgC9VL$Pi1Fk`%|{<8c=k4 zK+UA7nVECGL3w%k^71k}T+PPK&F%NcGy66vA~X&7C{e@mul4mtdDVb&|MSn468TuY zB7<|1diR}{P6aEkjFkfV9~b{~cH^a<-tv<>Yinz6-0z=WbL$qZX-T!tMCNbbk&q~R zJFPs_3rJ9Ta(Qv_TXAs}+%MtF7w;xB!-60-X^i)*K4R~c;f1`Bo*O>eP*qh`L&IbZ z?aI`UoDpL~591nVG~B;o+tkNw?qx$>US3K{N?2H!?TY-49~{`dpFbZ~g~Z0Pqp#d5 zJ2pN|rm!$G8yOqVx1T(D5-GPL;_BCYPfqT(uGo2mWhXm2xqhXjVRLiyYD`ChsPSD% zNh?j`Cf_Or@Fo?Q)FQldypyoL{`MMsjPzrp;pDEauD3EKdU~S+16kG5da|iL=ae6* zSEt*bXAT}MG8A-J0Q}917X$U4D=&p3Mio&-A%PDbK=a_lbi(4Zo@|?HyyB;MchZPH z&-u>}y5lchx^%T&Q$u5Tc=$_vd}oxuqod>Q=KQi;ZsF-s!)j}y<9E4KyH`TY5FK$@YU9CU4de*U*_w)m+! 
zz(_Pc_O=!JcKrPLv#(EUg~XX(GW94;P*|SV+a%twu)JKSx;8KG#)x`1^85DUhPRaC z_&+TCi@qON{r()4Dvf`V_52&){fG;_oS7;0L+9L|+oNYzD-By$NFS92WS^aD$H|aM zBwks!NASBcG8dVHIYo`G#n%3^H@qe;DOpijdGF%s@6TVzTs|AD3@qUNd$Mg}H*elq zxNe=?E`K#W{K>gu%a0#F+B-O;rlbseUoi@?fUzb0@;x&C=rvPyM<052!@yBeR(4`= zQ0L{#$^e@0?lWuag|0ZCGr_|B z-OL+T8#GHJjCOZ-Pifzrth56>KX#!v)e2*ESnuV_Z>t?Qqf71;Sy@}(xLo+m$M?jE z6D4&EDO9Ro@(;k^022pvch?y(S0Id)gjRYi4b~=H>u(j7l$1o!Nu{Y7;)&f=R^6Oc z#LZ2&nn^Sk>rzGpwt-v6wa(nKur+@9GO(lDWx{puYMyitr zs*%Uoum?Y2q^+&3sTmnwV+KKcem@;-g}hurt_6a*BA>Rpdc^Ha!1VgYz2g*EbO51^ zjZ^FK9V&$@Um8yr)H|hHIcy{I%Ct(r2zXxJ6Q?VwVWfDm48$l?l5>; zOiYiZtHb%3al7O8iEX=fz9E>L`F1_0XrFcJD`sZqO4FG?fBvM&x&u3&Z+7a$hucyq z>l}Jv$&HUAIPFqj!C}_JcAxPoqW7>j7NWZ5gv>JGA22B`5G5&Nr_5ZPIv#S z#=}hVg2PK{+Q6vP^k-dtePSXaU)1q-7H?hStM&Bs653}6&_pZ7hf!qyu-HQ(-4an| zi~H)b5x#{MD0`7b_};TkjR{AY&TCgttRM4gu3v%$UQq6E@kJq*?NeknAj8Kt9W3+v zt|fE%Fi~&=_XsFG>kBSTz1`gvq2~;?dPbW&5U z4dck3s_LoX8WS~2>-Y-EJ|`QsYMf}(V>!MQIwz{m8F5|vMV;EIPeqmOcj2jiW0ntZ zyc#VFf*xHN>^D94DR0ZemK$ms!D&;`d)w_(aijxmyCSn`Yf#-v=+;T2yZN0aZ{m|W zhoppkw!*`iquT{1gEc?v7tyc{HpKzO>uE; z=!k0rdi3xO!J$_(RkSLvClCM;k=mtXi%`RjVOz{f~OH1zb@vtAI$q>Fi2 z`f*pPs&&m|G5&a5{u8mYxwa9la^0hj8NIfiIu7362E`BF^fXXO1|`$x68@dm$27>ad zMDvNgXX^X+?*ju5@0vF@J^cR6;+b}cXQ{)$3;L=~`+27ChCJbi2n0t1(P_I~rIGw-C zIdkp&Yel*`2f8$PL+<{vMl3of?S-(*&zm=I{x~1-N@%Q}{KLm9R)M8n%`)65@u958 z;^)&5LZ9bvWha~((jmIu78bT@h?~x`P0a8{Sl)2?Zd_wh_e8Z%Yyk7{bt9s=UP1Tz zG*~__U;j%UgZU~K2WR`(7Wd`L!zcLTRtxi$@8%13nn|14Z|lii&XBaxdie06mshn( z{F4$T>-v9?!!JH8zm-W#O_hh>O?=uvn=qb2+QBAy9*&KGe&`(my}L;Mho)<0I=8MacC{Z!x?!)Bc|8T|sjh+xYwZIqo^gGe(&%gY zzlKiS&E|M*J`?^*Q^nWg6WQUKiofb$@ui&A@vD7ckp#KTfs2&#}EaAs95? 
z04b%7Xn(P-(VjF^71=#zFxALg{Os6=pGgP`cfM+Y_VqNZ<+8?IAGNHx2355ZTg>F- zZ5~(?M&HeZbpBh+GoO%bu!y_p` zSnKsyiCq23Yuc)@P0mN0^rDa-e!hmm7`cgN%WicGed(#GsgqbN7Ra%jQ`9?Ob!u(S z{Udkz?0z1eX=Ej-@b=GxN`CO#m)$Q>PakJT{#x^LnXE1@C{QE6JVxQwGS91X32|q; z_+Hd)`rcYB^2z4RFa}eyHlceLY9bwM0G%y!r5iP_2!5ZiL{QL#xoDnK`MQ{|#1H(` z=Ds$!UE{U53=Nx@%e!D#6imIxGmaQX)=55?yZuH_N4ij5+#;j9Uk3n z7C;ml#MN{rWvE8C=TL4L>;3v-(;fg9Nh;wM5{}=KG$Lemd?z&@WnG=D(QZf!r)^eZ zy(6RVag-&Pr-i$zKe)?V8_MvtIh{-*lM4$hC7?&qW$d-6HsR!849eod2)p=4JqN)CY@R*S{{`8F!|xsfkvQpAieH>QVu1w4DF>`i{}uJ?muc zrO=nx#%|7C!tC2^nSea6*4+=yON7a3HUP3Enhe$g={Ur}{|eh@hs)po#Q5IH_oLRe z&xUQkJDOVG&@d9GIU*=9AKkpU++fjuwk{V-s6mW}m%Gh(0WJwhs@J+EHme6~7tVF0 zdH}Gc@~9z;+lW2skw%qy)OKfH00In8ec;=&VG!RN*6W%pbP8INj9b5($wg)>I(0&)+vz0x3s6(^W1}&t86Wh3!Yip0mlWj2N)3%abvVR`s{@ zp+Ut_pDxZ?_gIn1dMEhZda_RJAgdF$foW%y+3ppv1U61cRaL`w>HY?G&= z?_%P}nA#2Tffi#YbFXL#$oSx3HnXM}Ynj{pGg8SxSV%Gx382pMuP{p{C-67{UrU+K zcg1Hte+sof68Mfe)YwM$6#x`p0ExIWT8^}{dxl42qc#_{EDXXi9m#C;)uF z<>o$a64Up-Pw#gs+D(;my2Q_~?c=qIQ|~)-uPOARLu-Hkb6wrIZSEd74gseq5Bi2V{&%9!VCSMyHPj-tViU#8n6Un_U z0lG}**4mSjtdvFs2Q4oJ`yH#8T+>^Mo|wKS%alfk^PUCz?p<$+i6xM)*d_)HmlP&u zW+EU2vo+-`0L@Ee?(pdXP<9`P+XDU;fM@jrhKhmee6HT!GJ#7|o7Kj3#JPx@C3Sn7 zK55slU%y+cXJVotBQriRQ4kIR@Wm;SkHsAZBRy)-Vorc(z5oQAb`V_xb88=A`ull! 
zyrjHjGKGf&xd}5f!2cwO7;ZKZ{KczQT07cr7rwW5aRK^|LPV?gd8*RV#O|9-4*`ujf{P26K+T6AX8zJicg={X}k#di7xK&1e5W+Yhrd|yPcccEqv zUSi=a7)JAS%CgHex+wy+K37P-V9_R1!n4s{1wr*^k8= z*WYi@!7H1?n}D?b`d_7$ojtgOV%Phcwg}uT~guvPx{3F zNOW&$bHY)wUuK@%QOY(p1N}OvP}Qjm^23wsDdR4=#=-rGO1iS-Ss%9Qkz-j-j~Twzm}R$ zO9L@pOYPXlKOSzuM~04H0(w5xA|oSF%Y8V1{(NcU zW|s!^&aTFTy55^zs@$c>7RE)!x%K`35!Snc?<)9pNHs_N*oZV4`K~0I*$B**}C*uD)7N; zI&~LdrEg8;V+rEo;`}^3>gryAodJiRQs&YL;2waTksTcd4PI+=b8}nyY*L+)98AXs z)7d1Xr1Z?p_==yr`6$D!J@nE8u>owTo%V-<ze$PvkE zpI-t1n{UsvutgS?46jz;HM{Tw=w*8fucr^hAn?h(t|9?=4yoi4dIxqqpe2 z_m+z2qIW^`7R)H4C8F18gJFmsWe9>BA?qOb^StkWy&u-Lt!-P&r`*YHnCm*P^E{5< zz90Mewa7@ET@QVSrDgx^`-fR=2iSSBL-$Cj>>VO$Ipe2#q9{q{i#jswcL`;l806%H z{wOA7`9csC-tf~?OxcO)Q;Bb4} zwPAA2vASQEI)@zV=kxXJ*G0wGdK=y4pFY{|Z{vYTO#c&Z=2*Gdlc2|SEBWxvo_9Tf zjIc1mp(nGw0g{rE?Ck8!yZE2RIs5Nj$g%Hw7cQMSHD#}%OX#~1mDcu$j%y;d=Lv3n zyw<$$?kFaM`=F2f+f*3M5sFvIsH&>U%gd{)OWD8qmdLlEp}gG5-u@{CLuYUAWW$RF zqKx9=RaLaxit)7zX8;USkdq_Dh`sSv`boPDWI*kzaI3Mgr1@7OVGPqXPn$Hfa=gGx z3MKHIe9d~W9##T2FpyxG*3wQAL#^mmkh9fYiGE|buYmKZ#q2|a(UK(BygtN~pPvs! 
zLwR|5sME&WGg(<#fI~-6;k0@{%Hhb~YlTy4$!Ktcrn7hlHs_ zmcE;M#X*_!dBhE6pC)&!(&AqsW(z<1GA&QzTYVnrKYH}#^XG@qsdgtCIAkL=xr*FL z?KG25*`WIPm4fH(a9lnQCb1~1t5o;tDhbY<$`Azgka0tOo z0yg;@D%k>Pt7TWDtyn^;2nRNKDQGZbD84MWjudBfiv7MRCyljp8>zy?ujm9t4X%pd zD$9EQ_btABrNMFQ$i(g6=oGPyHd+=G_F^F}onP2NH+n=0y+8_hC@)0mMQdb(18E|vaqvmYuX0HqE!Y1@rX_Tz~a(PU#E~^rz zsp)O&%;=}*Z?=OwKw?4gE#vX*%*=fi<4>6Y7w@6b_qWzNI%GAKV3R#}{`{p&mrQCZ z%H3nP5-y%O1!q}40^TT$REf=NljhtZx?rw7>Q=u=wR)b^gLYD3)=SOj8!T~mgOqytS(+pK<^(is#m(7p^&Y1`Mf#nE^{Jtv zlId?F{_ZK$;K)do$EInF$g)U>+uYkxSS_$Bpc7hMwMBhp?#8JOG6{8sRfaaPou?ur z#+Didth3L!kIgx1I`a|GX`C}0lpzTCYFjzxD+5Z+yiEjc)a>3$-TO6yuoGaN3O0# z+%f|h!`3{A2``C6i~ z%@xY)va=g`8h79`~s3cUYF#1ggXd2d{EWkY`zu zIH0Zx&lx0&?+%TOBsBe2Gn7}-AyBkt%IeNuH+=5kH)JX`ujTclio>2UQz}yqwEb{l z|A;%`tstJwCU!(_SAIBV+Q|l7OfM=T8y2GD;;eQI_A*OB8=G5qcNV*-X>EaSN!pg8 zlR3oDy$FK70ImN(A>oiRF(E-yODo?o?!>?;W*}*-tWlM_ZIeR<1qG-|1E@Da^ZSdrr0?K9ww31z+geyy7%tP2D`~=pw>r!VkL0r& zRm;pGyqT_$9}(@O}ZLKV?BVMAl3c*>7dU2|@BHdiGI z+6&hj6rkhU93onFZ{$5q@lIOt(RQ9)_tY|Cr@4zpa3-C}O*>>R+AW`tQ` ze;e`3w|i#oHbq9;SIZ0x9AGU3u}dHKJtn7Di3jp?MVV!`X9>PBJc$ilIgLk>voB){ zmhU#Ln~~6AGU$ct-mW@pJGWsOpsdO-D46@zZjQK{SY_$kwU<27%xaoOX=hYQJHOS| z=5CZCpHngGQWxP;`jJi_P3tL=3Y>L7mOgKB6WS<{m9azGE5=+}WE~xOdz7Q~YQ;JU z8r5W@x3?sh?-m9_^ExuKZfs&Q%I2!;h3v^({Ps+*F`LoNA+Yj^1-&L=_F&2*M^|qh zH|d11C_JNv>q>S?yaGp@6-a|VK0cW19nQafU%=0?vnxUG2u)m~ROCM6PjnvtQDyff z0Xy!DBR*|i@4}G8G8hg92p(AGdxJ6oQqiI!(BGk2hnbcQUtD}$gZGa}kSpqJDVLg? 
zkq?olovY1etQTr)MS@>O*CN}lc>|I2SV&QS`ZOXbsbHF9=>x`()*?qqfS;dS?oKhA zb}~=pm`zunt+#cb1^b?3%PlB1%cK>oK}t)yd&XpNy=K4^2B!KPCY#Kkac*3BV3 zB-K69v>o?jI7V%AwnT_a(mQ@@LlkWlF%vJmnaboxYD6 zwMUA+8IK`#!ffKjK>ylx*c)Jqh-y%8J{q zUid!M=fVG$CQ|RNEba}|t$c8Y_jCf-#a!B*VQy=xy^9sa;!7 zCo}z8KH0cy%Z@~*kAK9^PG8+nOa_HH{?dU)_|YwF8HX}>MpsvwRZOg*zJB8HZ5f<3 za0^|Woz2afT8}Ckf2e4{33NoyF>V**9J%Nj&jBrMTcbfs;-Fh{ZB#|3#82!o-5!lk zNT>!)b8F`1A~u1M%0uowB^TI{;$mY#r%O&wo|<|t#DF=xVDL=P@#0=SqG0fsg~x!M z;q-kIStQ~FYLH0mRfmPAit{O@e$+(xR$PQ?sfrqDh#MzGT!-w9WnL3*<4X6=X_B@_ zsc3aYKcQ^&F4U;T9qe-%E)91N{WEISVwiG$-wNa&|urb*ll@laTN~qa45s*=L zi7&g!vObTw&Yk{6nR36Df|*4g-^Mh1EV8q!YrM)KlSo`N1=?SXN$#pX*90!&VYNc} z(Yi~!5&D>qc3Ff$h*~_3aRF9SksO(EhSYD1sr;_3?J9>dRWO^NE><{oR3p=bF-|A3te zppWb7I9d5}WqtJ%<;O!HZS3IOC36$+nlN)L9PwZEG@ndl@ zP>5BQu{iOzx`!<3O-^B-vhEhczKfC8MUj$D(uEqtt7nuIV=G+F8O5ew6j-(um;W#x zl5IYi^CP=)ky6n915`8E{z2|d^S$B~W!REPm0g7OWIMiufq0H%b1ZUiZRck-d;Iv# zWz4>J{TbicwyrKP%#;hIugK}|^>5?Hz#oS!m{HpNM8tdL70rlYEjw><-lrCmJF~Pb z*1qG*TODKHLn~i*YSv_@MY`1%q~)-+r9D&#@Z8W=@bapuc*FldpH?s1Vt~-WI1)QH z=AvZ6?_1ulYX#f?pb$h5^-*_UWxj?#K>FCwbrU^|6;nivl?FKOfEN0zjN*AcrdV(aw5s- z0<8LHj8IFwV42%H)l!;QvD<1L?@E{Uv8sbTTh*n(0m9P)z{CSR&?Oq@f@G#_(bVY3 z|J)0`8AI;%vLE!*Xn!rppso}kx`k7HL$hW1ZsD%AtADNH^t z=lG69uAEU8_hpZ}=DK{jh3)YTTi@@m&a<$v3<_+X=}_ozQO<$8qtUgKkeqC7XE!|G z)^k$+xeZtPzMFSb-buo#a>l1qWEH~>9bb0 zf~rEDY95k{76ooV{Qu}E&#-(4M1&>3i!tj!J`?%*`838(Gwn4%iY>HjXutW<6N#iP zrK8t5ej-B?Q0B9}`xv*l_-%Kw!}V}RM#k&Q?Q=ed?Op0+MCt{lxpgfn*DsuL?)=5T zpv8G@nC~9B$CK5DKfWN@XbX*rBNK0z7<1@x?;8t;N~ZeUc-@C;ZPF^Rb+JeF#BIlU z+aSU#kBUoWTfmuFSy{Qc(u4+uyyiKVO{zQ$3?5b{+SSf6bj5lp`3{=(rCp){d6Sie z<%oWyw0!&a4VueiD@x@^xeK)}hdOXJxW-tgJ497$_4`Px9eI~k=OXzFM~qEVGmomC z;Z8BXVGgCUp66oL%8Iqk=24p`cxdGBP)p^MOsW1FJh|YL8%*E9k1@`?alAmjMq=S{ zF)=9#Z>CZIlY_cUgKmO#4qeNvxvu2geSAS?tAxV*nny7+3jIVjnT?DF(;`a2nW1I4-crZ6sP1_aRPi zt;5Sh*|5CzU?m!%W6T$Yl3pEH*p#d(`ieXQ-~z-aULKy=xSs}Gw``~q;>|inmRbr#?oyJR?ZK63J z8I{uAwL>MF!S$P4QZlR-2u?J*as!-PlgpLRmgOzm$$HV^$v^DmMzzHC?xTxQM*O6s 
z)I-ySzz>(0hwt`bv~+X|30WGLz~yz7kg%f|an!TA-0DFmG>s9^DebHB%$#Vs`^L57 zseh7*Rey{zEW*c?W#hAzle1DjqzBdasouPK)7{<8lH!OW$KC3UcA*Xq4)*f$LL9*M zHHmg9a6R}ZuP3Eep5*4uf~;?_sYA1YH}1Ya?0`Djyv7?U;$r#KY7F1o9xpt**K2h< zdieAxO)7c9u9cEm_Dy93f?mo&hVYVua_3*?dp2gjGWq91lGQ-Uon%L_xY^e4&fh-# z89;FP7r4jaSeaZ4=Vy9<^E+I4<5+8Hu#W!R^(JkX^YG32Q9fXFtQiyoiDhS8o|UYo zL1gE|T{v?}=A4`2-LMX}NP&U9P0)4_1!`F=<_`uO&o?bsHXPU>A-;M#$T^3j8)ge0 zmVtfL*0#7g+=#KnfGa*SaEFkan|r;3wPZv;>^$+Df7H8_n5QRN6Dh2*65PT8J5MlZa?R`^fjsJe3JmfB2KN5(;35K(weabNS|Jgs zjb+VKlc$X$=c2APt5RO$Z5>2~sbu0tn;za2)J%qvmG2*)@7XMokP-&?5e1X7%i?Wa zngvBfl1F?uHaCG4-xHMt=@7nzL|YrXrW~umY$6tOmMOQ=^_28Im>qParbaUpDxgVDgZ|ZJcDM^#oR;L#dT?( zqJf5#LUbbEQV0T-@Y_#xO1Lxa{~ba;nk0Fxi`M+L(Svul%iZ2TuxzL1v(qd(IJ(%i zrE2@d+Yei^?#{+yqjM8J^M_}3Ystv80}g-m=+*j%zU@1~Kfa|ucBB~KdDhJ22FPOs zgNcoa$-?QyMv~wUwFk%su~#h@nt#~(593v0`=RO`;ztYdwa`mCn@vu;nu8yjA8HaF z`j()NDd|O>i7wNjQrwCc`cwI4JV~+Q!8`S<<}m}#Z%|gl4*-3m&y2LEbknwogrlzu zz@h@TXs{8PI_W1B-cS?jqAW^LPgVMsDgoWT=Gsf5T2v9tdHMNZEA8p&;a5A6pn!VC zfx=!V!I$?9Ovl_V9o@3%E$m$=uMUc+=t3c$@jb78B^x3stnfkh zha>ytx=_h()jX7wjt#SknVFlL8$6C=GBR=0aGfjTPzcFCQf_D~vfi7MrWfo4a5`26y=}q;`A*b~Dl&H!Rkk#zZ~x^z@uf1Bw8LA1eok zYqgGRSXP_%V>m zV{UF~nV*-J5Fh_^3|f`{N&L-otmnQd_;3*ij)S_eWX!HtD+ny1p5y#AesxKYURN?=+uh&jHUVI1@ zPQdconp+Ld5u?=A)LBq=KZvb`UM@Cw+2jtNy#UesoHw*VBMJ1DupFdl)8RUBu3Wa_DYRe^?11=49^_-R`@E06- z8tfh7z+8{Juq|{a@|EQ)$;*@6xB)f3qIzmR&EW9xO%jrkLNQDl{#a>2;8|~7w#6db zed~CFM!We11PT(_-oh;|n|RJdq6H%W!^;Rn>Bo-}aXU4uXxQ47iuxEPVIpQ`-FoEH zBVqC|O?oy*8|>If;RvR6D8I1qa==%RH{q?lmo|npaBQ#~oYo?qAe}(HTKpC<2DlnZ zb1n1F_i5p)K)odPiU@I_s;X+C7D{&O1h??x8IRo#^?JjuXNJRW!hGzn@~v|0|EoXD zrJ_z>wlVGO=m2np>iY>&6>QbIx z_{1XCG1_(q+m#LM0rVRmed#134HaLpoyan8X0W}v7A5gaiOvioCss&I` zQm(A54C?6^l5W9w%u4EGT!RZNk=wQo7m{okr&1+u?IDu9?!>dOAARRA;V<1J;11il2{f z5#6}Hz7BQnqr(?djegKOS>F09x@v1{58bT(k86D%N?S6+rzg*nowKyGR4!>_9@i@w z?kP2d3uOr=o*zG+3U&AP+8wutgJGrAzW+>7*wu1RJTBND2 zIB-$rujhW-U~|yCTBjI}y<8O?J-t=en*IHKe6rNcl<`?bk8b%7SwOs@K!g^bRw^u4 zG`9>q?f^FiFk~t5E_a@vK7ASxN1X8!`Ofl;3~|1DH 
zEB9jU&CC|S2|q*wcO`?W@jmTUvETNv61L?nmaDt{KL~R^A<&sb%@Gx4)2q4QZ-z13 zn8Kuvv*>S76`4q6+ht#4RicaoVrPeRF7$o-c1KKGM`s*pJm-DlT|| z-OjXbxrbeEN6i{&w8JM2dDLRSl_w-#?vXuR_n?JOnpZTrx%qB;|B015TyG`ZMN-XQ zomSZ((7$oxMhgOieEjyW|K3G7u=slP>@CHj%SFYlQel)HAnVO?j%?`eqFt*sgN>Sv z_juzd%`M}KPrS;ITG`G5nNOb3%?IuAr*Q9n4!)S)V>V6&8fU8f^IYbi;-Z zrJ=7H!@E*6Fm~!+n_K3YgP#nY8MWfMJH~>Xrqc>9)`oD=pplrZ^OYcxs(2mJ`U(xm zu}jXr@q{89>0!y%cE-vsv0N}bJ0R+Y;QT8I5kB71*y(k5a2FsYb9~gb&74;TU%hQ+ zQ^FGr`Q@`W?Q0&ZG1hE& zdE-WRim{Zq2W-$_%NZ`EKgZ^3JI+47c>LZk9f zde*EZ2SptG;yDUQa3|$LR_$BHN>NDM`}G-gRPnx{^~zrKTh>q8(}p>@nSNPxGYCZM z9#K0WV?T>7Us;Y9{G2D-L(*JU1dNy0wdGs#Gb}T^d1FSlo&^d?^HTRr8@@NTIN=JB z3U_CF&pJMM(Uo@{c3l0#jX0E;lf~TJT*4uk@`Uc+PXxkG(b&NnM0v*i514dcOn5BZ zAR(EZpYM2p%*kmz7B`wjVyWPCL)Q_MD^6A^=*(VSbmXZx$nom<5^Lc)`$IukUe#k* zZxCl&=}MvbDZ6!Pak0;aGL+H})P<#N8LRWzur5GPNf34ec`5Jm{kG3h+j5zb({uOk z0Yw>mDC`(>df@fzQvLJ}E^R!$Ph_Xex!47(-G-61YQ zZ(nMS<|xPBjv9=!6yBM|3O$v*kBrvu#&d}#^ShH23G*COZ7Y#gDmO5-QM^^ip<&c~ zHP_s%_$_ zN5DoI7;)7;#_6LRyoF&~?Dzbce`|wXj2Xi4UPZhO<|B*|Q1NCU=VX z);~`=qnibkt3PB5)^|ndkTCdFzr&1moq->E?TvZ3NVotkSYisBU3}6^Wl~M>-Bc}J zK{<-GR9K{LH+!xvP5Y{}?&@6kQn@1k-^I_cAZ?n&QF?hOTB2)Zwj zh*Eq?hlSQt>_-NBOV^c=Bd;%rq-RJ=c|?=t?REA8HF=}#m>q_=_g(IwGE(Yi#lWyQ z2tEp!6Cf9Sr&%&hT*S~?JK6n@6Vl~XC%usH{rkkhtE0WB(n^^Ub9%}sizgv|5f9DG zqq>)CeE5|zQp#liVY6DWhGSEsmqj)v+S%OjV(~my)cnt&>o5lQp30IVr1p^@Hn+?7 zEXL;gW|$mPjJr7xt>2@{8)a(>>pO z7nP~`5E3>c>0i7qU+n~>HcK2x+KcAd@T`N<;>Z$IpPK|9Jdi?l(XYa8(ie7)_AzR@5h@Z9B@kxlj9t=_eLQu(cDv&W1`rf57D&F;MvPzB2M;uQgBCjcRbyy^S;kX|A zGmm>>h>ekIME=ua;UNt0;+(1XN5j?rUg%Qy<(Ga{*{KPs_0irK!RHcQaH1Eulf|(| z#C+RRfeHXi?`TZUR-ZyHHR~``SLIl=Y?CdnPbRD>{=l zK_0{kH#q`hxdzIu8O2#@;(%46;&G~S^^c>JefiX#3zDuJ4+nK(EOAt$y?ymQ`@TCh zlkF65@n+-5QxvzIvXyVG{nIp$Kxh--Z#OEA$vECK%=s1~_hiYJNOpyosKorV?miCX z`kRzjB~lX6S0Wj&An^A&3FCaZa`8NF%0w~dd<;yGTwU+N-~l3i)M_x_Wt0% ztr@=YYG)~sV%j}m0_-k()4(h@dxT41JQ5{#Zhm25czD<=x=)fScs$=Tad1@|O zAy~IyfhT0SnV@!cgNdEp#>&cTcP?&7@;)c$TGHh2>~G^^c8u#SA$G^6`ux@~Bodjs zNBi|Ut9r5gKizPw`VqPMXv4K-0FxHOZrB<|wY1gJ@&u*X 
z<$0q7iaK+&lIn@j^v@?4Gj7&V-UWK2DwjbN279sGN56(6kL$Ns}^noSp+Z@4Mnj1ZI|?3XAE zmPGbPYz~_au2>}3LK4m+%eg@q5CiA4aQJMrQn0m^m3FOrX}8}1)SmEup{$%O{oA6W zT9vO}cq(>4rZ4j!-rjP(zCjD7JBxF3Bgmq0p3w2K_QJR&^a3Mf?`w35Q-s|v(hQpY z`KMK^Lw{{^b8}}ESD<^D6HcJcSayP|ze4AT@I8g#L09*Bif4-iLoAR=%QtQ<>ML{; z@#}qk&+tCAM;yH3&>FBcLjR_bS`SPhyEpT771oLzz-;M%_Xvt4NBOqZi~2c74!KU&SNtwWs3eQM~v#kigTEz zD|kbBIJ_K+@n+*uR#Dm9O#9Yr|3|^gyt?uaJaY zPHa1O!)ftK$#4yLzZP{~bF~xNEEUfy|2rw6^LJ8$Wh5SUb2v_b#>WU(j&zxxjU{-> z`g}RNx*0Lp*n$`uxnFqoc>}&_EQ(IY96A!|hw% z6P=Q8xFFxGeYBPeIZ69uX(IKdQu-&z%RTV-Hi5VKAzgs zKbOn*&#$0=iR*&{r`?FSTUXySJ2=SG3gs^CjTZ(+2D z29s2av`%%&LvwqEIXg;+&QE$gq?!KvNB+O~ULJQ^W9l9x4c*z&;ZRpp3`>qn{dx2P zN5g&vCH4839OyKy16@&EbhGXIa6J^$}1{in`_&6v2j@X$~Qzy(zUrq|D(di8dP>)3C< zy0br*2DbXY#{ESO8hO_LB=5)sUs)L(X<{+HLj)ff3tyn2EOG=UyxWj@0_9T7-o1v& zv`8zdQ~er|!0+Hvi=vkQsFK)qVGTEfN+=jd=s3?qMUT08bL9B~d5XMHj`vEOMhbU! z2l`ZwC7VrJ??P1EjPbb%qMY$8GA@|t9?9|#>Btjxjn=nT#EUR65M79ljs~{)T?mdPCfoqke>b=XcUypID)={!a@<} zA5X#MJ4hw&FRccH1MuLrIZW9DGlUFMDODA9BN$%VMCjR7;ywc#6 zti1(s70?ZXH(F?V0II{88bCO> z+4oENN8!f#%)Xam%f8~~;sQhwm~L~($-Kbv7K;5Laem*1NsT+%PCYIIYpC0UvGLe) z5%k_WS1(-xiLn^IBt~6tE{Kz`|HOuRA{s4YWaD2E6|LXm~P#=#l+0~o_PhjMtswV-H7!gj78Mwi@n7hljn(QmzPh;e~?Oyk0**{ zLz8<{0cVe)(=E+q$q~&he#q7%i6oR7mSe2F__(VutAg&j^b;iUH^dvL&u|Zn)OeGM zx>R9r9>Z5Cew36%jn*-$)o!Rbpww)XU9`!E?Mv%_Xbd{xsQ=f@K_1HdQZ`Cbdk|wp z5zK)_^I2Rq^Eq3Y>&V|*rH0IiD1olF0)^2cb%W&o6EYHyR8hb^HD~Efi}>+bnRu~@ zM50ZY0`=d}STf8JPL@2YXuwv)N}5zqv3qa`bs3D4LXDzJ&AwN8#*u2RO=0-j;HH zFvO#18sNyQX8+~Lq_^Su%quB@$qC0sh}1a3U%q0hmp*{U{%=wW3$_?^EpP#-KYRAv zr>3m7B_u3NQC_|c2cj|f%uKD$E(w)#;{2A4e@c88CPSQ849ZN`I#yRr!PeSQvV4scFTI*!Gd@KEMDV**woJ*^{b@`}#aBf)B<@2(-3)6D7$y?q_ni zVhbMY0Grc<1L(+X0BK85@Cmy>QQ>H4m;uQrJwgakMAsqvgib4;W0YE-x-#Yi;e`_|p#^@SqFKh&sEv z+QL$v%Y`{juf8F?CSe&gBXir}I??~LH*C_lvcfVI^?;ur3aumG%+IztM|Obohl>#Y zV_$`f@7U@QiLoxn&5N6gX~=fR1IPbexbhSC~aa*}!Fz42lmecjyRH-wi>4N>3zUEQ2~S1?Ty*r@c`+7<3O5LnmluE>0P4OJcIvld6k_%*8mY-t%{F zyhGEtpE5LMN6|AA#diWhTx;pjSBk=k5kFG3X^?G69jwfb{(|1#aj 
z%9`Q+J;Iz9HX&wT!yIUzs;&@o{$lb9%Oab^8w6bY!}if*?<_f+)GiBKzn z1wS$%LbR%uefvP17_^zmZOCX<4^N z{vk;|XUqvUmf!Deht3Jtl#${;UZR0#aopCema((yg~#*Q(-7)2v*kG#uR$8jpF_&1 z)v4%*6y(Dg=grT~{&{cUMvyi`|ld@y}2p*QQ&G@Jl%4TWnUemW-Y(ARjuT;aw}BZGkNa) zEGS)I&PjLy$E0=B2A4RidzI&oc_}&~MNgn~qjZ%tWJNzSiZXemp$#iywcYuRX`;fK zJC+(`bInt(O)Hb&`{{n4B>(WJBmyla!@b0ow0~&kv=xu28PAF3{_befj^Be@Cxl)tu{<0w2eWo6FqZi8Is7DJhV4yA zmHYufbMQ*>I%#$+`J;~ZsdhN3%0lCQA41pxh_9TGyB62U2czup1uzzvIo_vXTfw3w z6R)#Bpwkr@D8w#TTo1e(f@uH}n3N+CPnp5EJA7GLek@LU(awY*PN{3I&Nzw;#0+xs zqwzqX$+N6*iVNF+e@P@BCSjC%0NJUaQRBy}d0*wzzrA&wVYY(DD?o2x7fv_Kb)jHF<-fin|2fi zt}Inj5g&b?Y)t%ta#!N*%?zWJ44aB354aTx9)n$LFSO$rh#FEZ7PrH ztLhE8MlnRof7awyij<7rDd+yNe-|UMUP&d0SA_qt#rIeY>ut)L4esphEZcIRdId}M zCt7XSuT|emS=_OdIT!9MHq?P@xzQFOR=onhv$3%;y|!SNeVJ02^@$GE zittb>l0Y*|`ac zgi~JwhHk*|c~3w95+CVn%TNaw6Y^8QK*k4l>i4md?QMLtNAWdgaE1#&j-;+eI^@9M z)qEM>Kl*oI3I728ZK$Lb-VFHj8{Lsh)O!uxY^`iJuZ4S83rVJi|!M$Cyp>uQTbS3Suni(MTleSDa={Dqi_a+zA@ z3Gq-Rxn^Wb_aNaM!Hw2^rYu;sDbCf`5m;#EmV#fEij(I_b|aNeVK5KbVfLNZDvw3R z0_?WXDHQ$UBwp$Y#UF*(KyZ`jnBcA@X_R+tgr!P$Gi!EPMh8lSjkLaWRBOht55>S~ z+F*=*aR@{?mao;*`g&1tg{=Z~VPL&p(!d6&Hi_t?;Ys7JIg8`%oVnp-Z&>$YpYO|F zVY9ZsD<(s+sk6nYYgMk%*g)9R0)jCH}G~&5g*8$nir10H&VK55mthUi? 
zH)+3R3xR=Qc}pwgBP>XM`lZrBf)y4H@T}L>)mcu-3-#($Ds*MNp7r6zo{zqzFY~hd zXk~y~+cJ&F?immKk4qr{xDl$V<~MJ99Bd7vYL22`_sC(feS-kj?F9H=z(tUsysZI1 zAUnL^rZ|MX99!oZZ2wy4m1}YzotlFNC%^8rPS`x48i4MD;&K}cQg~VeNNdp;(tA*- zUXL{+p~ii`@y6_a^vsz< zWsO#59CVljaQPqQ(!_j*^ha2fFn>_M51U1TGO7ouK&B#?1+|^>qocxMMaNmW%1O7u zctuPf@^VH?n<&R5mNSa92kL-{;S1OqaL{wkuwPuZO@|wV2tVWN90+B4_0g)CK3X+A zcj&L?YwwBD^YHRQ;|FCZrFTbU12F%=bL+)*<~c9q*oOI~Y|DPW`SL;bCBVz8e`LQC z8@xZTsK0kQ$3H}xE^_srfo0u`PJ$Qp)%-YuL=PC;Jhr2!^$*>?)1DTs!V^IGGOTxD zpyhQa5KVxKCf8V#mu3kP|KvdsIRsR5WPVm1aNQtWdq$P&LpT3__|6sMJ{vxij`yTf zKwm#)fbRRP^!$rKL?;v15Hk40{aPs(_w84Cu$Fs3q^oenYvGk25wM+m`xb~KB+sxi z_C*<<{CKhZ4+%e*jemYUc_O~Q*o67=|N35g_~?TZ@~m;4G2r{CivT;(3LK;;*8Dr< zah)!I4t8>cZ$Rw){ELf5{O2PaL2~ecs9sd>iWNA`Y`UjJ^S_o~l|PGt0Q=IG3w^SfnUg>m>P zPB8FhCX?W3O&Rbm{B@8M1Fs5`GLT+#;^;%v;|YNIq@4IZ2#FQ*>+bOI-DKCAecQ=+!U|FpK7RZNi5u`J?cdk09cW(L5fBi7 zeQ62(1Ds~gj8{&aFzA3BrImi1kzH~P&BC4d2`Ah2*klX(Ma5|sX5*AS#)6jv3Z26wGS^1qM7H3tmK3;qj=QnHi zEH9*>FfUToBhXc#ioYDZ?*7aI-w;My*Y*VxKv#i;5$NA_RokS;rbZIlu5Z3&_>3OX zj(%B4AQeuHP@pX)dqL_|F+lSparXQ74=zG*r9f(Ly*#cR?eCHq#knNQc@&P}iR92# zFF+*>r8vwiiEVRUUoTrFp0UBA^NVp484N$L+1Q6(&I8Y1TEFRwG;@e$UrJqPA;n8l z;bjI<4$BxbAS_JU%eGB|^+B#C_v%;(yuN;_A}>j9yv{zF?_v&<`{u}kPm;@@ZkcI$ zcPQq>jks$_d8HtV9`3K3vny7{W^GIEUK5_VXd`93;#A1`(&4Sa3WW`!i2V6k*QGbz zTKP+^U^GR3N$=*rkjw_5RmH`{94_HL5G-=;4U|jDYv)eAf~Fq`5bDeBLfY+3_G9u( zO&Bn@3=wQDg|Kk`I=Ow9geP~yez(D+2{Pp1geMFQk;9fq%Bcr4<(al`Zi=(%*XwvS zUpywHNdk6ZuIk>WWXfoTg=^B&zx2H2QGpi&aUnC)2v{qy67dTSllsR&Y`55RwtIDv zE3nMjbgG{2$KKCi+XXlsOq0E}_|vm4{V5YAVP_~K1Hpp>6+kK&qal-Q zsR`_@^^oqj+HoAyZW%BG4-{Z(o;P6r*2-sdq*=IeG%p5e%;t@UE-# z(V2#pDVa)ml|g1p5*+j_y)bTxJY5A5EdYy4h70DMVj!gb90q`R-1r;oH;%vZI;2uZ z!Jj@tK-!6}gNVZAZV0=o_zGCY+Saz>7QOZ_T>{{CD*Ms&D%#owSB&4ys7%RUay2&o z2*d1P2WHVyS2tsn;oh&=rU`B$T=Sze2fwi&Kvw8943!jdpSEEjtZ>g=o9w20Bby-Oo_%#HH=?)R@zb%X>s? 
zaej%3S|a~6-mL%W$Lam3t{-Z^2eu*(*k2>PH7Po`SmU?)LObn$ z=vi1=!e`60R)yL!w%i*LD#C{R=y_MlG<$A|c3TdiZiqF_zy%g>NOh4%OI|OR4&PSc znZc&CUA5Hj<8vA)!>Q*uYV8=Q9w#TT9A+~L8KHAkOpZG zknZjfR8ShEOC+Sb4o40GDxHEf2uSAvK|*@=(f9rS-Zg7x&8#&u{|xI{&q6(L?sLa= z?ft3U9Ve2{se!7>Rm^sqHcP=C@Zb=7nY6hYX<0l(D*6VIl-E7V;KnRrPsp+0ez=!Z zFViiZSHw52W;>ZfxlbjVf2GYWIXKD9-o5id%=cWo$q?(e%LX|XVZzf%R8G_GAncq8^j8Uj%O$I3V0zkR9 z|3v3SVIT({0RbT7px6$xQ`j_$_DDj zb;Tq3kd~J5l;YU_YeaUwHzG>!5L3Qk9PqrMPuEpgDr$pjXh*p{V!JGC*O@gS??KfzArTm&H`$Mwg^MeZ?Ahe(CEzxT3kp~OHSLj27~z9oM*UkmBZKoyLS0BJa+Zo%?b*v#?LB~RV}PCNTKv}kvKgJ6gJ z$CVLv;XKI`L%(ysJVgNgc_T3&KQXtJPq3==PNe!63>F|+vbVLZejoY%A?G@kDAhFr zJiPX+1TB44 zsuxI0%vrdEe*Ncj9a4=0%ZDUd3laF)kr2`ABKl`)`p2nsMf1l!j!mXw#un_ z*kZD94Jk~XI7P5k+x}5mvLunVWAu&nAXOPP{56z&G3I(A(9N(H!T1Q_%p5YvZ+(T!$$g^H zyhEdl)|cA*mX`2tbh^KvuFRc9=NDrN$l+c~WCGs(Kk?;*A$2A8D6hl_7}i7{@(4z_ z%zM%w@Q2SCA6^n>jEX_fq>Mf&36ZrcNIAu}{A40B9^gwWriDHZNC z+t#RA<9-iD1;2EFC&!b|ujra)Y$9z(ld>`SuzpFrvwtLYkj6b*fJtt{M z?B(;^(34y{=Gy}g5wOkD#_mZ+0bcv#$5XV{#z*bE=EWA>(gASCvLd?QzS}UhE^O0s zN(-LX8G(5&Myl6jYYLiMn^^L`Cbk|O<$6CQitTwRbN<41+&!*B%gFGemBI{z){ z_73&*^CPP0O=EF0N;p?+$w-1G(@)Klocclee+D_PO>8P+dZ5gzZ%cgFEJ)g4Q};Vl zaLrvol%lS@8%~lk5|V01siQxx1wla7t3D z$<5`-?CST|^-NToVrq<~xpt~X-LVQ7#d2-0mb0R#I(3&&f!*kV2%Gq zygPI=Z&+1jP#TWK9don4Y%RK$dBUfwRWNEr-Rk_fQjzgKjS}6BhQS_tdGn`qwpv)i zW|l5LiR>QQtJ21lpvNiFr*p-sY|yR~WrpTg8d* z=236e@kozSg6aYPaGvC~H#8XZnbue3)S1&?l@=5{)YK6k0&iI-Kv`mmG#Txddg|el zw0QR%YBQ&@nBDKE+@ET>)`NrX;45*3)L@<3eksf8LViP;iTFSYT5kTk%kQLQMOU*+$@;s@I=Z_RZJX77P3sX(ms-E>vm8^YI+;$HK zaYJ-8(VCeYdviQ7x6*p@Eg$5LsH7GSkfnt!+yjV2HYzu*jOjRFVaakv!aMJ0NO zObjG`BVT~pz-{9ZVfrlZjYYR0{1%vL#-DBIyJa9j@W{#eLUVB z|Nad-RpsHhsJ1;%5zrQtC-*eRFeG9HuQ5H@?dj`9Qo~3buiuL*cd}_l+4F*J3yYc6>?J;j1fPHG1&visSqu+RgKF>nsmGcdF3?Y|XfNG0a0jsFPPOb|zaB>ZG+ z2ZRzZmPV)(bMh`wa|qX#SCJw(^PJz;?nRf5>sqJFC3#mKVe+l*Opfw~;y&3aPKGc= z34&7XqTEm}6h6=@+vVujI_K1Ab`2QyN19q$BDLhJy4nbsB~!~m@(sHo5bHYOL=hol zf3s!O^VkV-wArO0x&$74RTY(tt};O1I=IpIZES4T*47liy>gq5ze$cludl})QGvh( 
zX`i-QIj<$i|0zMSO*qwN@()_?$qcJg9U3A<7Mc%54s(h6LdAg}<1 zx4*2c?APbHZkT362?y&AI4nwpZrUWz+h za$EO4L!Xk{Wkp*NksGFeoPp6ls*;PDyDEI+VJUw0wam3CxK*<7+Zo|rapV}{KNluo z`F!`XIWNc-8KK|-Pjv4e7-n^ohGtk%q_nsgcrFe3U!A+g#nUlW9~Y3f5u@*6>c@w- zBZmI^^#;(+J%1nVt_4`|>UjEp#jndnfXo0?lNJ8AJ-*7yeFo@!z3l0&uEvrQJD9Y# zZ9MP~Ob(@o zjXFW3hjo-O&jm(v&iOt?(&8QmMR>aZ>6Wa=+Cg2J9NEtSh46o)#@(+8zj|YfKg}Du z16FHbI{ZiyWWZC3(O(m@X{5q6uRo59kNgmSjfSu9<7pNf+MZgZCj-QM zQM3>I=tHaX^Gim@LNe7Tzx0QGjCas&3)Ow6c9BYO0;QNM#VSOVHgSv?Ft@iJdutVs zI;du1{rKw7V^vnU4{2*}c>@Q7YUth0>79$mroGkpAo88QLriMwU8}2Jji;Fm_wFsd z_t~i3_jB%D8h%4DE#(ysx#BF*NR{f71Ww+7j*7}Hwr2Ax;2Hu&xo*-@XRce5;F7ppag=hAKw11VDRv~(_Wi6Oc;jOUr+iG3K>`UjfRVuuE5t}F1lRv$B$DDKYkt?Q$G~Uj@<%2QqmNl)L&2M7Ibz z`oy>0Y9w-LaS=7Skv{^-xWZ!pvEEHBZCU>)j2 zY)#=_`O(#nuJa_*PGFsqF@D!@XDV-3P(vcW{=Q}1Exv23Uuj*x8m!{Y56O#MvkPkC zvFL?#jC)uMv@gEH0R|N1oezieb4_e}%IEFV{MyM4sJx-<1|Sfs=2*ARu4rwk9`1aq zabYc*R2XlyJVTym;|)Xxp}sYNz@Wib_>nJE1dCSU{GsploZ?R-A;1OSISbR+Q7nu#~qujEG$;GwhGG1 zBcxC6SqeCgP!1+F&6JOU+RzT_{Fe)Zr8*y7O3WvHZ_e9E`#!MoWQ>z;9q%?$wy4C*V2Aa&j4Rmx&(E zOXkdau*-6?o!AIkW{l3&$u3X`??PVo3TyK-_ZKz4u;n`)rXlQX{Ajzb8ade_5ih=x+3XHt#wOZ zZDnnZgvZ$T5~U}0$k6_IUC4{Ahr}3EK>;L=z(|3`#YI3>?PS2DVk@8r%7CYz=eF=@ z#Y93KtQ;KDMZ78}?Gx8@P_9-`%0MK+7a5QrK;qm}&cx}>O^EVcT}^r5->?^+?>r0B zHKEbR<^1^hBOVAELfK01fB>4-Nhl4j@_1Jw6u{3`zUPb=_8B-@uFXJ&h5xki+fp5* zZv4#(c8)9fu?oxgK>-A)rS>q5=s=3Wz<`>%`n;>qiQgFro}gnGq|vFi?6)baCv|N~ zw-0L(CE^A>Y2EJQ&6fp46Z;lYFHk9JU8%~|O*7X*H>qr77a>E;!Ya|yx&?$jm z(i^y#6k(6jW1r(cTL9>-4o-ks1QOBH6jFgrOiiU+(SljJ0Ng;`#-UyQuq3ahX84a9 z1lBYFU10i=frFz2Alo!n(=YFo@>sP&I0ES1*42se@zPJ*a+5N!fh@G2J&o1tjcBV>(ByT8!#9Y%$rWY zSOC_nQ89GY3;`oHk}O1B>oD`95eS4eA1qS?E9xHYFY6v|3_xR7j`VG#@hL841XpDH z5XI)}Q%f70x<9jF5wR^7C&@A+uOT=F@{|+(m6LfHf9dk6lf&ZyGZbKTUvL=gr@kGU z?9F#)6|@v=fqe)Rgbv?X)j8sz-(I9peNN#@bW*Hb0;x{Y_cFT|oL8=8fz@-w8c|3; zhTg--!NX$a@xR5fs13$oYrTbL10JKqBPDlUu+gu^v#8~QjIFw+2HOuqZGgNSf*6Gq zWMyIUtId2~_l_rh#sUr#5;j;P#d5!K;j%fGs zWi-v;xQSrzC7;|sx@hksC)a6*oXp)*A1%Q(MNVkE;i@g?j3M6S{QB&~hz!T)TD-^C 
z@!K9_m1eBhh}|b5T6WtbETe3oD1c7bjVL}L0h&%0BpdgW;fV=7LqiAT#K8COre=k` z0*s7i;HU!Jb;8;8=&L=_Mz2i79oT|bd))m9Uvy5SI9kf7#8O3-Xy5x`axQ%Kj>oU#@-jaSgjVLZ?_(v5#5y7 znYYq7O#_Qca`G5BC!2na8=4$So_7Bw_UFcJYqGNF1C&>s>GVp5Yia}zie7D$%EmnS zK;GWb5v`>=9u+7Q-&wf4v5`O~3oUdjvwlz$#Kn}mt~_xQ_6>AoKIVo3yh$e4u)Un=%w-<2dM6h*+pRSK(?2>yrCjX!xV^9}%u(m#&A@CP8@4o5P z3u)dU3X9E&C?)6*+1L}8QCb`5iQ)6op!(hR3F{?hS;3eF&)DiriiQBs9sgGLHt(w2 znsLlV(ZZ#yV_}pGgQY|I^+~gL?%s|dTlUsJUA3l%Sgs9vB=6jC)qr z*W|D;k1grcMm(i@A*?- z?NQy3!q!(_LxHsNNr%z#$bBRGa=$qWJ{N4fI^RKybd|(&a8Aab1%4TRokhIRuFgJ= z46nl>Ixq`~{BqdDNw`Np)n$SEog^}HD#s^N6Th+Irs@d9{SRI7bs5o9X}91!xh`olY(y#VcjJV#+4nbb+Le%RDCZg$8qH*&2lth z@q#kNl%f*GU&rdiez-+aESRR`m6}ORlf1Hp{V;zPzx?L=%O(@-7o>Deh~h2E)1I5| zJ$1UJ5vtGVbsBEKXy#H9I@5$*BCiJZ2^mvKQG9j9Edp!cG&w7;w!w zp;1Rlt0tpS@HMp^%^yi87y*80X9SjZiC})4;hV>%48MQDB)K&j^}7yJpP=M~6%eiA z`AD)3;d(!wC*Z zJ$u|m67`%YG}$FT#jMJ1LcoqH`)ooCOU6Fi=SMexUTEA(tMY4#*QN?Bx79C-EE{i$ zMN4^?+-zdmb0S_L?daEowgC_}lh$zP=qUNE2Vq3yJ4r%iexspQLdst!Nye!j-O^xeiF zy}$TX6wQ4LnCp1Uh0i8kmr>b`wbt#8Jb zFCUw#4)YL+?7XM}l)Tv{{-(;jSjclAS`cR@y@cM9u!~;Ta!dA})fJKQ5tPvv2-rZL z&GxhRj9^YIJ6}-Mm%H^D%#Y^68;!a?q!QdvRlN4A0mpQkinXm>WdDbY1WtTI^L$Ov zYWkeVymi!BZZ#2>*YivNz^i_ zU>{9i*7n|OG1=n1mvrPr%v>#u#(~^F4RuDQ=Z{^H1m;PNtk|=F=gdlo-IY1YH$hHN z@4QjV-BP2@@}-LM))M0p7tUx!yPv@f_$cprmuFvK{?120kIHEW#W>Z)9w~^{VJ1PR zSuO-DA?~$;3*^akj-ppSBduLVgG7!`UnB)|s)bmYm0+hAD9rP#K_bB%2maMAQ^xJ@ zrchXjuZ_KUM^olwZJu4%0HV3d$|nXQ9jT^v&pWd)Bkyhay+{c@r4J5=JS{nl+cMz3 zHL&*Rx)iT{^@Z+ko>@mhiJs*U)Ks83d-ymD1TDa$+)Chxk8nX`*_U2WN#70*b`4E< zr=qE*W@2tc&OV$VE^r^s_k7acqpw43Jd)nBwI^vAjdl~HD&!uI`)%$mLKo<3p6(t7 zpLKL-DChkGS>EiWtKA={^vCc})2Cqv1>D~$_yjM!xv}RsES7dl<=KPVKCDm@Jde!u z;;sC!LA+34`fEL`y!)MG%S#?!RJ+uu9l^wdwz>8p>HyVsr)0?XpUJfOKKn4c7!=~d zzvXjjziZ|K-{v{{p4AAZ#{i!IUwx;FLp94XT?B0F2;-kqchyvROyA{*g>qayyb<-_ z^beQ=)voSzT40?Y3UfWDNaxUtb7AkyJQJe)R6RgASk_HK# zGmgtN##zge#o8@Qu0QONtO_0;`&m+9rWg3e=HP>tXje$!Wa@VS@^8%l^p5)0`7yM+ zoAld}h5q=SjH8GU&%5%m{N}#ccx>x4?3-bxb@&t;acHoD7VUh7q63=L5YE{G8nQb` 
zWJAMg#J#EDJH;fM&xtfgUGfEgUHA^`Ga_%Ze1fLNKe8>F(i2Y_)n;bTkIvDDo*n4w zpC$@sZdMQUbaYl+g*41^b8{ib{q-c(fz$_?s9~`g`MfiZX@w#3r)=08ihMV2NWS!` zN!PpXDg#39X~>0&m+?Bv%i9UL&@_|5=9`Zb_7SiPIPT#02! zo@Cx}K!h_0O#sB->*{t+3OhRL)VbT`BbgLa;+DTN-n{+h?fPuu=xkr#(2yIBRwR`o zNxx@)z$<|QH5gB~RhEJyub2M$mUz`gVyRw60)AT(tZUL>NPY?=^HO*6hcr=|re@Nh zY_QQw{;9x$^|sDqGjdHb#Y!6D$TGHQ!Eh$P zRrcknJ`?rzI*&iqPpmhJh*JZJLWoe<|(* zEynkkH(UlM8;Bbx7K8DQXSgO_lnP)glflh(G)H?T=s02-X6HP830r1RPJ;S8NKS$0 ztF0Y_fOk#JR_hNKE9i%WfZyxbCPtLmf_)3EbJFT>xYe1T{kj4B17<+QZR^kDocAo} z5+`n~79loQy_xk11UwH*O+W8iXM7x%Ju4f-`i+A(b1NYsp$23ezehI2(#z`VChU=U zIXR4|b;AMB4f^DO!5+%FfRcBY%RJW)A!!Ht)TH9Vii)i%pG|%PCYb!z^adXw@vY*# zUvM7-C=N9-0^CCZTi;cte#rxDBaq55Gotr0=;Rz^?w7_&k zhtYC6u&b0#hiOBK1;O}-PR@HuhO1kiU@c~iVu@-4;*#*Lz&r-!oajvx?kO8va0WXJ zz(2Llb(3OV+otGqKx_aQV@6d4S(a#bo5t}O$9xY+nFu5VJWnl?F{%wBWm!f5tr{2_ z;)kT1i|?c8>q$lPTlNCoxHa{kCC_;i111VJXULQ#11=_e=rQ!d@Yw;4xe}$4G_&Oy zNC?xs*4Ed5pxkV-!UnjY<_G&fSSI9L#cwsg+`0rF*O0 zL0VK9++YkeGq9|XK*6*aDQ*UU9RP!n;Y8qT1J?&YSAbuG+xC^xgfw1XgCPrm)B+#t zIzt)O0x%BoN{UP*OcL&)Kx_j>WBwOw={$o{1)MrS79B z?20X{xzGIVXN6vq<55KpuC-5LH-!U+vw^B;W95?k`N9IBR2zOA_bΜNXHU-IZ_0 zRmV^v0;qEnC|>ZrL6MV_n_GDPa_`tR1U7nVti~>tHRiyn#5xO6LlYiw-I-J)|26)q z+J33~5=9wbIj@^=%QO5;_xdmQR^6R`Rb0xCY(?4ZxKUNB01G}^?FMYEZHc5Knt^xzKPqIVC-QB-GcrZERIBE+4I5p7s{!Zg2zH&daN|)MGf35tj4bZ{J4JgrQ_x0VJSty3Ab3Du}S_ zTq}{=YYfm2nI|6xR=U4^OPq8CKo)G7Iy%Yk9?~r-wnx^%gduy0zH{H4(=dR<7%6B8 zK{8R_wGcA_H|{R*zpDv)_BvJO5xzNqfuzv@uL*X}3~XDQ>G)}XX5r9DqFG)Uvea^?*8}dm2V6NFs z8B==+sd5WkSw_rpmxchXhA-$sIo=5ExX%4rxMMlg?Rt9;m;#4mDS%y}hr>oL7){K8 z^JWs}c#4vC=dr~gWh<-2kKP5hJ4%|H=ZfU_+`Tf_#PFSFvu^sNG`VRS7KT@D&sFc{ zVdwyFoOf+>5>(XD?{B%TyZhuHR5`QPZXUG-A&(-Xh8eMVi z{1cZFbYn6^AzJ30Zb?n+CxgxPv8sjRkFd7@0E$UAr!;;KyoLMT)BNNp@j5~D`PS!U zrEdD3Gq1Nx7P=B4qTZ5u9b6sUwi;0s{H!3#zBiv*vM zld+?eQPminTGw+>iRJ=rGS{FF)fdbjfo?vo#;V06#tISwhO7ZU)F@k~mm!KqkInCV zQ^D`fnC#@2@eoWp_z7pW9y3qdS^BnhN2^!!&@to@~ym+tF(WM4itHd0|;^l>oq9D53d z@|GtEwCCCXi1W@-261B^67X|^g2Jqf!NLgBpJ>5J$vXK0yV@FO^It{)l*s)ChwCto 
zdgDj!Y9lO3zXJv%v*P_OYsKGJByV0518D^lE zU37~|kikzuU$h0Z!w@S?QQ;36^*5io1KY3&iW^EiuP7uGK$E;i6PbH&6Wd-LxJp5P z3jK_#M*vEUnVxkroHaa|$;dJ3(a2mfk<`5#HEVqN3BdeVrKNhLD51^8<=uXPS^}6{~H9IE8)L&akshk!2gXD@)=xCm)=gE_VDUT2;-$?gXz_t6%Z zT9lkbXeAQB*m?Myc>5Pl+T*ylbQsX;L>!a)72~DAHv_qPP$m6SC5Ll)QFUQ}0xOO?arkBeXI<@F*dysN#)RKp6j z#>Z$YJV>*>t=&g$zbLhJyNW8xu0k7DV6Y<%Wdtmz1nzN%l(zAyp;cYgTJ_Tz@~BtL zK?{Hr^J8QjaYx4Dm&r4!MRp?-4 zm3-eNck8m7)^$FM-oYuiE*=@|C0_oZ9O%svBL-nIHO~CGLq8kFf1=ufZi|b90UMTG zxw%J^04vxG7l$13_>N;U*~lXFblfsmA7l}QwbBmquc$295Os9uKblCk@uB;T*Ly@1 zufw|$UA<%$)y8Ms91OF*4XM8Z@(E2l4ZpSe;F2ri#gKIby)@I|q)(~{;@lQU>WTXB zPxuK}$ud8O%)(xaQZ7hF#dOaTtI`ckZ$G&`7$cs?9Q-T~{n@M<`H;zK=`9q5$!E`g zX4nv|bIKzX9#w4w*emsYj48gGtSxh;o2;mme_`G7AnVhwc;tuiF%6~>@{g%m1_8+n zA4Xf4_T$bS2-g5kY*6L_c+c#biL5D)x&XkRxX`lM$5>!GP259{baQdq zFSV|t>NcQS4T#5~8=R|Ogdtf@v&~7&JkbM1@a!8M&ljk~Z#oLX9H}&=JbMWb0KwU7 zfN$gE6lf18*{ORaP!7-#IiGpHJLMl?GuI%=Q37nc@8Tw=P`S~M_dcD#O}22vJkQp4 z*qhdKOt8s8*<0xVF(A&4S+-IqB6(pKOt$8~(DbWpEPHQeB=&s9y3HLZF5^*`Rd03w zaRGBGJ$P<@zD3y87F3g1XqX1L60A+=jZnk(#t|WG+zWZ<9*-l=5rm z$wrrEo=x9$ao#X}MAG@{G%F{!&gN1*XJRck6gYSUJ`1B9+04d zI|p0q1#Bu9ZMJR+lfMd&{jb_ZSFZVnRM<7^q+4AGL+u%u5ymL(<&gNmRH#~5B z$y}PD)AsyT9rV|#uAD!bQUL+i0gyw>e(|ahnu5YX>p`RXqoE^~On0RF zr#x8X8rby0I(QY5xW$z&#y;2Y3-327yE7;^?|{9?tlH|>kk@@Q0cQ;W1Ho?~RF?Oc z?VJ4G^`l<1XsCN#;CEuA5oMA!5=B(0id5jE9n|>vg*hPb1*!L(T@Ito=ZN~n2E;Wa`s9N$;&U%f zaUnqSKuyA?JilFR`$>t9USIO8QMKRTdyBO)tXKZ0ztelz*?$v$OW~Kl3 zW9*=Q~1jN6p0Jo zHYn2H+wS7(ila5JDM~kFJ?uZi=m$_M6{z0`xE_nnSKWy5XzuLi35 zbT?qVAlfD7IsPI$B?0N+8PlZ_TB|qg@Z`-DhZlSKo{6Qpjbi7HqdVi-5Nf}&?32yK z^24IZ6@G>1d$pDEMcwHe_$JAFr)X&W`}ghT4^ffie9{8tP^4fIj{uZe?OQ&(FIvuv z|0-%+C?tPhz-x@^H!bhW3=By-|M@an8#`IN@6Qa8vr^ff8dM_Jf^I9B1kY1{6xeV6 z`Wm+mh05}52!9DHx0?-J`s?hjlXTyHY#t+5*i8sR`J{w;jCBYmymYuia(&&cAa;c_CJC&tJKK zAJ97R_9b_eGN$uEBwZ$rD~AI|bqj=8iU-|VxmK#8%%334w!Dnz^|^I*Pop9;V zLEnsCC4SLSaGp9vZjyc)VyH2y{pykCD3R^pPBEC@Uht-uqIe+t@krm9!*z*naKD#| zD~fF{y?-)~&hL0_hVZGM0;+w?5Ar==*WSAV4W5p;9%2GLEO469*w@L7fB7VW=ND1q 
zi8@SejeHHt2H3$Zt8hAIRJSKPVwtD?j?n!JH=k5TFPAIQuNx6dMKLQ-on9DFWT~X7 zCT{?rVj{avO#X~Sp}Kq`XGuZ9iUodqUKCV+mM+|sVfIMu1dM-{CWfi$$$u!WVgsPK zaD|ZVcykR}b#+LG>Gv@)^vu%*G9!?C`Y2_?0tD_*?IlcRbf8zy5In-bw-SX5A#`+WqB_Cy8|<#ue?qi`n1HnX^w+3HXG4 zVNm14SHPxrMGLGZSSaJFr)B-%QEE~_NKorF`ypablfJr zGnn%RthE3)^UqVh_{n{ao*~HDGC<`fxAPxm*yWFDei-@cN8hO&f+0y%gUamZ2p|0K z{Roc1Y6SM69LK`yFb1zLK)cv#$%GGhbL6v4p7l^xkKFh#`bXnX3hOcJ#Wyb`-+fY= z4H?IJ@sjTMh~>b(@n=x!Lq~U0L3*uG+((}%X`LMF8SzWHpM-#CKo=k7P{|)}Ul^ku-j&=LZ zUm6DM|9!W?|Dt#PeuwccywbTAWYQVO0*iGBXM0B9)YSXbR6yqE{q;G?;{P&Eg3E+A zzlwiAB0b=Bz>4%=`sS5#m zG*tY3x88U6!8k;fGz2IWP@)T1@s2VuuIBfGdmS|GV01uqJ+?-~JgxQCDsB58kmRIH zbA(#uKgRqm;jM>7nyL}ruA3!)1O4l!KYj3svg*r?z1zhrY z3YY7XN&}px>K3pVf)4jukmS)8#QcCn{nOsK4KQFUt_g8*fMy>ade;hoSIEGC=H{ip zmyO}?Cxyv~i@j-VXEYD|d|^Nz)K(BY00#lEOx+~wUO(KYU@cTpc^eb+{jbs%f=57U z%MtikRyG+BvynCMC`bhV#iYXlo@yt6bfCIPnnEnVumFH4k>dnZ82FUx83MceuLeP6mtm01YbFO$MerVezIOw2H9!A+^bDuJY69*2 zD4dWU=q<3Vfqf$~n1!7kvZd532<$$}oZ3~54SGvQPyhkNEOS$*L=$~EVnssad*Bt4 zKte!##k*Jn+dDh0t*qeu*!T-@=fm;N&Km2Rwt4!v(ZY%1_09E>C*z8pZwEIV=Pu?T zKrdPI**Y$Pke;6(kXLXV@7R?6J@MD*Fsgsp0#(l9=WI;Zn;`lMMtxq=f!JusX$3k< zV>T7G5lCW1UpAwsn+p{DO9Dc?vKpDRtg`aA_V&Iv@~#VU7c9Ue(6=tZbdUS($XePx zhxdfDH52v&q#d3<2ylmjL14%l-?s=97uE>CORWv@@OCb8{ymBp=io*U18+Qp;oMKP zh1~>JJ1Cr2CjnKBmy`kZMjY+aN>g}0WiWRFEiK^hQVh@)L}$odldBxFHBu9oVOVx# z4nPk`-{0pa_#XV;M5qbd6@*Z=>abh3l2isVxx>>Be#5DZuJ(c_K-8ehm4JO8 zS;H6FkY@WwSArS@s&)v^**x0=E==Tb`2%zfsEgmsYqVnM=+F#;bMYYJhu4rtnf$e0 zea3o8*BsWJT?I3G0FeV#aG|Lv?=y@6KGvr1wO_SM^FIj}U~6S3>d%yDFYqcV;|*-E zYnR`Gn+^CnR)Om)dl1NYIr7<66fwDA?EUzE9|I;mG>5%Ygd2Mp`W}3Pe{#kTz%b+x zf~FImG$nGE+NY~-_b+1v<+19xTgDMy{M!OoTG(aQ^7R+qTppe@x zYGM-Ltr|j3y6Z_xw}Kn@7P8&b2jnflo!35{{d}QIS63J0Klurt&Q6b>iy6WV4>5-B zjKCiQ3JA<)CQSjUra}!F<{%waptlzUVc=JY2?1cIgeII3^3H|Ql#-2fnM`_N;9Dlu zE~>{I0`)ZrEEAn#K|CGSl0|$0UIh}TQRt?k|E^vpzXE;enn6WcsP6|TylR`~E5LOz zlZZyv6qS_|jmYNELl8D%04z&5X$K>e8=P~-z%Ro!Lv^yie2kx`;A(;pxpj7kTI)A^ z=!>tvjX3iYWA4B7_fkE>f_OK9LA_WeMLE|M;&38K%*(N`WdA#L?qLkf8TtEHZ?kd! 
z`R%Y6AQ3o7%t!iH2GN9bV^MaHF{ zvMllUFJF8K`oH}qe}CZr`dI$|^Rm@nkN)05ShJRMiyrf|bFJ@EKu_~IqbZo~-`V>= zbTH2UwLB7Dup$UnZdl%wizH|DE|uF!zGnqPO~>jh1_~e&9_1CcU zrv{rgtMg)hztjG#0MCgRU7+^C1*fTdB>E}mc-&m+=fs6Q&762M_S;Op37!P0^``RA#mq%L82%G* zw7+?_)HvnYUfVsn%|#a|y7?|WzWaFNz6G`WsF5oB8^O#3%rnwF5;oC1xYSH`)B^v( zH(?XR`nMQ@r#H{aJnDXLEi@iX(t2!+r!CKZE?&~ScQT)1xZ}H{+OaTD#&)!#Dudr> z(qFIDbFv^zt&g34=sewkx~%`;ROeuLA9+%2YTi-2vgzu2a*PUX6gyr{Jlc~&-JJce z2W@t_cE34_0^Ygtuq+&(`s>^q-I_46^XANg{2 z?=4SrKS5}5JpE+4PCiy;{*)Id2=@wIBhy zRPwm*KIZ%cCoWBqB=U!H?favp$SJ+E9v}aVdz8p^O}7SDrs%Zgz6lBW%^0Yj5gE}#b@m{gYm~h6`GbQ^L3mdX;o%o@ z@hEejtDb7zi|_6>4yFZd3w#+nm|3*L&oWkIiev7`JLwf}Gu7WCd-p{g>h+v{Z@MrW zycOPh6`Jb5>9wXmx&2%$9qx%l1F;Dqrm|mIIt2F<(I?%5v9iSIT`QZx9(p;Ct z$-M4b(x-~0>!)xhi|npjqnic=?CiN3tPS8$HwsVm(!W_Wj7}i^|9rVTwN*PUwwgba znQ-4lVei*WfkWd&0qgMQe&dP#68zC3vLn)!r*XP5=LDU~diLzW=997GjXy3N$%V$R z3QYf=;^G;7)#I#D|DT*brlF&|$$xws2_>lRKMKFe>i#E?aJK8jxRteH=e$z~-Ok

M}&wY;0(5V8*>FuBgh|=kNgO$_I6TjxG@Rh}cPhxP- zrpxScjP6?6r7kD&Ksd)Ue$zQ|ITuG`pZIT6Wb%d9n%Z zP;W#2Ysqva^l+4fZQ2j{yJmWF-q+Xb!~}o41)p;INBLGyTAlmZF|!sPxwCd3>tt2l z=E7n^Vyd3{^SuLKg1QE8!cjw7znyngF6&-PfgInM(|r%icJ0D!Xm5Hfk`Hjz_7S6; zmY!Q2@5pn0%G<~!`98puCfpBC^Y~(d&niudU&m{&X>!_rW533;0(ijphJ}GCZ>Ors3<5|l89nJvVg=^Kv9AOkswL3 zWXTd+Ux|VsARrk8K_!Xg*eW?o&a~v%WN4buJ*&a@JKr}oRrA-JIe(oQ%d#l7pMJuA z_Fil4`?^>1L8$OT^Kt)Gxf?3UR1d2;JF7(ql!-`gZ^|lPf2Yps63%MpL`tcJwLqt@9`K+yDt&vAYm&Hftj+VL^nLtK(||Xash*S?QT=!l|Zq&vo>p z(;m9--ci~Dr@dN6r00CB_DBa;iCaq(BFOO;%KaBnEqXYRHt`?C6R?g-!HBWU> zE8oH_C(u;2A)Gur4XJ+<7)95~UWKHlr4H%X?H!*rNATz;>6IHSE{Gs_x*sqJk%CtJVz2*UT7I7B^9lT1KISr>xHgOgGv(E zzdh`>FVR(&PBpX&>$~qwntAP}d^YVt!UXSRZBt@5q7mxju(mexydc-op=qO6ufP_K zr%{sNNs+4ZOUD-)j->VWI$GPbXYaBqNtT&AxRV!QoB8^ABnUEfX&M^b?z}ETnz*sS zC1+3)v9MM5WDO=*T7~h@_MrxgJGygcyA^YCaCdNRq+Y!f9`4NvJE(Y#1bW zGpeRqxNP@EBlS@Q2E*G^kD?UI?p4mfJjfsgtr9czG&xhGqA#25W;u_F5$xQ1P`iHG zHevGFt|k3tp0XBljY@%L18c1!&eeRA8ia|3A-iKW_vFPE(-@mxTrl{)-|WN*uPWQbi9!_R;Jo-|EgNov;AXi4ezV@F>0Ck&2L*p#ANT}2FX1O9 zY!e*2UY?_rio3}lKPtud;~)~_3wQZI?_R#8lmq|r{(_z)C5@`I=>!UWj!my}9Xak> z%fiE9da-n-!5U@|ScZ%yj?YT@2cRX&eiJI!wh#$tr96HQL%Sujt5v=Qd($lv?jWH| zH`SWnEniQyH6~-boA6c=JC|r>j=bFUgjGOtI8XMdCwKYpol8+lN{aVGIZ@l`8diP9 zt}w>kH`gmhwta2hJw)ob)|lA89xBc?Q@Hm2N)vjg9f}DqL{p6=XY1h(d?BUb)^e9f zp8PkZok+^DPbt2HOvj$cO_y`}I%03jrW!f9!wYn`Bz9ab$91oN(muRG3 zw~k8t>ZqyBY%Ff=#Rs-=UcX7$Ir_56Y97Dzur0fC-R};}!_tDI^Kiqt@jq~c$LP)K z*Jd1E*oDx`sj)g%)qJx0?i&$ByQ4~LwxuSpibFVO-~SQuc7=bP^-pNiu_F7Lj^~VJ znito#)U#a{RNcXncQd1}>6ObcwY3pm1%L{BVg70)G`3q3du!m<_m!Q3arsEIo#4ga zFX!F2E8Qe}&&V>_ZHk)unPBC)@@e-$_l`o6I>eIq#u z({o2uQVT$V-T#>_mw;3G{Ax^Umq5@YT-*XaS&V1ZNY_-5H+`Fv$K%2jm~ zyI?nvKVf(8s2ee$CE+v6)yyPid5kg`kHz9QLYY}N!T6ySJEE_)Q{*CAdO>%zj9FKE z01e_qDgP8-3WS3~p@(PxYqN9elD*Sw$Jt<+T2tOrEm?(23z1-V|6YiF7mm8ht`Fg= zmdDcJ_@xT)rf~7-d+1K6%Gp~(A65_g#4ISjk#Fm{5r?WH~Z{O^%iY^HNKShK^kInA12(IhE2bBKXmiQZLe8&^yV$mgj0YC zGk&)vyz5uGBwCNn`&_dR{`^6P&bQnz+5T-0Nf=(MA>eKLxr0S`M>&>c>?Yp&tBYUp 
z5LjcR5!P`Zx3$6aC@?WfiaRZKYgDc`X0+t&H8snfxIBs2+YNjf!Yd}&^cd#NA&>cZ z`_1rHr9k8j>>MWrEB~_sp|STV(`{v?J19L}6{I?UXW6(kj_m90#X%!$RAp{?hlhr+tqwXcQ7sVzm8WA_S5_BIEf$ndRdOdeK9x>g}%KHgPU*UekIm>n~Xn zcCnG0MZ#7?yKjQ1{7#9j#w609P?%R`7KF_%2cG(V*J0M@Vxs7>PfzXpA6>Hlnbc5& zpC7icx-_9$dBj~-Q+H*0*OS5ApZGFWfA^EL!$16ij>(-v zB3&Y1H;+#B+^qM&#^tdTW0fend91xK8@gqP!s&Q{oyI2O$Y@$b#^_YdJbX}Dv0X$V zzS?(EF=yrw?YIMKZ$=s`j@tfMOV?XinQI5jSY~^=$Miy6bSF_R0AZ1Mg!nBWwgiypv9O8Pp*$erz%rM2TP_!a;GHr!3Jt}8Su#DER-ju`= z0r{YLlw!pdVKZ1HVW!M^?>g(QT~tx>aS!fFVmzaVa<{)xFurD}zbO*2w~ZQ1u-0*a zBpGgUbF4#m>fZqavco3EL2+C0E9`ffBo>K(c_|uD#1v?VVRU z{0#GGT1ir4XF)_DP)i9n#goHh>DKQFsRXl~+FluYxAeK6RCxH>zC_q)<>UE^{46s`1Aeg78g6#lMZ9v(7k6=+WDA6}Y z2Or5cIIoov>PvCV;0|*+9DNY82zb-Z9teA}*+aLJ4eyf5zx#!8);>)(U=K*!Lf z_BB<|-hsy&D5rxL#{uvxFCQ9Tff^9QZVW&KLScW{uBVv?Y!~}Y*iWh84B0d9O#A!= zgn7^$$-V`)-at7AmNXi<$&WJBKf3@yFD-pImkqAtd@dRJCBLfEbbibui8Ms*8Cg3t zO7hGF`2;{$&`W|PM7E+2+Ie($M}SBY_G4yRj*Wc;zl`|}*jt3%?0Yxv?$cK>&@>Q7 zl&-+0)2Ea&*lgWL3sh_Kz}^aEAJCs(ZQ|)ru^s3H-!6eQP;(+efADx8##yjg&Z`hK zRv;Bvjerfo4V#1gnn8?dDkGfa+gnF`!>ZaZ`>RlG=lu;XGO&k3-0Is~MGp>grt_E> z8xvc-T$<$l0PQ$2bp>m(j%(`1h17xtG9MK~SwXMy$EmIIWBn}e0E5KKyuSR0?rLN2 zlRC}vmyR*loDX~q>=+M-w?6d3{lvT z1A-06ks!jJ`fXyY`w z?>{!-a`Zu5Zod^+!VZXa0Lcq>JWiyXDH_li5^l16!D=92=wNnJNKeE=L2o!eAMVo( z0`$xyBFJ>uQP0d&;Qm}INpF?1wcQ;5{j>067@{m8;ExX-rU3yZw%i_hDaLdfGn5FT zNg$*c0*^mZfx%=^8G>GN5simuKShOba&{iEt78UTs!A+_1?QnxzGL%0d_diJAQ@;3 z1I0J&%UO$m1V94@>!khBcn|7G++2iD^i4ZMfPXBG4@?FI27=lEOnX7cTd={}1z=)R zV`H_?9A`fs)196uky}7k@f#!7(=)z7g><5EiFAeEg^4ZnA%mbdFRU3>mIJvkUHG%q- zXpv z+^wO{tTG9AHSRqK3pmlI;@qer2QhGhs+MmD$M%&2d=V5a|}r zp(mWizm%JawS1pm<9eL`v^I$0TK47fs{<98d)&Gh9~za7#-D(k5WT<(youBd z*_oPD-VI$959fcun~~bqX_3uKIrrwD!K(}#%=YgR6>_QD)qcUW<65ET`tImrJZzK- zaw;|fr~2+4Gz26~!xEzs-nYLZAXNX+ix&E$!0a0ktnPmReB-ey*LWh1$c=RQCA>T8@ zU?xK&{5-Hc@Stl$SaUp8M*K*@edz-8dh+0f@vL7TlU9ix23%lkdF)pBuLoT3US6~| zWmx07s4UZ+5lAA(Urw)*@etHRfcjKDxY7wmb%2A5%yLq*3M zPvrn_<7=!1<}hIRvlJZL?(z545Mz{ 
zR2L}CRYN|`Lpz?U;qSmI5xj%c;abODl=kL*`{qc|E6`?B1Z!4oqOvnIiJq@TMrVHX z-;8Py1>99A)*8l#1+T7h^10-y(fYPFNRTCU$4E%7e7!GoTP?oSJOMVf?WN-ORr#kC zO1ckximdlT(RN2Z@ zb}du7Ev!QC$=S%p`>7@#oFhp7 zA&@F|8P&ksG*(1oF%v126YjwY9XL#6RSmZ&vEFJqJHMlc$p>BM*)do_Za=je#m(l6*?CP4Dni?9H z$8`o)=k4d8HZk4A%C_PIKaroY_K`bM34NI`*V#l;!L&lMbd-%hGb!Vlby}+b7vZM{ zhx~5dxvh9*esNJK@9Vxbq)KkGSC4Ay#1@#n)7*G^B||N?LL1M;7%TUi$RG*`(%uoW z+`qH{;-$Uf0H;D)eWi;RUmXHHS8{?pGZCRW;aX4nTv$lRf{r)bP%HQmaN8>Pb3iEs zJ$qe2(fXy#Gdfgu_<2)MiNz^+ibUBIvZ-0-*m31957F?!D{D)j^!fBY3ann}LsMJ& zFE%8-+B8`EwmaY_F?&s}_90;D6S5F}+7^%cCcXShZ{xGCeMyu50InL)RjTOOsMN-X zrb&K;mMu&-1HVE%YF!<>ITP7?Iyyd_VAqcCwuqPXbnA*>ho{UzB?2x0@ET?qVo{OJ z;V-|!fK--lFpl^l`mXq6*CSzB7an;o9cwfbD$AnsIS|ygr%dC({SZXDu+FZ1_CFBz%5lZPL>m!7 z^W}r_@&|a240FL`LiX-A_-BZDCZgb2umTKI#UJ9d0FfH#|5n+Ws>)RP~71MdwnL-bO4VgdElM~Sb6RKeW9ylBuw&K9(?;0`Z;F|{ciNH$|%a~i*H zb=zJ3OT;@hg9t<~DLFDw>hq6(`^iE|97-1>poO8Yn`KAp{Qz#=zyCj+*(OWtgD2~+ z|B(MIRqhyzUqK)U;JqjBXh*zYlJ8IUwWh^lYs~_^kiE>P?*H|R+y4(FK);fgT0*L^ ziPX|j{!56fBfD4HKmN&ej}dcSryQ~(4KlVNJ-cDf{~>Qz2(-nv`?AO8mq{5Di!RR*k+FFoVXV@{YGJ{I)MS@B)vW`I}r7x=az1 zOIq-ZOx%D*e~;QA*nF6#5OAOOLq&yzuy9myZSqAc;jw@G!SCGC4#WJwIFoW8hr<)K zbKcWyICz*sM%#?ub7S!e!X%<=`46~fbRvY5zvMc{3o#47H4l`+kc$3dl6BW z!uLgmg?%$_I15ez*qwhd@&ppli>}!&fB+qQbk-k2+DA0%E^zXn0VnTk*Fb7m8wd-r z_bFFcgt8wOT!YB8@KW=Eju`v`99bbxf$$nKKEZK_qb-Yos?9hGuRgeYK=#Q|)# z-2^*3Q%2&2tYcogt#1+)hM$a#Ashf8?wojTGMq0YpJ31f@;TS*MCZaH1rRTS%yz*0 z`|qSd4Cocn-%R9@*F;nyMu04;zO^-YGz>277Uqt771uRjX*9{>oV!Hw5z&#mNpvE4YV*K8@etGYS8^DECM z${^&`(Dy9Z<}O(^X^On$mV){QvPP|I8;uY%XT2E$l|Q|Q&gnyqZEZ_t`_@17Lownqz@N>~fZw97u0Znro6nO^=bHSU^DKoD~!{1@YD*SH#k|WxuUt3O+k0HkFGJfAF z4b$@S9H4s-xHd!zw0cttqdoFU^T2Kl%$e;0NRLxfRZ#)2fxkzJp-Y?P<;>4o_h7v5 zn}Rh8J{M1bfY^!fFWTSuaU5u_!6pPo))$=Qf$%yiDmJlUY{Lzj9Zt|TT$N8Q1PSs- zXtN1MP@uP+8;A7D+T0wp$kZ_nVymyy)iyX(@gb#sG)|BJj(Sm2AKO-Y)VF}6*w&L;`*5CUqodw=N z(3T6H(gyMUG&MB^^*Hs9Ln4Bzg-4kC?IHA<@VH_3WjlKCGh3(o3{0jUI3s3$LoSWWdI!-m+1-Z{q`0x?&VGa24GQ}i z4xa|xZ55B6NqZ=&fc@K-bEmcCr?48SPQ_k(+aSoMDdQ7JyGu0pDLr#{puz!6wxI&1 
z++WP0@IEQtw`X7=21MlE$~lgXj!+=#4}wg+TQ5>J`UG3URy!Cit6_Y$@4B9c;tZ&5 zMZ0L>duqgglxK&%>TqTsyVbNXeO)}fqHeA`Bd2UpRI9C&g{acF=?y@B^Nc;sfs?R^ z<2^CwyJ3ef^T3$Pc#k5Vs2pyYbVtxp( zQH#>u!9SL@B8GNZmzWt0LJvnd{uKzw!B^I(&oL0|s|BYj8C2Y651V~tMa1iOCYFw^wT{-GhOxZ6X!nyLQR`o(qYDd5@M zB0od_^VVBor#)N?V&*!F>!8&$3e>+Pg?ILe5`7p9h1P@lc^`sUwM@cp4~#?7q-Joa zlS@`c;@5Dq_czX*a{JkikFdU9v$M~%Xb_K{DTTT;xZFOo5AU|}!5uE^T zT+nmCl*7hE{Jgv`H3Sn7zE5b>iztH80(?>YraaVbiOnd+l#3U+MX zudmmOxndp`nQ$i*+xReTuM9b~9cNQ}K*hlO+KoRr&LXueuUq{AWV`vi`~`0spD{5D zCWFalOY=|uog6oQHH76=RsN?CjQwr%&3QTM;$_!{T8=!=rZcL?+8lfNgUcGs6`_d$ zcymPRvsoGeO$)Vei;=`2PIHa3*HFb)XWJ#Dxs;)j;&@wn90&~I8~4*RJ6LK|c+u`~ z0VMbRzf&IjyBYORLnED;4ZcXqL@Ly!W?=9!gX0x=L0LmftO^*AlPzCgzpsBTCoK*A zLOTyES>6X|t($Qg-%18w4?1I_M+I0SL1nd@wq)35?YQS;U3YwzM}kIIx0uVK4x4u& zFEr1DX3}9fSigS^PN7ERC<-f`n#6iuWb?_Gm)i1@@8nDr!`@s*AoPWV7!0m+ah;;c z`t_)?IbRo=z|74Xzn+A}Ctl3u%a%M4JV1_K@m6LCoz$K?qg!g4w5K|LR+Q>Gc!Z4ax7KqM9tCC>*IsA6s=bud_@QWgzwwqhoA`r0LK9 zi9C%?|)){|`)R>nTcQ|a!b0VKRz|BeCf6C))5 zJw}%Mr}V$~HZ60)%>Es@#biCS8@y{Y^!_20E-eYY;{h!J`%M2q%oCd%6vL(s$Y(1&o{}xyNzDm-wH~Z>l@L){vy64ZO1D`(9KO^4u4c2Z`&gpsWRI zgvIXM63U8NiTVuVA>MnTM*Gjoa3O-?fW#oyV}PBF7Pc4j&0ldJ^woyxe4LZ;ToL|%%;z&dAcvEsB z&$sTFevlV9pBDVUSp4a9_OS5ooD0a)Y{!f313If;rI$WL&WW8UeI)lL+~AW$4I`k70KR^TUfC9yp=`9_|vjnDRC)VG(P) zfe5@gE`wkdFS0ilrqzjF!Y?T`O%XPdWl=*#JYDGt+QaMNmR{{^?XsZWL}f3e6BUKQ zcd&Yb*eD=3?&SWzi)-BUJL%+XU4hL(BBmE~WKHhW23#g%%f|Q90BM2&!Hk3$ws}e6 zCg)80e+lZ4$%pf6B^D@dm5XB&iZfL_N(h!bjeWnU?u?P9Uo7>o`}Ixt=Hm^XR)*O6 z?{DM2-LUElJ)T9+=FLm5dzKTVW9mE8XtOsDj0a2c0~uWk6&3D?jLN-i*{CeaQ06UXBzR#%2(bOFCLUP!P?c-B*V zef|n&UaLn1G-_CoT2roc6Vj* z`YKV4#H^_TJuB*PTdP`#k{(eG+XWFHM9CDLl0@0#9o_=P*ulV@m?&UXK-kqYy>^sq zG?-aJ8@(jCm!Fvud@kElf{4{hxbAFHKn>i)-l)iyu>6Xm9AsBn+7Zz;AT$J>nC+L{ zS8yUGYh~kY1;$@qA5Yt_MfvCYy4+a_G$gz%TL`^s*lRYnwkJhqH$ll~_^EG{O3I%H z8_1AR-Ep$|lHae+p|uCp5kLZu<;40FEoIPgMas}M9dVJLhg;1YuU|8lpt;fvgMzzs zJhC0s@cOeJOM73?JUY|(^j%xoY4;-hXdAE_OB)yrp~!Y%$g7DJ-&N|B;s$JhZB=-i zkT`Au2#H-Js+6s;?UUPlyo)ViO=50dJ;xQ-!i5Q3%1q$(iu0IE32_E#XAO`CdNflB 
zrGA5Gw_f|cTOrxMXf|>$t5*WWVvQGl>BeVa zQ{o5cfX;Aek#EUE+LzQAg7e-Y^4I4NQgU#gH&Nc8WhD^4ED_n3q$O^c*;7I!=Y&z^ ztqL)zTo_wAy&Lj7jiA$4VL-ksvtaF8F$md=06Aw{wwv8hv>7?GTQx9mC6QUmDr}QJ zQ9T<-BX%0iuAU7>+ue;UTN_vTqKP>{9>TLmSJ2Usu@yQIH0Yjo_Dp})>iO3Hk>lj@ zOBX*%!+T$~bj{7{PO_07Qw5yoIj9p?myVCuMMUixtNtLIVs?{B4a97H#pWV2^QYq- z{Kku}oz3@@Rr>vf^5~y299@g8$?P^nzr1&-9+ny^#!_LGWY!y}daUzYz7- zMIMmfxo>m7_gI|gL39EjLAtaPPR>onHZnW&F1$v_)^+FewBB2u2dIgk8olEExGBpV z)NXSK@8H%n1 zq(!7tYn!~%_FZ2}LjXA(a_|j6Oh}#Dm7B;Y6jA+#`jHZ zQ9h~yUhDZR+nKu}8+8#HIvnZsZy4QcLE!mqw}KTjP~eVTsbk%->Ft3*ev-|XLMdLL z?BY(oEufa77F%s{X4dA&^p%=|!zGs4gFfgyq{~k&MV>MrN_10OtUW$8Svuq< zgfHoOYU?31Z70|^7iI^GY*CbBol4kD^fD`Mh2bG`dC!4hTmQD|G%NpvJC3SNa(Avqb6iry`=Vy8uD3c-jH)RF<0ohLvt)uf26RLU=f-g_YVl zH$Q(oFEa&1)gACj9&y6XtI;#=erNwoBBs^KKvo!bgd0xl0c)|R$(Ge#=dY0#;Gp3@ z?i)bIeMXj#{i02?c(zyHSA(mhwDissp$wOUyD9;W)vw3NNcndvwQT!N4a0)s)yyZ0 zO@B_DE5D{|;5~sDX#Din5qj+;Jd=tEYJcz1Rnbt_%__K?@yQz?1zhaq*^%K=r`X*p z=k21*ln3fLtRjw|Zc|=-pz3?-Av$|ge(FA2ndT#{vLt?@6T|9tg@A|k+(N)Z{+hRO z?X#Bi%<*8Zn{@G!1PWVeimd;G3IMGnP4(tC5IZbG%~FbwFf$ zg5Lw=#Cgu%#>Kj*r?GE$ha9xCep{YXYPE$8k6$FYFNc zc}mo*0zuSz~9u{q$xJVG>>SzJYU*?d^fLPBrD; zM0m|k-s(8Kqf*-%oamlCbQ^*oWkOAS8d>5J*1vt{K2ggv$7@#%x&R=hi+AZ=3=u;Q ziax3Fk8w`z1~qaSL~fB@;`Fxg-V}DC3Y@fyY?t&fzmvd7d97!VIYmiC3nRG}EN=gl zIuNGVYV7J-YXP#za$cwo<1*~p>vVM#+Oa3ecjC?x!^0xV*806~y|YZ_3gdZ@J1 zqtS+Q-(}^5*lZAt0fPb1fbQQVYLBZPqR!px*?Wn7 zov3^Hw75JzERg$o&0ZUkAq8Yq7_~DFX^iYJDH4JJqq_Ge$d5{9>yfQcOz)k!lr4$r zY7bw%EIC+wm#iHquqk(1DRS0?7yv+E=31y5p(i7b(5^sk&5fV(Anq?iEVrU&BfpXgN4 z_5Q=w#EX9H#8eY-^xpo04T_?nbs!2{kh=}?IpuC6sU9>!14K~ODMw;LM|86MzKB_H zC{K(|Ch(l_GCjag-M5%|m}m=F-l-`mB;~#{!VCH>tHh`Mcobm~H-XY=+BLVYP3sQt>E~pBLU2aU4zDD89O*lc#pD+1 zqc!zrKOOfFDz|?>`88SgeN2ZSKGP0@A)NlA`*LCyV=KYyC3-1JB@aPZaX~ll`sz%* zBDm@AOrwa)h{5=jt#Mp2@F<}DM>bg_zX9kl71Sw--|C#|)kDHUd}ms*6ez|4-nY^J zGh~HAo`CJKC@2s09LC~jlea&%J?&9C|1d8TJG|jX zipM|iY|3$VtTbyAOJA=OSiPLE=I4VYe|GfoC>0Wu+({iMu5lDcD_)!C|`dqZ!FJ2`t%o3kNUU7^%veY@d=$NozmUD 
zGj$$32;0SOgo+9qo6OzX`T#To!mzr1yj5J9j1Q0)B%jOXw@D=weyPxmv6o3~j2`0c zwJ9CJ&rd#V{^iO+mzS{GqOYglv;J)d!h1hhrXhX&qE8qbV~xx4^kx2F3Tp2?eJI0$ zM&_RTEss?nfX@Zg)^!Av^egPv#8yM>j2>XnF+ck@KiAv)>M#60MgA9Q&VT4ZLOsqA zKM$nC0Rl{7`1^hG2YaclnqbKb?MIK2oH|%wDGzfTmL}o-bTG6jT`KaZbPti-;yo|{ zflA>2W#98)HsZ1UzQK7YI-VhUvit|w`acy4i6ul31%ScJ{ty2Jb;SPv`@gk|`Co9f z-^yST_SIic3H;vn5C1*cYN9^$Ohv*U=7i*hv=-E_{m-yF@&wcKT>!;U`yF}lgDUI6 z35|gVlTlqTc#kYH!_n$P*#l18|8qDnayy_SN&d^0Ze0Y@39T79Mb*gtQtSOY4`i~X Ip8WlP0MygOJ^%m! literal 0 HcmV?d00001 diff --git a/HybridRAG/assets/img/chat_ui_upload.png b/HybridRAG/assets/img/chat_ui_upload.png new file mode 100644 index 0000000000000000000000000000000000000000..1e1ba85d4151aa78e5dc5a3616e52d27bbdbce95 GIT binary patch literal 87990 zcmeFZWmJ^i`!9}(qJn@@5=u*lbSd3Pcb719Hx?x&odePi-5t_3gmfb{)X>ckXY2F$ z{r>)M&a3}g=e#(xTr3^#xo6*dU;EnECoY5D%1b=LAi_XHLwh78DXN5qc6S*K?bhde zH^Dn&$v^ba(B7j-iM~;Bn?laIx~Ztnq3_O*z)h5>W2Bfrf1SHW6+z@dt8^QN_gYiT zdtO(!s;h9NVymzk@@C}D(c)@2M zS{GP^TSRSF;yjn z?n`j@L?xIC?GsUxE_&?07uhBoZny>%lx{qF@KxBos4_$>>@|G3=`+UNQIUGt3h zzh~m#SIEu(V;^cJ=sspPcP@5FwaSFt`8%LijQ=r%1b05i?7n^_6BUk}I^M`ma5OvT zDSyVyL+yY_PDs+5t=yFKQVVs+gz)G*nm@nTPjDm`cvRrBo5W*&_P}@EnOlr=$Xc!A zYV^aXU$ON|idpA>zh-yWaQ)~!qVU#5Z1*sht*`554yx>&CCGDfZb$@@9297RisQ@Y z@tRMfKUO2wpdegF9A+$>vDRH(^`JL!&d_`G$}rn{GfB6Q1ZmM3>tpxNjE(X|RxQ$9 z;j!X4{|Qb4k8{sgL?Bu$EEQQhNq)yC2Z*7(k`l#?p9UsW51+Y@H44EGAL%q!C`e`f zRPHIm!QAn1R$lhkQy+-#Y^wU<3je5nKD2ugzs20=6u;lnh{Pr^c_~z3tIcKGEf^f? 
zyZiLt4f#IcS&K9QsThhg^h6uAGokM-s~P2dx~a}7cE)qB;DS8X_I$kX6hS&*D?e8` z!L?t+dI@oBIj(3_)E?+2vzlHmp>c6WWg~~9@WnkM!G`|x;x~D$*1|$NRmEwvRd^9m z*zxu&bh_57wNe-VoRhnp$9+0+Ox%?sDeJmC-hTut*vx3~bGZUu8-@U*u zNl_|KdXXJNqW7$%aynrMXa4(3MeHSk{P;Mzq3X}t-K&dP6uO&f-$JfeJGLXG!=-xD z%_ThIaTOb0v2r2|;^e~SYn?J|7w1g-vC?aY96fjR@tl@Aa$L78@%4M=f`d?=D$&2* z9FhGU_#qhhU?j=aW-L>bbazPHyw6EYMHD4TX;EBIVDvt6qJgaQD%Y5GrKX_win%7w zzp3)FS3TA}Xh%pJ5495t;Tyn<+nV0rp87tJ-u`MPvjp9Eo3Hp4oh7_mccf8#8Z&j8 zsN&t}=D!Po_F6RlE~bc#`}QZsd6zIcDn(zu)TgNC4tc2+ftw`?hp_{KZY&fS0TTPi zO}3lqE1M@B?TxdnFiP?dlpD-6OUKS7O*K}XW4bR}_2(}pymPks9`m-lYMxGEnp+D! zr6Iej{rB0>SirMAj@-U<#s8q&rX8Tr_>_t?H>tdP@uaEDLOzoQr0x<8V{&sFH*PZ#6X}gEJl=orX-N3g&_X>)fHA~>qx;_j0qyXnA=N`@ zbnCQ&GcWi0r!C)Lg50y&p6IP!ZxZrxt65VG1e=iH@n!*pf|#ER*<-owbybL;E})6P z^ug}sYN)jyubG%|63KrVD&*>hy0+H(1z91y?Egj1V>$zK& zdUj&{J96{XWFwT{9VM+05Lltz6c*Z47Z<^>*-~iz%#8QcW-4a(V2r!F#B;kzq-*N? zV{&_wLr0SSnWv6og7Jla2foJlIflg~k{P*0Tw?9zY`linvmx!>!l#(VOuTItWxqb2 zl&%*VG4J~}bNY=YnkKZ^5!?= zdc*HsncdK`p15&1K9=0xenSz%EYbYZ%Tf~UDjHgphF(taNrAV|I2sq#z(B|rf1!h^ z%Dj@6{Y11W6q8y;W@T2Nd=|A)T5x{^cd}C5#fghonb7msC)9jYJqzEWjdh2FbADg5 zNQvpKO>heSdA+}Cw)w@PlBo%mz%RUa@GO_`*Q(L>mhKid>-48g|C)gmA2+IVGKH=gJ=-Vj zjU8}n=c>I7dtK@(-);@Xrh0DQHq6Y2Q|A>F(HqSIy+NKTN+*lo2&6eSh zb(W^bT9pa%pnZdLRHf^AP0F}{gWWysYMszvp&!tugqM_Xt@T;GlRie>h&R9Jr8{r> z?DMx$Z5Qu&35Kse$A2mr%Hbj}_%r@Be5ibCN`khImYy0D{mVKLQJem-{(ejwCixpa z#u^M`lIoAqeh?TEaX*aUTzO4naH8545a2Q=Ll-2;cD=QAjeIrCE2j10dL_nW9a2=V zO$QPY`QAkZvm~$X)%){2=+$-9hvgdGfVtWv6`TlegQY?NX4fyA#$}n%)FA%ju1d^+ zvHn1-c@a|OSTSerrIE%7n^=rL=lFZ3(UCD*l-p{YzOOH0m7Y-3T~3Y>{8`s@&l%9Y zTFkq=zB=5jJ@=0&q(FPmDy(t83GbP{<~(cSLuf;#vX@AfVOZY!scyYMP#QS7*Vr#{{d!wk?1OL4evs81h*Z`;Wu zk8vAz6#m&MA&~oA^Opt^_Uq7Xd0GZK?^XTO_W2*zeRtm*9rMJ=*36CwAAbd24o?1??e=GQeuDEmUuHF-=01XKZ50~nWP6^(z z$0un=%cW-7G;;6xV#yOxtuP)Zh2_eNc(HES-MiNHwLfO}f%Y4xuO8G*IAwXQX{+fi zyohvC!vm4^8Sl%)dK%8NlQfZv629Nt>xb>c3M2>}v-8j+Q95f5)oO2@u`h!d={36OHPhwwHmE&M1^_7;F z6W}j48Eii!#Ui}(6_gGUjKHy~kr|1#;TjG(`nb 
z^Rn_)6(w4i6D=ikDYs~ID&>8a%q%^``+HU%%;*rK>$Ca^*O*`bEDuROMtA1*_VyMr zRD}I0#aTXA>ui%yvrX5RJD95k9V$3K9R~zX&Nqv~oeHqkTE6b>LUE)ec;Vf# z)|Ynk9k(FYE0liUZzGE^J-b3c+;tEX>pp zbH0uBrp`_yA@}ryG#E&Cy*4oU)72zgdE@Jk3K|+JbO^8art5e~LiI>S170Lt#(14B zHJil1xhK<#@c!A_{^ix>y-P>?9Wv^+PARu+-fE~wi^$QkXdX>`w106WIx;fju)YzR zMnJ}6$5?<-p(P?38;dX_ACAb!_#uKQ8xWi>HoSFkfZA!gj%&K`J3C|cJ?jPk8o03H zm!dKR$Rg|O_lhXm8sikI=_<4Aul+kZ0uRr$PnNswN$>`}wz3!-7ctGH*wKaZBLf5Z z@vLVgp~rtp`}E5rQwo)a(Bo;v1pL<~!& zFxQo{j5>Q`R^6DJN8t=!6ywf$EoauNmEGf2eI9kn$2>Q3C`$Y?nI0Lj#7=i*dv z-uE@HL6^x`th)FYB-eAY^X)rru`l@=Ukps01SCW#AUEs3nH_2~OSsORcnwAlY;Ac@ zAI^0(hJ`9));{^YR`2I&Ey5ErKD8B2KL`LpVjSI2U=mG&&B4Jj-uo=!6Uz5uy^bs_ zCVI}$pLgTv9GuP+H2vFrcEWk1$6AO1{KCtVBr=DCELkNf(NMWQd?p&NM#t0z$vBR^ zeLISD|BmcytLt+!hg$jZqqhnLYN(~3%GYyOM_c7q*{`$xh4ibB$2?Od(eaULYNKBJ zXI1T4u7@5C*#&Y%Mt$$}LH0dGko=Y_Xa%rBLzKdWM$`2ir}8b#5yj#>(v2 z>rNh;rhO@w5+@}5b8)1%2<#c~AYi8b_hM}8_wJVMVPacP6gPj5ly2#gCeT_G)2=EK zD!-U!IxdeO^)hX4R_~%WnnZq=46n*`sH@tmxfe9c4CpYOU8o8XjMVi(yOv}_9x?A;Z~(uF22e1o-~M_vG0?A&w@Kp`SG(~ zzLmZrqNz5U76TXsY7(s?21ewS<5*|1c|qi;|NMSyqH$9v_%19FXv7 zJ!gRx!uZ^lRm@y`K9p1P6|ubPD(X$N<3_H8_ZPtOnCasC z8M{jdFq@$GH5GO3j}DfwRy)bI)gx|^S8frUQC;h2>JPH#PHf4-#nm(JqJwh3qK{*7kLh@&TLOHWc_ZM+1lepBqT^$S=dzb z8yQU`CqzBEp8+j6x|kfs2$qhKjw%11UsoI%n4iyZ?b?s_dD zwTQpLrY5-TmZQ9!^=!pyl1IZ_|CmQ_+2;0ktFc9+%Sr~h^6&xvl1U5)NU_-p3Eah) zthhmA0*!57RXAU9rqV zhmc6?ote}@DFtia)sbU;JsC=zg#5VXm=}r=5n7(C;k}E~2`}>W@5kt~|Ekvfu1$}JeFq|yfcQH10<~WJK_}of+F~&{d>c# z;HXv>RjCXMh_$K{_d8=oHQH$RJMA@RL!}Oy&T_iSj_e1q7jr6mq=Afm9Ep|nA6t+! 
zEqqU7TCt|e7vtn~~`FN+Fc6zN?- zhW?qQ0x{BzjC2&_Gy#fC{bBw~cnC9j%#7Sz$c~B%M^(9&)(V)v%Nnm5gOE(Ho)Y&VBmZ5dSI&7C`006j$>P7_GwRb{0Xn>_4=CN z?TpCpS-oop0VOfhSo&^!9x%7>kU!<>Zr=C`npEpuKGDPV%JBn?FGAWAtY zFcnyQ7M+}|EJI&kpkz_b19co9-lx3`8d9QGoH7PRL%D04=ZCO$p7~2V_}34m8VqSa zMDs@1QT4OcAiSd>ag1M5+Z=QHO;}q9CNO?n#l5SjxA!UCnN-&dilr% zr>DWBK*p&TQ*b3FK~9^q`A^r>a67v<3d3>>;&3C%tC8VF4fAKWNy?B^@9j6gsx$E@W_S8?AAGhcuJ!qvAIk#0L&2w${BA7k0 z!jKw)m>cUQnT*CcP}0{j=g2G$GNNP98wqLP4i3PGOT8iHw!Av8qM@%v_Z{NPtIXUHgs*`W)38)VHlMI^f-0+$6KWm>|uP@AMfQB{=#2e-+DcZZsS(9Lc z;#^Y$ket$5h`lJGyip=D+kxxZ{Sl%0tKrV;b5vqXFiOJ!VAclou!>4L@YGzk8kbi| zz89weSzk>$3m|XJkwEBs%~O25bOMF2IbLxRjs@O+|6*Jac5#q;eF2L3rZYs6@6|zS z0)@W!(dfZ|kkF9x#T{|L>*56DpFKD0o9588Uo|o=YU@~4mmFw+c9&TCX8HwjTVoWt zhzJEm&gb(f%(F_ykAw56aIGh0C#|{xMQ!cby@8aC?Bp4-HeT-D2G*TnN}1Kyb8W5YigmN%W45P!8R+Q=k!#|s|7olg9T05$xy)(2 z$(T3H4L)DtRq<>2N+Dx5AQyMJM559a%tJu-j$}wLqFQ;|m-!_&(czV{bqaW4Y2xDBf;|7UMs(W3u&FaEB zK0{ZVX2;F}lJ{{kWx8YP3!&P}Cm6N)O~SAZ<2!9e!P_>SEUXK`jPT9PXZ!*>Df8tn z2CY0d#K*vd&kMr50M>|4O(2`|z4j)bDltaKChV^K1qi5#`&Ql=YAEr7xuFvo$~>%f z^^*nuRgQo|h=-sU@@d0L*_zEZ$zGz~wHBH70&&G&s^T-$w9Cn$sX_pgXKgQ6Os{UN z#C4joUP7^Vv*q3yuI)TDTL2dB9G%ZI=&a9xk}kT=#k%9vXKWnO*i>-$_q@5i+j zF(f;$O(Y1WG+FHtqX*5}jbvt5`mAo>GF@RKC&I`1wX`KAN*9xx1`7(*VBkEN!{pH_ z;AEu832#?Z8^^&8XkD=1LEQHcFkM-u_v}UQrKj7OfjxY%XWMiFWG^|XJ`*3aND1_y zhJ*wQM@Puak@kY&jaWQ{f1Qp(?d;4o=nmB>atX;@Xh$MSx{Xj zOuBnpg#|}_lBBeG%M#jaCEUTw9pjOPEs>yi3=&MgQi$pSm^1`e^q}wGM24M zr)PVzGY5BLNZz8nn5vIq_fOo_ zc*Y=jpC~B!%2>GA*>Ok5x@Dwy%FD~m%vKYU67KumbQc*eiCQger&&LGWmb;*=ssMd z!-QQjiHsg$$dXo7)NEgJC}_hQtvH(Z_Cj?||T|U^DJH303osg6? 
zI@bI9&XshLw&wBRl@z36DW@jioRv4;z(4E8UszM4xxe)=Mj9HM05NK*Dlh+a;LgbS zC7FMHFFtk4%KBjt-TM;v%cJos>$*C&89>2Y4ESD^9q=3=`XH8;U&6y_(lawDIg0%M)}0s_Za-j2&qSpJ>Y%LEQ75tVagu=GB9T=#B?j zsXJ4)6S@I`qT~yo?VADIdHnNZ&G70#O_H*S$;xjq6mQ)a)Ya3ozk<(|tFjU9iZtUy zn%UJPOHhZ_ML21vsrdPw+G<>D#KpX%dpUpAUZpEPh=(CNR-lDLyw+N;k~=a_#&6F~ zmEowjfTWzfIHaSemqBFdDRdh9T%8H)pxC#p3It|#3u$Jl)8!NjAKu@mN{=4Nht^?% z`4>rhyFKY!elVfbT5sEsV#M5@%I!Qi_;d@>XnNq`I$p@NdV|0o4-ZdHPEPbNKK^J^?@&Fg4A z5XRyC?2wZ#4F)3iDY@s5yXf+w={Vwg@9#cC9oDYEcNI2((oVj(S8dpQ)h{1e%j@na zlP90E>cG<<7Wv%*yI&rY0k1@3d3_s)fTa=!t0{ri6cmI8!bP*UZTf>V%so8ai>G#& zF%4v$cy+C;tcs+lu=|p@iV6$2J3^oR&PvPOQ4+Z=1e*VP1F-zig??-K=fQ-U-K6W5 zf2RGx$3dTcU1jjCy5a>Iq@aUGA9G>V&E8?fb{@H!t4#p!$R2y}>8 zS+EXmZR<)~j!j=*gtQxl5^h}?Y)`}uBq?PHIw23$=daxM;djhw&;`R$3px6;@&lNd zy9*9A`~KOkf)f#uiG)PUohuF!wGS4m6#h7Hyhi@r19xTS2FtgibdQc6!DaA*O{ps5 zIo(^LyY{>UZ8IV0gLeScblHsi(n1rr^^+tCxfr~g&+E59HKPL^E7EJZ$RD-DBH?qSVtTo!HZ>yA z5$-gK#;XK*HN{yFGtu%@MQ|D5Jja7z(g}WVV>MdFm-KP#PUqM)e4G>i#w>vcP+mGl zr0@{w`N>`dYFtC3H_%Rn1;Ka$g7 z>KrhKgGyFYTa#^P<0v|bSpeIy5uD)#a`ZU&GqT0tiKN{NcJa80iQ4jQZPYntbWfxO zQ&;(14yQWFI2yQC>I)kea+FKJgj!aJQ{wc;v%R#`5_Ns>4lgF!_Ihy|B*24J+0AVO zu-yH#=$ryIL;d~dC^BCwX5bz7MC>%(#jN7oMlZjOj*G3#>^HE1#K7jiyULS~&ZCXItKix30oNP~>R zAG)u|d@Ze2>v!vDbf6?2mu^n)D&ErGd$h0pgv8W_dgJ0njnk2MLw!VGej6Sj&9>JU zghc_fA)$~=|26jeD3M?)7c2kiPrtH9F*|QiRB8$-H6W73{^6x1MW+QPK|s3s3{Pzz zOVAa4=XbcU@-egr#hjHzqn*zPs2r?Zsy?xMT4yW4&aGDU$fr)*zQnV~E!xfy;S-%M zCuWiKuqGeFqr&xNs@)f)g=K<;DRJZp@aqlc9*X3l9~UU*aggwe(z3AfFZy`L@cRCm z1+iABGt8AVtaUUY3;;D; z6^?ET&RcG){)HuT@XNkFv2bSn$Hc$1HC4TLyI}Pn$;mNx4}84NZpu735ER5lXcxWB z4SWXxfoPi-XIm`d*4md3Z~hbV46m%M85uD=`5>f4!A)>T#S|GPv_2t{^vSQK9lq@H z{KbyG5e32dvXx2aN=`LV6&5&cWk;?9_ygv5THRI+cy$xIq2~#6wuJW zz1DKrm4v@jr~*2uEVRn^Y!+||es|9RF_2$s*4f|iC{#|C9b&nOi=*taE%;c#^}JqF zIf1fdH5TVLXV6>Js16%lYW)aco5MjSP+c|RlbqG1@XgU()mK~6mE1)#Vota97CZ#9 zOqb)667Jjyp(3x)I+(@k1Z6KTpA@M!TWX*Ubx!KpnY7QFJ;bBD^E&uDQ`J5o;({+1 z{tzTTj(V+CpoZ^wyf}HFZk^x$U{x$gv}NQl`E%PcnE6)p_wGIVUYD29FFmXov#SFM 
z*+0uo%~q@Eo&YlH-MQDe4|yVwzz}$TMuAn!3pqLM>5Wc2d+r8!2FFtjHHHLz9|;%E zW6SqNu6EzBQU|!6VgSW3g_(hY?h?rD!XFG4>S+a2UDc?Tfc&5NogpNfwQF zCf7JIE>L4zR7`Et4UT2cUN0%Q($Sd@#KTp7Eyiq`u9N~okad0IeG>J=s@_ zz;?pLZPWvF|2q7P9ykH^_nng19UXMNMTCudI{92|=ZdF>XWQ*m_qX6frC-2;Vr1=0 z>kOrMOT-H0a{IFOD;hO!;TziR7l)6$JW4zcdJ9^F7-H@c?8{N%_8HLDTfQ zJlumx0uA+t5C8fb-y256D8E(K#^+(p@7-OK)=|U)KPF=Ad9Etpcn;Y^hOXH&#smu?+aa&W_jz?64+4y1@tHIIYUoN42*7N4`o z93z2%6rO1|s~S15dWH()IUlY`k7T3Yn|-1>Hy97q{iD2vKO~8Tsq68fsO-H4Xl#S$ z{6Ypzzd*wkD`r3eIMhf5sPgiCjQUD<2hkA1t>-yFc^NzG3cQ~td+#YJQ88qo`4B$S zlS?Flj!N0-Up@GEvqMWGzFqH&t5Y!+)&rBU6Jj+^PL3TuyO~bU!>O3ygd{<>m&oZ$ z31-~}QsQ63D{`nISp@w2Ts$;;YxLT?qkXa@fa4XwmlKm(*GDm{@ffmU_k=tak$&{! zwmGH>#AGv@=CC^3M**$(F$nU8*O~Nc<{Cxr9;-UII_j0$RT>7CLqm@mKhfCam@A~R zmDIVc2LUYQ<zv)Q&S)42bJnQ;samO=|!j6H-lW;$jDVPHXll?b%o! zk!4n>0^(w`Q~M8tzJE=-?_XrAN*ZK0Ux7(k%0wgi)S`Wt-u2Rv0}%~B##MdBdrX=Wr}TiHJ3NW#s?v=sHu4; zVYg!8G(fUHs!j;WMj-Aunpf9yAJnXnp*mgvxq4<2H7Oh6a-Wh)m4 zmi|j3Z+GqNb7^#R&ky(Ye;Pl|SIGs^bhaSd2iU`5XZf%U;hWx%NQ@xHD@y`eYMR3{ zGe*@c+=JsRuSX{pjcm=0Eett%jEEhdf~Yzq%X^!foiE1>G-JF%z?+ z-7L5|d86Ra;^qwjsg~o=*Y3S-z|U{OX$H{^)@t&N^=aKBDibF*gzQrVJMJksLIp#t(kX8lsCaTggygvr&>MrE-MiB^nQ;Td?M2$WM1aDvZ<=GgQXgv!OloDMU0iTlPc3oYgawb&fEw|Uky9QsB#q5ANvFh zf4&*zpKaD^`;-e`qE*|9GihK|Q#+_{iqVtq$;JNtopD^k$U*{w&;RfiXlNp>9a_kh zb#c+k%CSaci8@|Bm#V!$t&E?Q773$!y!ECUFScE@j$P-;RAHk)!w2EGVECcCiru8N zE+U{HbTqihbFnicr?7)YH%JCdqNkVptFoW~;k4zwYR7bH*M5@xg0@dq#wcWlI)-25 zz>>54rR*ucjErN2c*dxuJHO2U<5uC<6nk!D(2~ednSdf?@V^0l}YdjOI_GDzu{V2Y@fvMGBVWa zC6;9#_WL%Sy_p*XJc*sFBkAUr`>MRDgQZzh?kqD#mJ%h7_&@?UGZ=_>w$9n!o3Vic z*PAe|XZ2tLnPPSy>r{K#ISO^Hsze*@c}ynP+l7sBht#J3!TWH7*@0PaSX8X9Kjd;3p$Kl@U=v=nDMC~D~@bkSabHka*b<5I?tV$Vd5NUvT@;- zXyiA0L24lmzD?5h<5BEo2V(^uBgJDTic)9o^y8>e+U!kkEBSBN8c~e}j4U$>%WtmK zniKy*fHO1s!fKtz^+ND z7PGSb2f-*kGQsf91&3VA4c?JAFeO*+vfZnr^>Qs;pEo6bHC=Cp+IL@>)7-~idHTee zTTfM0o7whjNa>Q@L__6Bx8l%PHX~!+JN=JZ1>N*(SsxH)=^)PomlfDjTv5Y-U3|Di zmT<+)ShQ9g2{Ho%V^MK=ISXzAPQ+W zOEwNuvual`k0ZHn)yBtaYIq4FrO7wgJV}IYn8FG$OwULr2RY01Eg_ppQw0-}CxSAC 
zOkvd27hoFK^ghpCmEZ7+`6AQH$N=}Qm$+ujsZ;$q|Djg)H=qHP!pUY!h(k^dxMKH2Asi7TWca(VxA^CcriRek!9ofdEr>~3!zB9Eos{Gsf) z>Yr$9vCxlUN0ddi^^cu?BTPkKO(^=N2p zO$e$|nqKwqh9#_P*3236*Ck_YtZy$OHlDa%s-Y5TRX2u(^f?y}T19*bRi{K`03-u5 zQ`A92xFIeIBjoWDqiQ&Id|yu?(CJZ9IP`DyqhuD2#8vT)Qg!7)=T%ijv}!;Rmj?eicw{f1YWo=H=ug9;0te1Qhs z2~7@IT;?FNk^;o`yt8>`~-(>oxa1j!J|vZJ!n zO3qqVZi`reqN5F_ftf^tw`j|4ZZJc36a*RZmtvZ>`7+UTih z>$MMB623X33k65Z5{}ImM#h^L^+8=NGgenf3r{TK>%(AMkxv)(oo>;wsxz`uRb>^J zSdZ391+%kT1`V20<#kWrb=sIE)c20tNQzIg^2L^~u5XtbuQnDx_63zoNPhjup%s0} z3Lx_z@fdjC{Lnol=Gb_#fdT|RCdTgG35l|@cB&gYsHUrJMsduxt&12d%2|R__%7>q z!mkd5uA!h%?zUI4^pKaba0iG>Ue9pg61j-{wSgo^jW@EP-Y4X%z0@Agk%coegz-4C zatQDl3DaUz&G*xOjSazikcdOrpS-kz{I$K|1?c~@5_e&@w?GpHUb(s?M@9R|=R#;N z-ZW~~+!@EU#g~+!miP>O8Ra1}a3nv#q!JnCY@hZ&3pansI%om|S7I`v z9Gd_Y;xS6z(I_f0<}aj~n=5gg|GfJR>=j`8u(HnA*6In^5!qa0u{<9MqEXN{ApIO% zCZgv9H0apW@VK%i7036*qQ^4w3Rv;|Bl&qor9@|b__%uW_RsMi1iA5mH z_Qq|4M1`%BIB9=$PD%~U7b(3f*0=OH#@F{1+KRYug~`XXKpoe+bs?}Z+P3=(xN$e7 zs)A*DDQH5M9Jt}Dj;-fUKe-2bdyxZzfINZ)0T-*#qOg4j09<62Sd(wwLxR;%t~r7! 
zoPW8okf?uX%D&kbVhFQr5!F(qtgxmCE2_$SkB&FDlP^9l)_+U~>ue^1rL4SgYTpd9 zkWJ&Eib%{2F-V8Xy^-a<|B||dN#CtQ(5Wb7+JXRoz(@v zreq3F($`3ijZoIo1HQ)3_YL2GG|NAu8xO4pnG@WOAz7$(x_4=4bdxJm39`AI_Qpq{ zK-+BMTTOvnk{i7oOhruoq47;3`VujXKwo8L8F81ZzU9O8MK%BlI%w3-L-tIV)+@3^ zm!Frox;oQE#{xOZ+-x4}sj~6Yi+rcUznY{a& z3VRE5MJIMD3U<-Sx-Kv1uouvQbtlbJz#PEj1`>7#ZPU zhtfUy5`wIswwRR8a_IXa#!p&xiJsrOTEOluqP*Cr(t5z*6#}Y_J;OJ^)z%8gU2)H9 z_i7&er*WXVoR#mjv9cr;VtO~3;pje7h~9dXl>j&Q4~(yK8vg*d;3u`c%*_HY2yAqd zl_!C+e)O?o3ZcXUnO2`J#_q|U;&@Kiw=6DCCjgxkP{Vz2(aTFZDdZMVns{pN0D66Uo06JPN}#CyNB z`e7J{79y)AVSO+*HA?&Rtk4@hJei4>;h1pgYX1}vN*wWtE!Av z+HTsq8&3*Vas{uv6yTFGK;FCgAqc>&5{NHqAEhCD5xPA1T!<--uikr>3=~N_7YFPG zYJ5NhnOQrar0;2#{K(8C{5C>B8zL8u=}4fF1=z>x^j*;i`GXs-=*D__WL| z!1p@w&f2E^?SZ5ye5+OKcydEV<^UKiRcWIsbfNRqS<+A^`Yk}Saa1^4TWgVn#yuc7 zu5uOttMRk-sT)5Yzy1&7dK+*TW{wlTS!yJuVOV{&`{nMe{BstHcwpe4m z&wy+1%?&|*rno@M&E)lc{L^^yvwBDpH|UOId4}-o$=2Bj_k}W+^ocZQydTqHD_XOdIKdVD~5aJv6aCRpF&b-`Gh-j^mX z#*z}4=5v;y_{T)Q__9|~*_mdls^bMdR)%CzJ>awfELlvfdhsL){kL&taMp;z&r119 zD>(}!DMJ$RSRo#i0j&{SudGof>fYYBu%Y~pdte@bG7;OiIjalhb(-o--T57}%Tp`< zH&ReoS0Fok_p)#D&p;C>Km}TNLh5t8u9~vV)Zl~T;XNo?z8CZq(1Gpq)f`N(cDp%b zf+TZ>K}$?9k0&gwF)zT(Zqf7Vyo|o9i-)Xn^*CwvqTs0zo?I ztc}5v;TY8lV$A8@;jI#?1753rW%c>pVc0}DT*ptid&L1rCUHzDOACmZ8XF<~z~H6` zz@~tsKNufQM20uwa|m%4Tfq`aVR%`WF}jfqrR>EUST{ZZrpjRTLjCHcBIT3O+A6 z$_yk37|;}Cy!Q3VN3{-RA`QSAu8QRmNSC%V^|M3iYo*to)-c8Y$`iEB9~uF<5?46; zJ-0A4gy;@HIFA2KbM-qe1}801l321irg_E#sH4+&o)JN!f0AKV;alZiiME@G=uaGv>&( zMwQanT9ICS!}G52d3;w6Yh?6NY#hzx%{=y(Na}zQ zacg}$eRSja`gAxnbNZ@r4v2!rb1}WoV7j}BAgKY0S@P>V@_RQR|CdMLzP8urXXaxy zvnZD3Wjvpw*>J4_CVqy&-qwomnHNAS3Kv9~&E`@qq}}j$047|Z!@T0EVhT%{9Gq~X zVzQY3utct$cHQt0v>2Sv-Ri8-*@mx2fWi?JE&5w$5uKX?k(Q9*?d!eY1ZtVNEaJc& zR9;{jK!@aJ?=WTh`MJMB<7s70ma~ytF(`U%Qp&$)>J*Hz7@EpiUjmL_Iah@}Ti(d> zspGMcv)n9jU&iM<8jQxOjv*&%s?th=&h*Ad#%L(Bku%=UJ+=?s%d#u4iH}+Wc=wr49Q#I;3OeIDziHPr4|Gze2phdY4ku< zeK8Vn7?u2WVVa+9jNI%rim$_kzxhz2-Vx9A1-D>L&OT0isAk_o&XB|FMlNvjGZi_A+$A>R*AFa+*yC-$` 
z2EJ>l{-*1RX|{?7{P!WpPE_fjCI^n*X>=|{Wg73j(G!poW@pP))4;wd1LkGigyXD8 zDE(I;o+zm;Y^}(P>R_4!s=_|)gB53R;LXcUo9q3$XHu{T+*V*Zdvn7Ww3Gop1UzSJ z>&-d_eQ-c+EqHJ+!E&?12)}E*$OC?CHKX9<`j(dwOFYDt!mwIp#r0+Go|YNejGqsY z7M1Gy^WT4#H=H*ORCDvOU4kuna^MHaQ)fsdAZ;NA^2bH@s;5!E&P=^wMSU78J)aQw z57$;)`gm9K0XNaS;fm|;ndXQqpWfl$qz(iO;~Le6wc4-m?Qv;Lj9FI#JNxxhc`Iiw zAmC1aGF}P5h9HXrFENpzB2|_vvG}C5#x+5dS18HS-dW-0-gp$?C)ih#d0o4QT4vtL z15BQ|e7MsnWH~o|cN=^HVE^m$9!J?B0Uq*;wV*QmLY*dPtbo0jU6BGiUr}*x?eV&E zFCf!dtO30vcgjz$l*b`N%xgi-=Dm_7M;`GtkUA7g!1;JF;Xu8^@9ycD+oSv`0K7nk zmX6uTT1|Q7RRDyCnF`d8nu)x*jM)U+8%L{o(xP61MZF5t2QvQa*Fu+9lgF376zeWu zn?Uc?c16~wYdcJyV@dUYtF&Kf8K794kv%%t14Jelv6H=vouf0DeApV}8nXF!$H6W3 zKGAGr@j80dTm)k5c(vT3LWopAS4HXg)cC=tTi)k-ZX+X7Tjnr6?uiGorx<>vmpr(w zA`e-AYcPN}9*!N3`YtGMdMx45Zk1&`&+8FO@#n zzqmmpr>(5a?$`z-1|OZrQP!-wCgzZWv3G#r(#J4A4Lh$@irLC0}d3rwbW9~-n@BK}-3NbYWsu{{Q3Wzr&W##$8S~_?f0w23~0t4IAzdjdAd;6S`j>N7)rk4&QlL~C4 zsq0(se|#3Q9aAB%g{I72EjN0pqJS@;A86o%*Pv?vGzhDnB1`-p#lQb=4jO5> zhr6+ELw)9g`(ljqmekdADv-)?|g}H7g?Sf0EP^3KR_iK+W+#8eeSIRx$Qe7>SH7( z4j+Wx{*7UGc{eTxk(}O}lDfRIrXp8iBWY(jXv{=#V z%EVt4fE{2)paS621cjfhJ>yaqx?HSVs~MK?tR}({xEtPSt^XX`p4&>@yc57Va3L^j z02yCd`AvXhU{`aq9xcmDI;QaW$34U)l{iTWK@Vw)C#?ii)tUbnd2bn3RrmD^Zxcln z2?3>~k(O=@6eN|D4(aYLZ$v;^N?MSR?oR3MPLb~JdI#41f8Ogk@AKvTaL#r3;3b>A z_gZVNIoBLx{9+hu@YXAZ)XZ(Qwu+?cFWDIZjFMA>o&oAhT-?<6iR8gTb*-1zCUt%9 zt{7?SJMQm?!Cu3ZBryEi>*h>Qh%At#0J^oR(ABi0MS~@rFmp#fynt~f0(t-7!7A^p&E2mwqUT-KXF|WI)7_&t;%Umn%jOLCta&(?xQxeA1a5kf0^Lca%&fff&iopw z_1mgiwIIhI>Ag8zjK=hS-}1_#nmX7NL-u9asnFC%O~&Arsho3-5SCZHo&C62M~1)j zK!4wsEjM|xMbIGB@LMt9AejBy8BCmJwU&tD%O~^pa z$Vi4Lbum((8!=UVajrc)JUR@-oj`dioLv1}ls8&k4Um4G8lJ3v>XWe;<^~}HT|Fuq z>d@rAJtgUvue;6vxqGg@&;qJSubN})X-X&?GYiWRV0$^VK8fiyB&uf9%3fyXri>3K z=diKnrm;%cUx0A&ydG52Or?*UKxSt9#jkKqHD#DdGPbI{zoaCwU}Ic9a%^(T)=vca zC}u7?uOLU{MVmx&Z>#A~Wu2aL;x9C#0Bc0v%d@eqE*rF2@-khqRX{E2mr0N$+%45+ z=Mnh;C*dL_MhAW;Fc3Xnqj-rP{Q z^T9ifo7l|iW7bgGF17Kk7x__9r2IiZ*q>yVG=w6Ef&3Mf4<+8Q6?r6s3V-iztoas$tgS3M-)`=H{m;|N@uqe4(m15Xk6 
zo_*8uaB3*qy~ATGJ1cj*hU#h&)#^3N{iLY${M;|3pyhZ3RCEjLP+W)_;kp$b+=Y}3 z0LYGpZd7gpHh_DxlOghNSs_$;taiNzFF|OG7mf7}^1CM&LR)nCQV**W-W}RUB@gt} zrLqaNQ2les;geh|V2D6*f_bb6Nt5tEA6sN4j<&J($`VtFpEN`t=+{JmZo+iA&5ox| zsZv@=65QTy`kX6@aqfM6Nhu!rVJ2$`HhM?fa8U4YMvbpa80daWOM1cIE{4vfu(b5z zf!hje7!c!Rr;HhGSF2JW75M|ss0UN+EX2@9tRGCqy54&}2DbqTkg#iDh4L#7s&cJY z?@C(;IJoi*vS_V2Oz#^vb$awLj7VE%d^-3_S1(Ap)J&$m#)2RO3g`vd1>tP^R9?@t zJ5+d_AFS53i8EZP<%EK>uxQ8WkEOLF9>%wAY7Jzg(`Lp0oEluNRqujO3PS4LWiAIM z4jxKXgM+B%T7_ibkg!hhxny|V_(Gtot6T1{QjfdlG(FGo;qH*Eq^ydf1nen1TzY0I zRQIzBEv?KTEtE--f4O70m zCOnp_ImKa(LmR8tcMJN}v!BUp`9e+Kt#~Ml_ZtxFKtMkbXoMXaA|Q6JVa5c=Yd{NO zO;dTy51bWxtCc+y>+9Ql{W`VvP8?lYiy(Vmu{ECGI2-=1C0{nM~39x-LIk)4gXKj4n;%7 zErAt@IdQF{sOHihePN&5e=1roXsWRNYzjyCMjan$nc01t#GwnFTBaP@pB5tbuti)Z zKW3rEVqwXzjVu=Ahio$hE0mK%aJh5{i=4DmJU3@$zbVG=)Itnkcu}wQO_L*sfr`_D zLtqnaz@ro7O%N4A@=6`@eB`xPZqAl{g+FYZ1sck=wVj`suz@1n+^qS_9l^T47u*qc zO}8xGR!t{cwm7k428KsReGFfiUECf`1Zo(4eq|}drBf|&&g^*R#N_?;@}o|-RggeA z#Ew@0+jo}Bm*nKmwz_118?C(ORLPHb-Cj}-!Yu;IY|8*TDj`>mRxXsr`!nUyF~2|M zXO&UsBh2jejs&c1_RG#_V*ZNkvBo)BlT1Y(&;CJ0!ZRZD>q(-~MXKZ8yCeIshWrJW zeWTf}Nfvhdib#25u+isBfz|7dUt$mI4Js4BKj4e{07(wIWaE1`KYUKB#DT2tQN=v1 zx*t;y=THgxpoRnpRkUJhYnJHCOmTAv?E$Sz-FShWsfdky<*n2B3Ux=`sym_hxX6pj zXp0e(i-DD>MZ-W_a6X>U2H9yuNSHk}nQP08jcp0f*7lTR4Esb>g)<}9d~w2akA#6G zBo*XePXU*%?ujqwMR`%lmNDBPU-XfHjBqqvJ)?t;U1RW7ne7N;?Y4Y7X$eGl0EHD; zGp(&1A2qQ*QekFg)nmSw_D8vakdl^pI%fuL)y{P@XXep>W(6CwoTNCkJwIqck7RT772J*zK_zV4^+@beatBn zfo1)pK=B7d_sT)v6QtjlToMuvHU;LWFq;E1Zdu$ANA`wg zkE7N~&P^mkm(Z6YVxRE=%vMF%IhvzLUBeNuD00Tdai_+>7oPXQQ?xih9hC4T2r&=tKX!hUYH9f2v|7;u^Ugt zXmd$s)-|j1oh}O*-55*1Q<*L!PgNdO#6(Mb>`5zrCET(|u4aYDnNLA)VFe9uFerj7 zaLmKDykuc4czdXTI+)ng`KxQ9VmMJ?)Y+of`JL%{+MGHCYD5)Sn*HAd`sX( z8(_gyXECA796ekAAs%zh(E0vMr|Ddn__2935{9vEy!*r{bNcQRgX3i%G;)o}%KZn< zAES;azbG2rWuHqvYPd8@Fl>Kc?57;@@rN}{bTHk z6w2G9Q|2RnzRO}xqZeWu%$-#hoYz#S?{rTQI)*4(9S$B`SJ0B*eVs<9zoso~rf_-O zFT=qo#hoZuzm+{}B_b`Jr>$MH$86kjS0#ws0AJC2#~w3;1X>NB)&mzs$Z3px_VLVY|pv6SF% 
z8Nuc@&hzzw-_UMM?LMKepYxjp>$5`d-|Q0?6&7%xI2O5#g@3illhbj%Z0@+DOyvnr zw7@oZOkVkrCdUEh&_Lk`CDt?D z4B3g4G2&0V)$|*LXxDkLG9yCvPTj&=WaF8z?Y3VUyiqi8xN)6=zmmD{i(>%RDRoR( z!OF=Embpy9B)YOS+K2~UbN7h()#j@ogi4CF+~M>S^_vhqH{fGpoVE}V%1#TKuTWmF zmp*!?U1~B_9{LJ?0=*M4mFCDb{t(Au-e-F@qUg$!g6%C}T#KO+8mu=e4!R zIG;gPnZf|SfZP{qMtb@0JLY_D+Av##DFd>vLKZbehKRPsoN6T_nV!kdv0lr|X_Zjl|MAg`*v!=Ib&k$1P-uC$A>p&! z+;)TP1yD#ZodCt!x6!+Rg1#UAUkjj6+Xu!K{{{|p54S#u*Aj{9UGn7wmI1^!qAyyYwo4pfQXll< zRS)sL1HlCpG?mEo|{8@(&H%Uls7P7=~Yv%~#=eXqaF-4z$U* z#VRF}s*n4jUEA&j-rcT;!aKYEoj#1Z+1_PGtQUVeMpa##-#N!yYm>KtF$|KlTVIxi znyElHmM2f^*8rjfu=Joguu9AgoCf&Na~^;hu%Vg8#snRQ5aqF^g8&!|(UMS9b=WVG zOc|3V0M$l^Ib1v1F0y0p3eq8PyV~oO4W%jL=0vc=kiiF_HN7LU((G`Q49YzmOcanh zf=tMv?9Co%YJjSED6zTSgaLKF`p9w}D$I7z@YX3){Ix4l_}jz&-yf+!ZYVrpBqcq- z7`&_)1l)gsaEuc)NRzb0Vi>!|`aGeQhI|DaK|BCp;=}s@tiN+j+S4~Z!A0?oNWNP( zqX7TUZdS*2(^m}BV_w5*_xkY@M%u$?e?@N`35j6pQc+GOtT-gUT7XX?@X(ayWlX2E z76F*CH(Oz>N$2wagBLvjUetuOh^V0>0Cb>~i<=W;lS(YgbC_e}do3SMugoA!$?!r@ zIkKe3|3Z&6QE2f=CiP^pvnk@iLaWcG5=Q_mX*)7<^RtPG!Ryd`@9pR@ zF$~(Xu2-$v_W^SdOzcC?jzxtVqATXye&ZjT9wj>GYVZT?m69WEK>UdMjE^P1 z2K54?UaqzO?WXWxAO4jZAxSk^id-&-r^Ux~+RyjOQ%=;?`b>-OCAK!}-3`#lit5<@ zcwEaj&iycJ@n@ic6Iga}0l-_J`Q4m+V>3u!gZ(38Lg+=CUW}TkC!X`9P<6nRc1OLk zqs2FxnlN})u)g#DEx6z+%*sns&r3@E;#!^`tm&X`79oR+H?NPg|6hS3OkNP~1$OK~ zhE`--qo<{)-9ct=SM*xJ%eewa+lUSsC}V1_QOdYK=2h3`Rb#>ffO@8<(zMG@tOh=V zX*rx6EIlnfw50Q^K{N>x&-i~qLnjM+9gLF$gAWP7AmA_B0!*cG>+eNSkItvYX8LkK z5Qe!6MslGU%BZ*2O)avL9t#!)94`ZU`?47_anLOmIB$#<59;&2;T}P1MgixW`9a+T zp`C7X@q14u!+J=pS9ne6c2o9r_PU}o_-kiOWazvIut3aVE3;O<@tdT3Bcz`6SFeTmzDQ8#t%#E*a>(n^dlU^(YC zBTZ>TYoC?P7Z3eDa{=h&{*(expvSz>4NJKW#4RMDWG%ioLl!f2CSz zpEhd)XG(3X4_6(jXP18H>Gs<>(>2`GLCb7zg3KiJciWFFzAiH0D9%OLdYfl0bc?zfjE11v{3Wk&(1M^Aq0}zmz5=(t#*jz8rY%@7BKqM2#%hcfY2&Q`Pxg zj3X@Ic~aErQnXYEju!;0_`7lmP&d4&dk$L+qpIBbv_97sUUE`70a|9QY9X-vI->1@ zaRbsp_YcN&nE@ptaBKT^p#sDN!`7uSezHd;a1y|g2f>CErF8s&wG$mYS7)~cbxjxU z6AHMK3wyQn^HzL%LFM@q7ODg)YCoOdIRYQDc z&hzK|A2YV{?Jqw;Lz4~)ktk9~VoRmc;MbsINQh-xPr23noik>E%&01o#oYa{>*pQd 
zRz-GuA|pM;&^+E=;N{$u8Oqk**W!A~fouBHe`-VgHvkhIxAuMXh+n#I%-2s}nv#kt zl{!`LQ$G1xWq*8Z^3ZLW-NNa|tQu9zhK2?PX(;q^{4}ho%Hv-pk^!WF=ctU^rmLTo zRYjJH;lfAvQge6DX<)KshyH+@IOtTKR|fWiR?1!7{r$tEA{I$;5?^%Ryw9il@8Kg% zg>Y-RYhKS*3N^J$%73F=_HuG^V7~!!q|lDKQN`N1n=|Xd-J2kR$k*Td)^03)t7Pi} z%FhvWbN9O07nG2|+U}i$<6ycnqecFJn0WPQ&A{B@pCFIp2H%AZAyYyBl?%F}icf|m z3VmB-nvLd~JR)8Hd7o2oDRc2NO2$@Q^r{wVFokx1Yt)ClpRE*}#(KjGLS)>)->Ipb zR(;i37SJ0j4x-`leomgJsr|<}bdJv0r#*U>dOr2Pv63>F&%nz-Qrx_c%cu(Y2emj= z5O9$i)3OIA(<^9-2bsvLR!D${Gq=I%o=ff!osdv0|3xi?8svE=V|xP9diiiXufV6! zy#hQ|!qQ6So(#V!`6bOvs!?c_j{%`x>Eo>^!%rKvtrx^Eb_V00lK0PqeYGD^ zD-4br&u5Ehw!*gT2ur{m}@s7T%z#j9A=5FY;Tu+yjLv zR%35n9~QM+N24#AE_~Ro9VhV>r`tit_0`uyN3CUUb09_@S^5>xUO8_&@)~GsHL4}M zgFrB-`^aYTkW+lTvvzKa-xbHFaWuSU_VS_M3L_Q%lbT_FUJ!=ZMUB^t)%6Xj6|Ftv z2Wb^(PqeJF?Gf%Z0QaV+f4J4a@>i?uH}GHeD3&GXdK4M-R}nI)GJBYtjAQLHrX!jE z42Q)v%#gcZcCXRZ@8RSS0y*QYz4GsBAa{8mwOqYgS#Qlv={daLJ?(VWYW&UdP`9^t zZssX9(_n_^rNkF1ND zCKV>=C|jt`5&9^kC^=8s*0wgG7HUjn%_GkeK=Sj-s=K?ZMS2G`a;3oyy z#*}*z;VU9(N3X@$$o6tVcW<-4yUN+!(^mIzLrV|TS`#%wu`fSMB|`z`rod03$by3w zxGk#{5`yuh<-wIYtXX0tPo=rQuwY84igTv*z5XduxrU6>GMTkBb5m5bZK~$t%@+yJ zd-{1NEX;!P@^t|TFDoCtiPWbD0I{jNQKr8%7@(Elx$2S?Ftgl&l%-eCN9}<)F~E{{ zhcEa1QOkgBan>S>#RVMZ;GFE(9>Qk|l`RiFZ59tUPDA1Yg2^=d{3rQ*syjuUCyu4! 
z`PI8+LZU(~nCJ_ApI`Bv*bb$2l}8C!Vc1J(6L<5}-`e_U6cU_&J<>I)EYGrKly}zt zBZu?&VePP>=erRa(H7wnIhm${ofx!}SuRI9wZ_z8C8S&R-#Jxq_G?R>J);-Z>ZSWM zMqAI{*MjBVs&G6jZ2sOjS@h>UA!?X1ZE(K=rfqeR<-N8+k^dr0VhdwP(?(=$OYA*P&7 z&X_P8>U_YN*BLojn0TlM?7g)^%_AWI2b&9s)2L_;H8F4#gd7j&TF4!oZ06~|++UjU zdB6ITt56JE9Jqd}_=xLGL_!6*Iu5-?m?=QVJJcR<5a9vz5o>$-=SH^A$k60IMF7V} z?zPm%@PIc(DTxKAdJct-j0}Ha9;hV9=x5YVNG10^wWqiFbZ%};(zJYbV6ltnSlBG; zBsnau6tFgBl|`>`FyG%?HGV@_ZmGb$gDs1Gx*xLMfFz$*EKs}IhQ`zT!NcQ8w9QFR zyTGNGeG3Kba>hOJ%ntn5()O!WyalN(Q?5%6(JTAG^xi5%_EG0I2;nVn^ZJppTGvkP zNOxTK#?(L0hlYZFdTDc4(14RFKpX1d_5kQc^1={!BY9FDSUrE6r{VCrGpKRhX$(Ne z1sS1wP=>$1dIVrWYXlE)&rCpg4j~xO?g9eqO*!xq*KaP#ZkNBtz`*b^#D z!EYUKCr0C=s6B<6IE18d5U}GI?__suCdF`CyCK8D=^f5%`J<@^`}82@3>+h-e=Pfl zssw!unVFmFQ@R4=O$e`z6E%SBEq)D^ii)nIvDw_(oQ{fa$s{qUw8b~-sQSsVQY1d; z%_MqqL7jPHR&36s1ZRQl_9q%8<~eXiIJ0S3{Y8`n9|li0Nw8O5lA$#BtSp=!n*u*3Ke#A)VUfV%dtG6q2Ib@jS9B6g+gB0^wQ?P1yOqT)a7X88I0}B zgrp-lueUQ=a3$Fq)_+Bgt z&M1PJR|6cb$CQY{fG*!?2-RI3h=3pp6c`#wQ`yl`d!;*<#mG$q`sF>qn}F*G!u#rf zP6(CQTe{;$eDt|TBhU817~F!2m6mBwM(Ap2%LZszXBQfxRt1K&U^fuDM1EZO*~1x#Io? 
zXxh17G{9G6OfeFLqHMpE!in3ClZlI-hiL#<;C0d z--BEN0Njop&%#bPa19Dr*|3uWGSCnR3vi74nU|ao)&E;NqWhiU@;}Fbt^LOB#{R-~ zUV8ERfJG_&XH&HQFUza{+V<=(w1)HCuG}d;6(lSW^(=iG{a3Sadx`Ngzut93CFgtZ zoD<_LwVQN*+P)!B?$%7qg%RFQ-a`Kb&dLN4W=CNA44~sv|C;A;D@4TS*s3B=ZL!({9jcw=o#pl1`YaJ(0*rI z9XDpB-O8k2A!&(i``geCf%1VUxfjgP&l~V9dFFAJD`(;^q+CFt+$0az$Q=^VfR=cl zLZ3#<{(GdN-4_Yx;<*<|nm>fV4E{a@$ur=vlP;4=7Dv?aAK(4bJzgTUPxs*zE`s19 z-Ys{J_avGBUYE&AgA{S{WfymWxiT}s51M`!rN3vmQ}D2H3P8A@$}JEG0s`oVE%ya1s<@>5ElQeXmoyr!fCr_m{q*q}G0d zZ)^ndHqo(TBmTnd?++ZyKXm5PRwKQQ;2wRyu41%)^t0xPKzw#)2uDYJ=5js0>G=n7 zRsDWj@gf3c()h{WuRhV*kKllFcOg0s_W1ig7k{aX2!{WB3LPi@Z}H%;j;BUcq9qB> zr;`e$+{@UkSy@<{WbUwxANg%Sh(Od_C81WkRvXgu8GF?QNtkQGYMi&s!p{M#4tI~!=| zDh}F_?Dq%{=WDD(=zLeF6e@|`y5r_l4kfi@jNGU6W$B$y@?|xG`(g~vjP>k7WKu^q zN1-e;|wK*Oj3-*igMPu>4|*=3H~s>f*Vb2oy^Ud< zn0VbBcx&e-mKK`MJ8&xR(Nm8KsqSoNP8&@)o?-P-meLovoBe*|*)cb_TRGT#O>(!l zu~`SzpIzL*($XTXSae0!%2X7DU9R+5X^OjJdkr9MI?M7^Mp?%PimGBHK9mm#6tz#R z&ZA;7N45s)JIp)RUf?E6JSOC~a@Y%IKV%QCXi3=e>!wtbt+SG)`!&Q>JWdtkF^L=73=Y{DtEi} zm+Npm8x|Hu_lb?9al+jL6r(`q7ASenaPij{k=(z6XUFTJ>{)uW8PmkiZ!At)!j{U3q=X9V&sITzS##*;j_N>*iOhFmFi_WG3MAJnTiSf38`rtGA_y|1^QfG-4|_ag z+DS)?9-w>H``u7J+0XfUS5jo(VfJ#cyyH}T`j>_`(3>DQSk}g_C{wXQr){yJHUw+s z#*p>*#zK2kEFmv)0Wk-sElJKOBL-UFEtYK`RVtRCPi&P$LUF|rczO?J745WGC9@l3 z>GHGdYsZm3)TnWB_zJyf0pGda7F-(IRw|9YIr@6A^I~IV$$d=&MffXQTHZIVGoToN zWt=w%#Ho{j`eag!2U{W-rb4R}qq83hZcuY}>+Y6SwvX`ulRZY##!!zu`IS0qHaewp zWgshOH`X<&fdXh;ncxyG%{fygP@C?aOkU6*xmgWp++8m^R{X-uSR+>Em}WO-y1?Z} z2`jEo4?qo52*m_R&xI68N9(>JJ;1M*eQZIGN6n0rrqF^jk%IWqC`YShO zuV`GcpbDORG|^>e-zb3OXIy@o5*BtBt*^KDWTkCSJepI0KO>|4Sshsi_t}TS z!Z{N*8F{JALbmZr2NvF{4(-KhkDSd9dwl!T{VRC{!#8R^)Xc2gazj1wtO3KVTO#)5 zIq_LnT56zoW!(PtmwtW`6tcKqn9)Rc95@`ysNVn&E3s#q=EBsWHQ9N2_hkaXF~*ET zix|!8nFIKRVIt2!8|ED_FVAPAR=RYiPNE z)lko_;Q1F@&@_hWIWq<@^!&uab|m)oLz@I&y2gcSX*>F; z_6$<%wEkaD36jYwzlL$ADf*ryo@(`Mi5|wY-G09I=u&o5hrz_8fox8nNBTxar`x%i zAB_@dRC3n)WR*?)y}_Run_g+;u-?A3Vh9^^x&uXbdP|h|bbah#ON;RhZ4YD4*&J$S 
z6en(})=tiP*6ng-=jEgx3h}5L*RLN}R-Qee>=XxEy1EIQolN7K`e36p6I!}EDk65$ zQBW946W*bfdnf&a_31_ok3FizfHDrbUOsu|=T9DB!~+8lwA`R`0(xK#Io}9?gD8zg}}R|osy>w?TQs4 zukX`7piRYDSr}drSpcmLKQqwxOo>pBs>tH+uYW(Lja0t5(y;MOa<=D-`x;rWLAbFn z;g!y?CJXYQ)5+)J5!)O*2I7y9$a}($&O{35Ey8_$!P!Vi=Gg|%8aH`tO1@NnNJ>cO zLzVDvOvqumG7%cIAV+HX!A z%2h^lx*jur@EA1Mg*V4InX#ZLRp3o@cRvB`R)sKD2U{!h-lhdADk?|-9?(K(!JWtP>m({uYpyH@#{sCUzyYj&RIUfk z1MRw~n-^C5LTot$-o1Mfs019rvM+1Zv(M{U$X9@#*{s=h-cHDYm!6y1-(zvcM<`f6 zsYjO?I92M|?0BqbV6r^3pfz!#egz1^o`v1u(*I8}6F)JLc~ktlDi~D`mmF?#_B*C&&GB2O`elPvi8nId)exIgCMYV*9Qs@v3&K)kTyN%!B?_wb0$ z{es`e&&e6}jOj@UUyQK2))2CJIDPEh0_Gj$gxmPR5?vZ*gwX`Q2ypXnGdpl^`OR5wYM^IY+Kp(36rvQ*Mwq}bov?+jL%0?wRlKtl$Q@nNWSf9GJl zR0@!3{PuL10(9N2)6;1PO#(HgS)f;Uo0b4rB_b1=Sg6IHKd|E{h8+SbnpS_L(=!St zB+w`B@iOERPH*!tSw#pC3h>Rxf0VMw{@_7b>!amO{IPR@k(fg%s0ZCtMcAK-&*PBc z!%r3v;BmQGF0@_H1mG>NO|&Aft)8Hat9dJ3uC>$C|~v zZg?1`UQXI_d%N*2bXMU#+9wP^!x%IukVtmeib6$EBa1OsH#QzQ*gPzGueL&%xT*Yx zJ9;RK;BA*HHr9_ywUakde$Yo__gETwap{N;_=u-vF|d)G8`JNqkwMcpHv7fxEHdz6ICg?N6h8n~EMqB&D z-#ERdDm-}IC5^*cMdDzW-_HIAJ$+?UVp9A00If~iZnL1Fwc&^op&q}X6HnNK<(evh zf!22I^KD|{!$T_-VM?L+6MlZ?i90n^KZ9)5tbOir9dT>o5|fRTyst$k&^~)yOzg$h zy_JR8{)c4H5R2*Dx!IVM45JGtvnx()s>`FXtxb*;Jt0v`s?kYq2X-Wiq(o(`b&KqBVC%klH3TMm%hDmW>oV__d1A=sf={=}VAztu5mSP%K${f(v zn9FyTouGSCh_3Jn`3h+Fvc6ixj|iMUA7KoC%q;YG$L(94b%hT@JM*qapxf4Dhzhib z_rzM2wmC4mDc?+xeYzHLQqJesANhrKtaCI^&}q;8#nXx*Ym8zivEyB5PCDHu*kSeI z-4gV~#G7$_+{AH`byB^CYyoAS)1su*&|OhSjI+su^rD@4_sfA_Ew%}S&;siZ4(znG zW-2zE2^Gc5PPi0%59lR127Vg~^7#-#GWgOXC2I1S(0nOvnH!29JqzuTUOaX*TCcgZ(+UTs1;`WP;~n?A=KA(R zFk!^q{h~xfJMP6&T&<*hP+F5l9;WizVPny<#xz*Sk#;iViLLchg>vPIL_-qd$#PU& zCJz83`qu%6tOX38-{NpI0W3Yq>v|Wqp_grgcr;-^|8=*2EK>>cbMrGYl8;u8-heoN zUGp>rWG39#hV9SZeIq_p=j53&-<#2LW}NJt^|Ee|x+M*p-JX+V!1r%Rn|*oWV*Giv zTcM8wgv;l}<@nPr;$58hN>WobG%5?ngjv(gVx8Mdj3w{?49HwAY?)ZwJ8ktSs5q}} z_U*UGPK{KuXzy-M@N5OCxf@*)XB!}+j^5tgHSlMqT;pK zP6E5K{UhHQGmbDh4oH(~kSYkWwRbx)u8Dxb5A@)GxL3t~=_y_47patEhcct2v8dwY 
zfu&4}gkzI=3UekV)`-6Dl$Yl`ItDWd9P?@&R9W7b0hs9n0$idZ`%47Q%%qLhO>skD z)!oNpS!zWHZtfQ`c+J(oGO1%HIn#LXu-|>f4<7j3;GOeb0UUAjZN9aR*`tDx=FDb$ z8<=4Pvr$&(IVXOoC|`Sm4x4)Jd_-6!X|Q*a1N^6!*sj;LBIc>$)R`die3b@gE) z;~KVR_bXyzd5raz$9t-F;T|5lb9)QqTlfo8#e%<|HjP=Y)1KX!CE#7(VZ!ZaQt0&H zk6Vx=;e!xZyr5hkGMa;r(th+O4P0QHNfm_U_U^LN=Gq>FZx(YRJyrU7a|0?)+jFvz z=x;UAQ!+DQ@Kl&BjA!!MYfahFg+Arq-7Q!vI|ywEq3h1m6&7kCP$*d!cXoJ1Q!xLs zQTxo+d1tBx(%AI$^sTwJ!)3lTq|5uA3=zy~qi@T(*m%m@g$ztd7Gz%N4#7%UInc9L zty(wYzr_>5XFnYIwFxY7p2RXwZVm9B>Oe>^sYKv6$JQ@wpxZUrt4MDQjgf$f1vIFL zF-5#E3~uT1*s}Fbk4`$H(S-a>-a#;QxivMsxi`d-M8s z1Q*Du>Pk$Rb_|wTPHtxe?djKa^R=eC6IGubucCJBckL#RMO!8OlgPe(L`!UU%DB>6 zc9eT)?U*9bb!W5vKCwJH1ju_%PSxVf`Q9En9cJ=t!+|jaN?Vg6AMv((Z(Rq0JHyJKhJT1WaV>DxKexeC0kdjEa9l3Br>HNOgI`y z0`|`2%&f`x2i$tiqd9b0%XOddZ#z;&edOD(`0dh!z_|;#Jlh9RailSP#REsqlMd;R z)`R5&0zQs~ncs^gB_hT*tayZa z#H#xHjMh7=py&JLR-4{5B4SQC{@m%Oa+{a!?X1yn>cn0`_e;7IY%Lo%8XzW+qmz88 z&4fZYE{>Q}{&7-Xp0+_lY7X+klbI8D8lCt0rJ?tP1SwjM#Itk0_`iy8jm@;==NBF= zQSLSS$P^IiF|R}37BeV0bA@Qz6f51}z8qP3=wsvE9Fn`gPo>KE0)&|tAG2aokPxTn zBz)+Pr8(HdPmxBqf22l+CMqwO9I9iAbqB2}FYk-DRnU%ElYP^+visBacq2j?DqT$w zmI+^+uc9pRceHkFJy=!3rn@UF+`T~AHE^&!TN`b)KomzNsdFlB@nOCkH?P<0iOBn$ zH^1&p$<)H6(_t+tiP8;(jUYVmQQnFt`p;yoIPO`puC(Cp&+zt8TC?D9B_v&$c3S&R zdVy!|d1$Cr>Biop#swXGG_>nF@6Flf2D=!|iHY^%BxlO+%!@dyHNWZ-4t39n{(X&z z`VR?&RnJ=xQBn`)Ckhv=ZYEp0(tCS+6}7g`(?VLD@Uf8mc|zX@o;&-&)YV}YrKOrJ z`ciAPekDbU>X!B3dt=1*<2RBmE_|(sr*W~d-)d{$HB4o%-hD*mCaR~;QZ@N_Fo_(9Dp%cx}~9ljQn)x==L9=;Zfz?Q+eMqbcE_PXR?1W5KLBFTQWpd z=U+>$PQDfQTcE`*SLW+xxro4d$QhMUV_f$w=$EPO_p_nWJqEV+L*?(4L-|qc?LLYL z{J7B6>pD&&x~f1e?blBV#VgkgOVJIl)6uRJ#8oV3d>xuXj`NqzMU{<&&@kcKMMok<5()$gr~R8|@A4iG*yS5-ZFITZ{a z_;m-@lR0uXQR=BKd|%EhDftlg=Z=*p@kXNFI{%60v^7Iq3PG6c+HN*c^l9B@`g0fT zo;#8o`^cK%rAN!HSbjzz`gJ4b+7OLpeESi>+aOZ!e@FA-UXK zt@Xrc@FSea(Y(PWaktTCCQ(`7=C2D;#g{~)RygCfT|(Dlt;)<)RaG@LXKDi*kgi^3Gaia_vY17APJ2Ps zQF7wY@kgy^Pm;Ltc-g_Id582wD#mi2P_^psY5a)7Vk$UYb~P;{BO{X2cD6N)NxMBS z@wIvenMDoaRk`w-C8g>mJs-Xl(a4gSD_kOX_XWxR-lES>mu<+nUMEw2;FL)AeWf0c 
zj_8l-f31<~DLHX6rngGj8So#6nveZ`3iTwi%%GVJ~*SL_I?&4HpT>CrP zmmfK*8{d>uXC*@zFqGH^y+xHwVfX_%-o~+N{FsNjg;R9d#j?SIKS#JGi0a>uwpw`S zr26dvV4AN*i}QDwO0K;LLLgReZvQdYJ0ElYzmo{IM&7sIgnr!!_;>!sfBvb$<^Qlh zMcDnZEOhbF-8sLXn8Xq=m((sG*8Qf~iSOZjzlv1CyMgBVXSH15DlbX7C3z8HfLNrb zOHFNFTnu}A!KmlE>RyK1pR0GNisUe#-Chh+cE5=Dtk53$N-T<_T-QYPGgmH6wl>-2 z??`<6-`b-97Z@ovpF?$8cU4CEC0Y8+Ci76@IzrpMCskXf!y=)0D$De$g%LEhwjMQg zKRh$8IGC+*N8RJ(|8)-xg?eDA!_QQMc#3l+m4%#(?6*iE1pCNkuFB29>^IcZ)HrGH z5MM{q3O>L5xj#Hok&F1&EJ>YYkN*ncl3$={t{Ned;u`$=>aXfMqBFTCHMRWKMZ_c2 z%j1XtPGCu!JvKyS)J5Zyzmu#pfUaEgB0^Q^TEWfQzw1PRAsrpjuJx_p?SDTzTzn~! zwSc8DL_NrQ`|r>q5S`e6zZkC0@xuQee^Qgo{JZgl$jFEh+8^(4U{Hufah8{qq)-ZF zKD~@sy3}Cm>guXcU@%-@x#WgzFj8jgh;k{z@8$39F47P_$+zG5CI9e}*TnH|chtmL zQ&$Y{_Bu&wsu*VhnTr;V4I|notj6zzsvI_w_>CRqswmnX zBw(vpIAywOA=%4R+sU+1u+xtCY@&2iAv;rrUbH-Kw%r0-GAg>{U0J>GA+YWMG)UkI!J`BgNI@kQI6 zsZ5;I#B6iYAhKd3?b}0Q8}Bp_@SV=T5r@}8U|yWW%gZfV-tNy8O8CC&<+lB#YAv2!y9?hS0c5xWsMw9`ValEvlPlo8-1 zmr*QKQsMu-==FO+AvX@++_6&g2X?}8qz}p)=s0(AUG5DSS%(PFxiSaPT`RWr7CxV3 zRqyk2%t~aOzwbI*6>A|Qs*;51is$z?D2~j{$gWbys;oYYVBr>H3KmOHWU`?<&7y=6RhK3FCoV2(Ufey6}<5y zT(f*6qMC(%IHJzVN;@5*@YHzGUnYCn`g$%Un>Y)jTr`m`C)4O@%c|l>M)&X<*S=$Z zA@rI44^uDocqv{cYHT2fciLpLkB4tHthL`@R#JDrmr!j|TrM9Lqu74kZTqx-Xz_qY z_d!^yXnsA;)8tE<0gMkTnO@+3?b^%6DGL{0zZ?FbkOWtXfa7wq!~_2oDuJk)lwlsZfeX)n5WlFaIh~b3el;uA zwQ?~4hfw!~2y^VQMSNUziBl1!E-%@KT8w3##PpXIpLCL|eM{SFS#t4*a;1gqE_&Kq z+=x8b7vbW5JC&qY9ADF{U#`ddFepT;S;+n+OCxvA4T=05erCKnbd&Rw8f=6I(n;VK zz79l)nWy)sA@)&!72i0f2Y%^&*Ji#ctlk3HF(q^8_v~}UzeWM27f_+MV?q} z!M>7ZiITfOnu(ftkoL%uCPzLX?^1HgywzLeV4=>YVN9X6xyz_t-@GuMmMp|zTEK|t zCM%E>o*x(CsAg%o6YEa6@bkC~>_xhkmiYs*3=k$4PM@v~QU_{bQr)SMFKLd)yR+Vs+9=MrB8tvsJcxn$T%?j7*; zkwhKCwbvX}uNa4;{fFG-LSi=@jj&xQFph5Zn!R|yQ(Li1vCo}z4V^HCXpHsq`4QkJ z>)ZL1yH(m7Izc9JM9ZvCmB&Q%QmcIiUAKnQ=)hl%EYIk(VRn$NVfEKl6^=~Y=s@8f zQt$e{$&Pue$A#Cm%5yYOJobv++kyuQi#f6mh6!g`W|SI9w(f68jyX)-{&g6{_63+K ze4e!!FIG$<4&NK`expGS$$g_detn9MA-SB1G@*KEmqC$-FNB4V%8UQX$L>NF-ZXkr 
z^`3fqP4#qtwuxk(A)B}2Z*1sB2|QEh8q~uyj8x21v~(OY8x{Y-%AE{~=c$~)-uz0P znZ)!l)p#qgh2yP{FxzG$x8U)#nb;>^(@0kAl!Mw~%s-5udB*eUdp%*cuZz-GABE3N z0x4+IJBUE9vdnGUxNQ~7Nho>74Q0aVo0zFZ;el^(fYd82-I5L7==HAhdLC*whjdnX zgE9{-O3{OGt)mvfl&?W`3$KqfdAXXKFR&)2$8!2ku^aKqGb#QPL*?aTJ*L6a#)smw z)S)|Fp_=Hk?_GbaE>-e1q0_MVjjI-tn}(^)}}<1i6z% zhhs;@>F^`5e9f)Ic5$=i!R+ARJFS26BzPkI{~O1$+s~YCeAyc-w;3oDuEtdvS<1S+ zwl~3gn&4XKY&J2vGw{q)rz7U{8_xun!^D^u;U`DqOxJ>2k&xlsICG*5J3Q%R>98MJ zt6b`8@I6@<*lTs(@2@jHDU74!-|n$rJMtMIeKNqkrZ7>Qi+jOlTGsD$ckNhhK78)Z z+02Ap6i${E>(1YJzVjIg{oT`YDYI#^8f;~gZ{NSW#8HYz+oYIHbQqsZC!MYct1MW~ zbE4ZXh;BI_$;MLC51cqc%Je4Yc>7p@?=*z1@^q-9Z1rwe5@GR@JK?GON%<{fgp=Kx zqw{*hTE)^TVdZLH=JA9xm*Y@EXjTmGW~#v1zLN8Sipo*_T)cSYbRsTSap&UV;;D0M z+wl&P=X{B_A{RTuI%JiOi@P18yUET@GseenW1(Z z(#~ICg%QJ`tdx9{Ls$&pfeqX4$`;=$DJcK-60uF9;`}5b%5Y0m_-hJfr zKXcxj&kG;7q0N1MoXgkBBx~&8HdJyGsQJYy2P9U~P6h;p-D*WP;f5(V6b9uq;k&^Q z$g|TXKvLi0!pNK~%mm~Dwv&SYO4lF|uy|v``=_D#4tRn^?!pYVVzEg79q>Y8F06xO zFc-PuOo=#Wz1X<~>AM;4wA zbEkgE7q$aGl-nQDGYH9)Z9EPVfcg@>F?FQiB-v%W0D;wC2%O)N@E5=?F0DeEzOa;o zLs671DF)O&rXI^{2&+33z#OJ8qc%9nFJJ_3R{U^wbXg`G6n~71E{*fi(%qn}>95qiW53fxALahm4q;NB5=&EE%RwW1E%JTz{>&0@(n0z_hLLArV zSse_{lJ&~Pv!Gb|*9STZmnBVmXfD{Z)l=SuG;Oo?GV0M@p(Xy{TP^x&2$zMXhMuG& zG>9qBjnVnRx%A_pk#ILQV=1p;Z zfnguFhjmwCPha4YCpI&{dtq(+g4`}FYz0QfOTj3Iy36Smi+gKpJM&rqFEaP*Xf-qq zOz}&Q%RC}^H}F+~Rg?d`gh`IPAh8U`ZP=KL;Td4{c7u77HaX7hLtWmxu>EP7{(QQU zQn>H#FTD?E&Lly+Rwt&9O&A~n{YZ?^;JRa_`|yje-VLM@41sJ$IilPQh8(@OQ9uZi z##aZ6SSbtnOLA`O^jb`?aqxjqq}zu~jfeS%(}#`p*m;Ow+|K8B@7@(5E=M*j zD9Y6mloj?G7~#;~x@A+3uSMupX<&JDSi(_=+;Q0NVZ$k(d0eJ+V8CoMiizA-BfI+* z6%{%8+V8hKoh%W)iXsyk$R9%CUOS3OW?rj?l=Y~ZiwGMOj&xsc5@tf44PhOtb~4## zKoR9wNk@8IT20FWX>v%Q>37VvMAcQ}Y8=if& ziJ@B(e%Q=LsS1zfLxg`@6^iUo#s&A8lTyb>9sHtCg1Fwk3SCB_z$B8B6*!NZq=YZ^Q^st{2wcbFQm%foV0Ql2DgugeAA0sgE0 z3VX%t2><1+nrr>lPRpbY)@ir>7he++68cz(KYI`XxC3SZ!i+K*J0C6Qx>dK#t_)!X z0i@pnf$u>#?54X{69e!J9W~@xcL)=#zZbnl8IqIR{f!9a2F^B+z~>@~Z75lE2?S(U zhR;^SJc`op?{FyhX7(IIPTTZkZb1~bmg5HWSbLLvVH$5kS+5gooKuv7_=gZT-<{e@ 
zK#bz^(5QO!*viZe0TjNZpoVt}PtsbHb>AI&Fo+GvCidW?C;fI58K6Ygr6^t~>CLLU zTrFA%+L{kRk>0}(G7v>`Pi8B|bDL*JAaz2FK_gEx`j*LBoEgeK-n>ZFZ zRVd_42!2~xZl@Z9FQG_!eE&@Ng<|d|wJ6JOuUJMy-6)&UvR>Usq4xqVPvlt7UDy#t53puQ%O)BG+PbrgvpP54vh`odPWnIneR1j1Bw zuCGE++hL>{xx-)^QkM;7)dX`iAcp<-hX*!~M(t-vQLtR()q;Guv6&g4N~Q@YOkjH< zEO3og2*-e^ zOI|~xew?4DhDcW~o58qYq=<+RiVT5#c&*=1Ed+~nsY9Q8mHp#?ke+5G`Z};_9*&bu zGCSQBM*?_g068MW8A+nL3f&u-C`5g`+8@MVy8V86>WKp46IR+~eZ(?ajto159pfO< z3JTtV*b6X{H!I0%lMGCjfd+NgZ&L&T9p-_Py4OQjOemzO#<+?q0;{&xew_e$8DMn$ z$cf_Rq_WXgLFyH&h+}NvhVcBtP7rBbL)3k%HS(ZcA9x=}|!2Q+=1c-#MTySwRaCoZgZVP zY%aKf9kDR@t;4_rT;a}VR*X9qMIt2KH-w1bnkXUx^SEI#yfi5_)cMYmOLQZi28CY( zUzvNgqwkahY+eE+wWUZHar-`sElW9Ye}#ce>QdNi$2M3ka0u9E%uc}$dU;A4h*7&@ z*b-s@fIIWyl8Opv&Ybyh=7W7RoK(DNXzqLNDikN>O$OI{IU6yr?5qnZ2^?EYYTBwo zJypOEM+OQK5)2--P4BiDHV~J<`-6`PAr&T}j`A3$-S?)!I~Tzd-_u;|YW|lVqY_Q! z%9Jjv_p`=X*I?va=L#rIxX@!tG%Ku}+8p0ss zvR(&LD~zl98gp>UhpAeNnJhGIBQQ(Yt_>Fd%c+hLti~TbsjhIYC^;j%Z$c;#gff5- zSWOOWT}+b22HWrfZ$n5dLftg|o5gQswzDtL%VlwS$B?>BOA9V-N1X4^oF+7r=s1JK zoMCNMqBc!CK~l!*B?5H|cHnDorGt4yANN5>YyEyC^0GAHm|??*-~aburmROxMdm-L zGl)h3E$;B!jCBtveXUSdLVM;U(0>Fj*H{uG`eY7G0d~C@B1gP*6}o_x!^fZ~Ia~Ew z8RD+io>lvO>!rx*gip&E-qY1i6QL_fReo<&Fa9sGKk8$DDEA$ZFu-}6LSKjjD@-5+ zZV~hae5PsEu5Ac)rp&iz3J1Di`gCld z$4x8lE88MeVlSn?8LNp_Deq={H{l1012p`>XUdIaM>S^}4_BvijyjsQt)+gi}PVUWB6$Q)-@(##LX4cohRYg`C>a;(R@XtHD9 zLJ%wE%_rDS>&d-$4)}rNB(H25+=qWp{BH9f&a7EUuMWEmEYaLd#vLrJ20j_l<6yLh zTj2$kfY4BW6fst;$=PrnE7mmQi#8WhPymtBan)b$$akNCOA9{U;ERhC_*2{l?T?hI zc9!u5g(yQ6!31UsToEe??@!(~qZApSGUsc7(fF|y4r95pBKRBLwBz6vq6PXOu7LZO zCg##KJBCK|+(0gz_(|_U%V7v{_dY=u9|0LMhM{KOQVt4k}1<=Gkgo{6mH%>aBcUqjU3L8?HG7#ZBnZM$!xD&9q#41u*OPW zfn%BbH{`Y}GWjs1>`ZA8x~rx|v=o-%{?5c3QgSFs(m1&?`Z_~A4KpKln1kcJ-bp|M9x zAd6SaBphZ=`?FqcTu>B9?|L4DXm}q=-piW z3No9Cd}+XyhOI1g=qc>=U?;(PrU!+A)ghLAU;;yytrO~WrUJvM<#B9 zx^9jCtQ^62{{w5&{yPF|06-ze2NAa=eRmg5@_w!~9#Y>4ooD)Wsi8Ni$>->Jv+rmQ z7T}91p?qsP3=3re^|{uvoXL;0CjV1i&G)ES38U@gm1y!xl+;!&eko1vpe%Y{@;}3; zpBxe^cj1SmO#oUMedaj%lLLl$3}T?>LbiJa 
z4kpxSH^><^e_b&Kg~zat9|FG+NbJBz2NGSZr|Fw^!)rc~msS`dMW3^p2n2u|L3b&9 zAX=zxX})%`u+Pr>;Nc-DfRZnZOY$eL{G{f;ML)d_3QKD%D@B@N8M%@@$uo_0 z8w2WrutOlW|A6$=yAHWc*_Wfl$^At+kOwf4f9Ai{RDonF{vemE zCrS6`)jM7}0l4@ym?|lQ-JWeZp#%PT*q$UWInMaF8cMHAWYeq&@S?!_X|HCOn?d*b z+ZSBOhlVI~FIb^BkaMkQ@f4Kxzd7mz;WX3StYmnLLS&%ZB>Vz%Rm+HomR(i~-XF8C z>;l`_rUnrtI@EtIY#t-ycG&LH1tmo(vK3BIn*05+Qm_f%iRz4Y*U17s9=YRHXbtpO zDDy#JRHNE)&Br$@oh8d~WWC=KSPcm%dC*T~K5 zQH~C&DZJ1v40-;ynH)eC(d1dFK$oK+fj6*zT<^qj(<-VKZza$O{d%;Com5po_~3g~ zXK4_=FNh7e>TCT#T7r)OO5m&ma2WrKzTI2;P$CY7U!F(tnR=~qU_U~s4WtQy* z1_u8Ph2Xq<=l(0eprU#LY5t$|;s1K*>`bZh$_>gh6X#q*BK6e13PS$BJoGyKd7gVz zuwBn%rHs%OM$h9o#G$0{f3BSByDT8>Pk)KhzeADxpTE8J|G#tb;Q0ST#IZun)J{V? zC&5fL^8VQ`S5t)itJ9Hp|2@YO$So$O24K~Id!Z8NDK3SxFfna-a-Xnt|Gwgj0Fd#l zrc5B#|KWlFoSR?mI5xDdo&4WV`UElx2=eX7KA@F3p{YRVn5cFJoYl!${yy`$f~mcy zGZOX!cPXlipG`XO!MmA6Dx<4>ei!tlx)AcNHAZnE(0E zCZQ5BR_aB!A#q?HaG|d(YLYTmbHLflZwg4rn53p#69%WB)I3lCRCM6%xd}h!zvyjG zeHGxdmdMzjIJY5uRRd}CL++sVu1!hi<#a;ok6cw&>BQm-;H0wcBjAF-*upBmK*ydU z-2dcDNgjSJ7x*m1aice>03l{b6Ioc)D<{G87> z^z4dCV|fli-L~NycdI^oGR`nctsVuq-z<&N2~(_mg4)x=q*64mW!Y*C{rR8XMrISw zrt@j+X2Ra)wPaREO|2!|R}k9i&1ThR9L}%W=dGDu|Gk)GO1wE6DJH#~(nx!e%JHRE zhV=fQ`_bls9j%SDp(g}7B&NcYS4eN@!|7V5F zv;#{8CFlT%1m2+Nv1+8HrgD6eZ>WV{qca041(HNbEa3W~ittD)6<(=m^%0z9_hr{j zy>8hmPFE|I4`|`s60wpwx~*aC8yAH+i_PN34LdF zO+=u7+^y*LBToKt>J;9}8q@0nKpK%A8~Rud)Q3YR(#@!M%Psh}6pJ{~zt1f?4 zeDCoWRZ?Y0g`rLCls3DEkx(K+NVT2CHg=bPZh}|6C&HLX7m(ps-9PfV-* zer~k>Whz4&XLaK0!-5w$TiCOrbo-1+%`T0ok`PX4YRGz5;d*Kp%QsD5%Ll{k5#;nG z9nrAV(&@Th&FZ@%DdgY3+NM4?q>78ud4F5A#MgU$s9QQrNiVmXoK)0f5Qz479k-6u ztWOcGtDo9)930FMmv~{`bE!(AHgR>S(Z*`xtyItmIneOr%EZm?rbexU;#gV{LoAjbt0v?)T?d4>yNT<-jmzKV<VU!}-%H9<{f+gyRam@4Zz|rHU8T_gqy{fBja|QLbxld8H|-=7lY2OWANr@kVsu|Y z5j!(yYeEi0dW~*UuRa7#V274p)iw_@1-+keOeN zeajQsDITBN=lLiFe1r(FwrWH@3&Hz2KEQ-N=hIrf=Yo6^+&1+ov((5ZajA{dw#n#L zY)bN1L+0l+V2^6qHIZMty#B$)&LN8o72OBtsv17eGM_gWk~SP4=GBi6i?ph+ob6;; zw^+I@MLJTuggsp`J(PcoU@i-S@HKw(Or(%hY!_!gSk_RjIa!xeQr#sc+1|; 
zvou;2bMPFo(V$x>Rnz@&_x0;hfI#P0?8G9`(@!@8yQNi`l%dNWpZ=2%p-7BjY-oqnX z%F8M)nRClN)c&e#2Spkiv1P655)wUXxnpkCtR#Na^C0x~0eL4NQjSq8i)a+dz0|$I z2B1b+3|qCcXXYRz^W&QBCT6OY^k+c26-{!iJE)8hXxNL5I1xYD?tXF6=TTQ^(ILPs zWKdju9PU*I8(rFKPmF{=Erz+_#sXiLOUn;qgCqQ&EE31884 zpa5DNQBgj5x1M;De!EZ7EW5(oOE6+3^=yo-2gIGVP1`K79RHY8)nbv!dB3$%O54F6|>m z)70TRQW0aL!eNj7gFB4PKkL$O$g9_!*6z{DVb`%%(z#zSbWz9QSX6KIsKA*2LGGb# z@N;&xjX^n>ny|4$#v@^K-o8ij2eNbO3;ErT0)BA_H=)JH=T0+vL;2htS(q(cx#c@1 zx4eEnFP|tJukNNT36W;(Vs{r2g|=z(xG2|sDHH&oV3Y8Ceqgg)#t3egTKu>^ zDIBjFsK%`0WUn7*fh48(T`Ea0Nen-C?eI~UOa02%)JzrCXXX%*z010Uo5eS;^*Hsb z3p3AKb!j^ssZQmMo_s3*I9)~?k;;GoB_ZBt!^BD9OW8Z0E&U|Vwv_KytLw18h*u=Q zpBxToF6N%B%nSX(f=Zk>s%>hOUw0+1k2K3vW6MUyW+&Iihs2XSt24g4YR1*R>||FI z93!hYmJi)QU}tD27Opq^={;*wvKA6C@QM2|tbi$@TTDnK2d|EC#{}Xa-;aA+88RrJ z@*&VJY$PUdq68+o6O~bxbP_Frl$Yx8de&f3ex^8ifdt^dz3I$MQzPHDLB!UwL_(pg zSq805w!I?Ty;qd(JXzU}dVWrzZvld1dJGZymsKzQ6#ri|La6fKEGZ9+nByQz7*}k( ztnt0atIvb7TGp`L0l)UPwLLEMdnn3?3pW{Rw1$VQKd7=9@j1OGYH{<9Kw`9;pe*BI zxn8YwS8HC-zbMROOLi1Apu5saz z9s3Xy!}P?MSAM^3?BfgSo_%zpC%@fP{C0J3vs<`NMgQ)d>0#PpRYB@>&pMGuMIs{} zErOAH8yXkmOZCG{_0yl9{*&C`sQXwm-=#?Lx%*Vx%7@_UoC2I?9^>r=o+hPUtGxMI zg=|$iye*5jCZEt!T#Xy!6#pNOGJVYyxr2$ylO>fZ>x!Mf3U_-KdPfW0?Lx+n~BGtEA@#?0CIje<$_i`}hN9sU!jBh>%kI{2Gy zVetAtKF+8_hvA9sAYbo!M|OT)rN%HEQX#cB{w5b5kRZ$hgMzo5?_|Z)+M4axr#MFr z`fnM<4FQ*>F{{Yw5Zpq2)*e$-vIqc`>`{ zs9&QZrWlz2&y}XXbZopw&o9r|6~1Wn(7txK5y8@NopJvezh)eF8kh$nY-kKa?xLr+ zmE;~wQv>9yeyy{MAGI*&5K!MQptegrB~@B@27PhE4P~% zDM8bk4cDaUuN6mY|1^>>_w~D1&h+P#Gee~%Q;SYC-MiFR**7&&b#PWohKNg|A<8us znfAxN`z?wOQYM!D-Xfrh1?A$*5j|~YP0nt1vM~>A^Zi7weP)LX@gD7br!9L0oM4=* zT;O=swk=K)e82V{c15`s?>_4Etv6LYE0D2B)e5N-wp+GJHtBZTa4Tf?!g!`CPkL4_ zCWeLr(UNw3j+9%cP6$UI3}w0x$eX5CqF#^YB#%Fys9}l7y%Mw16prsY zC$i-~a1A-(R#l$kESfq1)q6v4C18WVG(j!5;=RT1I& z;NJQ5AXf) z2K`seYnj;T5)X%r>Gkb;r!y=HgE?G%Ze?~$*VZULPBf?0BKd7XoQfRG<{v8aZAFhH z%T}AF=Dm{mrK~uozI(mT5NHKyusX0$*l&L$X<3A}&G>FmKeLd%&U$0OUd+D?#6Fu? 
zkxfolGdir(bi*o8?AFI0`;Bo@J<)#jj_Fv*q53(+R(7=mR9-<7QfE7|@&~t(#$7Q( znO{hxhCVGxzEeVvP4qb`Yla?Zs(Mjtmpr3AOCcg8lE|A#F*9XF)A(U)$+3U5_@h)I!5)`FY8k1 z`YgY`y;aK_t4vMYO&<%JI}RaNlc%}@JX+WszD!j4=Q+mu zMi!WVJ+gK0D3Pp5N}+Yhgg+Hz{ecbf7<{=d6?$BrdQhEEKTKk-9-jL1)XQUX&2P}$ z*Q|uMz+Lpd&eN&i<;G>>cgy|9GuG@C)i~#_URDelj^rH^Sq1vPX;3*I2ah3buVBJ1 zujzA^bm|neEOV^dDiT7}a{mOdfBQ#hD6`p6Iy}CkOu9O1xB7YCms4b7VeHPdo}=Tt zKH5?v_Ad~YHZ3%}v~h{A(^FfSnsu*xjxjFk>K&$hwXY25H}ZDP3?Ru}pI;g@XcFvm z4czBGBo;yKr0CwZ4tJER3=O}LFtt@R)K19`XsgM7Jg8&(e$W>;8v4BOo2;8q+POGr zmm|in*4ad|^Bn2)c3ZAlmGn&=+ECw=VkW0Pw+Ls3N#O@XeM>6z-iW1Nte;iKYO&d; zBxYQ3f&Td5V6jQ`h*;_wvHC-wS`DrBzmxUNfqp~y>(jh6?~JLZp}*Hj&!;nEKL+#! z*p4b|Wh1>Oddssb4A5E`kwCKuy!m=aL~Ol?GDx!3T3bBv+Ifve%71xt82(@ki(r3I zxR$!-^KB-@@*<0UK%0~YB%sH@~l?@*#6)6{UmqZX%f>%$1PYSw6K1O|T!Molf z&e>(RRX1;6MY}|09J8<}f7^wNOIB8X;74pgEw|@EoQwNo%Z}k1M#RzI)88H8Z<%eQ zGV3q@x${vu&3NZ_tS;e9fHDnAogHVx(vsC>U$EkUd%)Ab`QEvqyEU{q7W?k+7CI_7 zyYX#~^>&HCB$c4Oz)kMz9w|xa`R1hvZxKC=_x5f;46WK888fJu zZumYBElpKMb1{WQ)N_MCa!?X7QJou?>hO|kdM0;KP)Xu$|7Kg#sO`fT$raIM`*dMo zfEqZRDQYbOZHnLO-!;L_lfSyiE8lj+`dhD=ye=_`=B%=1?gUPK;~>C%V9;rC_*@lG z1>-aBjT+Vaj;Nk_F1zI&{A%)e)&hB5vgUlM1HOL3Z&1g}W8}g93mf@^yPtm-NlPi!(wldvB(+4Y}HPt&cr+uISOv)wsB$n!*gBz0TsfOC3N|g9_aHJ zxdSBOnF_j!M)A6@NaeMyyy%w>lehj{sYzl1g7h+t$2Eu+Uzzx@Dt_Z0t|JZqaK1?kA<<`0n*Klqx?2F@etIum^bSJxx3BAB%ap(T*>4;wAc|3MD z=lEF`z94#AxJ^rFYv7lqkL2*J2`dZszQVW!vX~)rO^4#Zd^`zsGq7jWOJsNOO2~I4*|aDMq$QqbMcB)}syy zO&LBksPHuMXRy68#qwYctvZ^YpE%Lkb>35u@ww`=z)VzKct&oT4YSD1G}b%v@M%@> z{e4W01I;I=_~jyhG`Q)bCpR5$#&+Xz$}|7Z!q=JeRy~YOs884O)Ln+xGs<(eXpDjw zoue;lNjmdYutzQ~5PztI?x6--$Mrq~g|@8FV^Eng<+d<=fMTBlyr^@|qZn@lD(+(O zrh{K$pAo}K5s?wJxnX7+*Hv#=#iSZuVVzpH@&BSC9TT7P>@O+8Ck?-GLRhA*;kIk! 
zDNKy2l>}EM{by=cjg2jCePT}1+0{Uw%2$Io^GB~@C(d0TZQ;X|wP+2W=&f@CUHR*o zj>sQ~>}lG#{#0nWGEjU!3tkpagNl|Ev~;e`GBK2H2yTdr0&TzDFZABK@JwBMUwh7r z?K|pqU}{N}H|<-f#qClSCl`iqJlFV>s3NR#H3D$o>`0fDS$8~ng^2n_j@RV|@6lmg z%a*D}#YLlgx#BCs@>E|S6I0TS>E?UhYvfHH71ynueq(>5pR>wHPoPJbnMuok0`+Z7 zQ9}Id&^u%P6uUM#)3NLg#^F}80ugs*(sS2%-#EmUtb^cWv(|%>ytnhUb(>XlELayu z>PO}WX)B7liq+j$UK~6mTsy`&Sp0Go93Xb8e9gG@_3mx7s(4QR0n+&)f}6+6{(M58 zM>Xb|&Fa#=MTW=>9P7PyXd(hM(Sw^tc7u14D|O5fP1R%luK7b+8O&i33aOiSBVjsm zrSr(xw3#3;&a-S@Kl6434D86||AAZB4oPESbWJ~O$fu~9@624cGDF>hn^B^xHF3A<`Jt)1c3-n6~Mm}4A4Bvl`bpi)m3Vm-fsXz z@DIR+o9}$C2xtpv5dbgTSL?Mu1eC z_K_kzRdI(Af8xEY+q}!CsNT{9NB8}*V5&fPJT}ymoKhTr&JO?bCRx3Pl2zBZ8>d!= zwi`$SbAr!}0ZGctS%1K;j^N?{w)B1f@v0Au?JWKIpXVny<@f&oLfYWj@#+6cfWbIW zf&ZTmiB`7qwsY(b-_$|xw9|1ZdNd5UbRxxxo#CMhJPlKc}q>NDUUnc?e2}sONINy(Nx)?Q@4H_ zU0H8F9yos0aS_G-Q~==bjKv$n4A<)Ghf&akR>x?iA}cfX%6nTmiE^UpjVPelcho*P zJ?mNm2b-X*1u62RR4o(wqSF}+lB@M(dGDP#gBA=l>z`Jy<0lv!J;>!qh8wxb;l=nM zWzK{=WAnS$x=LUd4%DyxwHD-`kgbdO@P$EP|GHaOPbx=*?!(E<`GkG*$zOmin72@0 zqZ=(;oV*r|Q6D!yhSLN2eBVh=CKd1hbTy8Y#B}UlQ7s@Pzf@M(4^nV6{>{hJ>ble( z1KJmc+~R318_Ltfqt-n^=Y}8WKkbTt4A=02MT>4do~+%wl5v@T##+7k{VY?h6gCd4 zB954D_KuqASDbGrOYashrEb!CgWjmxH)?F1zr=eTqq$Dd{LFi?1M=h4fZwY$Ww%OM zIziA{A1i>Xzo;vc`6uJGU{OR;;O!{QxzUd$bze>HYfgF;msAwrU>6Ol^6lp}bNsx= z;K`rfGdC2~l=s&2`|KBzuSceRdGl+B2EV{hG$?Du1VW_+|2X&O?t5*XFH?tvx|k2B zH(M3~iK3>iaC@3`%{VRHUin9s?#sZ}N#{~yyA8C~*IY~lPhQc5PQ!HY$FBYqgFM|$ za(0*?jw@(l9ur&SJ{jo`8>R3XoU=DJF0w{_NSve>%?oC)Di*m}OkF+2+~XeU)vs1- z_qn8~!8b9bbW3=MT8kr-CZ`N~;Ej9yy2!T+AysGX((ZL?1pYZPDv6+T^7V`g>)c}= zvAJMkE!<%>a#?~SVm_awq3Al#p_4q$CvRJB7}tx+<*IUd1gAh84SV( z)5pru?WQq&>ZK+uzA(^YM(DOK3D)&jc$jl2rZwChr1U9yW{f>!M=wrFA8oOmXc>0A zgmG-#KK$-?5e)Y|N%vKtud>9)jmvZap8UNu3HIi_)b$$UT|J0OrMIVk{J5=Pw~Zq| zsLE!C-ne_3^VMXNR1@2< zQ3m7v2;QqYOWOV(r5@T@DdN~P&|uOyQ@!hg_VN3TSO*NM7a972Mm79qXv*McqUaQlC-aC1A>XA*evF9TDF0iqYoy#bu-5gH=e~DovVuZu*jKe|=@cjO43|nq4r7TCZr#*1 zIdL{RNax*Uko}IGRQtco^3*@j@NIH4l6y_uToRY)dz9o07SA~`uDo=pbW|6h7s2WI 
z7WXdg~Td(sdZPH+04x2>y1Q*V&;=F=CeJB6h54;(#x zTp?+_lBQW~zU?oM%L}PpynDMca|7!@Y_S|GIMDc?G$7q_NS3#sZGxfiH8zc^4 z8V{o!0#%p1$28uBDbdKmC!ldD&quTHWo9?RxKibeLb6(dZ?olTy*I6Q=qcpGwWHsq1@x%T<|6~0N~*Y=^1CFt}&=SWH~sYG<(fdh}aEflTnYsFOJr;Dc2Q6^(37; zWCqW3-qEZceJsrhmIlX{?o$Ni1ol8>VHUNq%-ojAZ%XeNypeTY>2`MxF= zs}CnlH84%Q#wN6;%#(h9>^Qps;NiI4dV}AxMyJD*mWW?h9<5TtpIfdoM$P|xpc>DW zVwd2|W(yCtKWA4X0Wi2rbtXQrRqHkR=3)E z?;|)K-bywF!xHkFP2>QsfO_S1^)c_=Yt;^ZkPWiJw3JO`r2kI?0$Wv$`Q?1RgUSLX zC#?MIT`SKL8f+zl)<5mD!*VvM(t$p&V(xa!TUN2f{3RlNN~vaoHdblq?_pJ_xT;ci zj1OB!RE7xu+Gp{|+8S?ixaPWml?MFe+f%rT06p_Sty+a4}9PG)NqufE(w+55i-<6?t}5uUe<-?SB2FxZx=4_Pg`Xk$Krp zbFtaHwGJtRC$FN32KK<>rY|Pnoi zY4-8`#GUTR->)rSsN7?=t~qbW`fS|9xxk^8H<@2DO&6v^n@cNfQ2g4bcaB80PRr`z zHOHgO@k*E_OWPwvvR4KUNZv6VuGva1d^{ko|Ie%;5iZC+qn8 zy$(~J-oE3^LfME9f8$NP%!cm{9FrJ%zDjg@XUOATp49z< z<=h2-buG4#v?TYmjq38zq4&M^4nhgMMITVFccw0$Ep_FoiT+}Fr#kR-zvjpR+?lg} zaNVI_z!GgVnaTeW71%Bqm6vIdlD_4(;doZeNrLL6W#JYAj5%rpy-Uqh=XqY85h_+o zr1O0#;!s`kveDy@Q+p7GLp168?L^lX>WUFoGi&x$5@ONvF+M-tq*wr^sB4cp*gTC>FpWuue}m553RYMg5XLWZ zc6l%#0;=JahCxlj4W#Xm|LlTyn7#@Y>(l>@czq?u)4%uyM(rBwbS@u%x(3F@UA+! 
z#z)~@)xxP=1BZr0^SGW1G8J|l(v$L=mior=>VpOXUfy%%3;r5fZuPzr)gAl~BdpK# z94Jn#uGUnSFISe*l4EmQldrv!<7(9oLxfkvlCJ;Ix1$_#x|bcw@DGP=*~O(x55s8r@Y?f^@aT=RzJbG zlA}+bG3bq)U|pQV*Uz*yHkXF~gHe(yT@IRykb$#?$3#TZkOLY%K6*_*)07G z0GYi6iOvdxu47ZH+l<%#Wy^idydD{8gL7W#iRT(#_GJyKDC=WJdZy<676g@DR7Gl$ zl|TH?QIn~z8eP>$E|4IV<+7V>4A;D1yLBGpS^=jjA<=*S-n1Ioi|kYbZd0*Bf>F+#mBw(cwLm%vWUo&d!`gc?P+Hp)AW+nm*SlE%Jq6?n{`p zMA{>OUvs1`rrMQqBCEwZv=S8gZ!QfeSk#pjYe9>$f9=gg!dz9-fq*0+K0-w9npJMY zhlsI8{x;_%4hAF6!~~zP`Pu`y{l2wlP1Vty#zuD+9Mp=?>-lej!R&(-N92>jeqm}@ zuI^n|I_$bMKnL$c23ht9-?XxVaNHBbCErngDJtzA`;$?c3?Z2I_I!pitTx@>vh=?7C}JX+k^ZNX;M43YvQC@ll5u;9;ewv z=!^}N5R$dN5;YyY3As9~Ev8^^JL}FW=^(v&Et!!KfN*HcXGK1ieq6w-OfRqDsj_5$ zA%HWY$md*ikE{HJHqQ6X18)Fz&8hd-;o872|9TzZ5+M46n&2MHf9>^$dTsg&NyVG& zhJCeuz^1z%*sq!UJ$n9k4x1h_fYZI|%nm927dxcJ65XI#sh(BcH)117LpyL6G?Tb5 zeR`%4p0#%}uZmpIhMyXrHl!;qNU9I>9KXHA8mR)T*2X&Aat1kgAo^T zl$ztTsT*yyRjxPsoHR*U*KCAz6tOH3_VIXdaU-~|~Un!GhBhkZ>amwk#1XtM^s<-5K zGCZgLX1*B7Ahs5DgBY*7LFBcGRon`3ZFSg!RA8nc&&~E4}f_zyI_2|LVW? zuKI$st$1VpTUkIO`rTncsumdCaqjjxScmP6>R&&N*_}=XP=y6}((qRW1E!?iieZZN zt_}L(5A3o zxz!tvwcl^oCQ_3gQC%RF!dch*P6bs+E4htKL~Ctm%`!J)yEcFS0CH=~^U(*_k*Z<1$GDrp!*f~v=Iv}MWXmDt znqd3Ghi^cJ`NCp>m%*eH*rC}m{@StT;ToaMLDhIq<^qM;3`n;vla2moe#^fT)BY%5 zK|by!$BX%OnRoOM!)UcW_uv>+D)fO zNI3Qzo_SE%j?40Y;kZCljM~A3RjO0nHwo>EycfvNn*P1k56dw9#UUaSSXii|+4tq@P>#)%%Oq+MColPL{U6ldvXjzlTX?CeIr%fA&?)`l46q+iUsq z1YJ*b<|hy-T%zP&ImNm<$>9rQLwT6Bcz*l&k_w~*{WTGpl=DVu4|+^3O&kXU1SvzJ zgPMmSp5wwJny0D}4kM~l7oCKh(zt4Bi^hk&k&_;!QnDu3BD{w0^=tSsbNZ{**Pk2Z zLYC=&Hd98wa0hcOp;tiBga9VMV4{2Iu76$erWtcpGEF}FJ;9I-liadgtK^V0fV0CuKgcp?-Oz8-yh5~LmyV8ocI%_e{`8+OMr=z{7Q;F$?zOD5JafdA0ryeXt${n+6*i~MEALz< zkmBrc+0cHZt^B1*NQyGz_}P0^6NU~8z4IsV-_Y*qn3o3E6(8iW)wkePdl;@;M=k^H zY;Add0$kTmB@D{? 
ztyweq7glmecFx)7+4sKg`*FU@76Oe z>bv)z$$4|*@Rgi_@>h1@jo^IjZ1G{4d^~Kgo{nTm!`>Fu+((XoCV}0+)t32sA!CgT zN;B$!vgxw1kR!8bvgf%EBXv1(8xS!U?OBJ*Ve%w}M_boR52k)r*lW+@xE3n5f58;K zPFEma2>4aN&Z(T^x0xID#2N*i?1;+%`^jX=`H^bmebnz&S`5bQ{uY+L9%Rbd2204} zK31&~G|keY)7HtiQDf~Mmej;f+YT#jjT^(8>MXA$X@wS}*gV z2#fiLs_V5LwQhS+98JC0`)2JB<==X3pDXJ=~(EPRV{6iM)$)$_n+sREWUyieiH)3JSZ7EV%<{N=j z!NT7empQ!%4aURCrUcE)uD~?({G^slIo{ZWR_d!%J7omO@(fc4M9$97M>Sq*@o<@8 zvUhkbJzy3OgBiSpNAlRrUAu(&=ou(vGwgT;&J`Qg3>^)IUmkvlx?bN`k-ew?YKCg? zVKI_nGp0NEUSr$46U0&>GjBw@drm5ao0F_dJMl~0lg-?Mo#2_IzrYs#A2b9m= z<>6hyK^Yfv(bIclUj4}gvUY6_I@D_u8eO$Ob$hGy9fSI_`;`TR_gk~au2n&E95#1$ zWoixxD`lo_G@Wt-7b^h$XS-WSTi09fAipCC@p9d9-5eLp1%}ea?<#M^b;nsQ2ypWJ z=7OGojVY_X6=ly#jaklJUw*z=!K&QMnfk|G@sX$!IuA9@sGK@#^WKtX?UknQwgopW z0ARN6nZKa_a9uW}c&1DwnO$=(Z3YOBKa%+D@6=H$8#Z5K7}c@1b9;2#tGdD7$*8)u z;xZXqdT8yQkK9>OA2hM=Ap_!s)Nlx^b1}q!*EPsHnM^XGBBYBqJBp)2tg_geMSsau z?YaSx_ueD&ToCw1+W=&$1*+VZbn}_35l+#AWK&kYAcC!+2rdx78A!_G?(Uj6USK!C zq&F_Q9i6;Zv@OhBpuc~i>9o0x*kdGU(%m>VD-cU7CJ!=o*SH8G=G~c$7Z7}iTlH48 zFo^q$Yepw1P^(YVTmPlO&TTxLV3!N^k~#RGeXG9EXzKLu@j^~Q?`d>@_P$hcrieGpLtN zl>oKonA=bz>zZZo^{|wXe8O3g6AK2=_S!qnB))PYu}dZa7s|3Dt5h{NIyHR1`UeT& zsOpTDNh#hvk?uUfUg7N)_sBw`%(3A_kgQ8mx}X*kX%5Wo9=A)O_YBuug<1;o09)@wT~ zy*cp2y2~%D1Ue~0Fx=D6yTorLJ=VTK`B}hJpcOlBT<~@{XY--!;2J~e(QD09bEPNv zRb8{|1|#0?)PvlGG$R7e(MmU_@MXV#DV^w6fHAZB66GTo=M+Mgyc z!DpQ4Y!Bc5k8Fu{D%Xt`R(-fG#~2@uNC7?}KcD4@Jw{tKqbDxGJrUuE^8g#hV=?hn zC6sKbp>mHX=ydKkZY1V^D?kWTK#Ge7qQR;>`QbM~rALk9-Vf@8rZuMQ%PBjUQ8!Qr z)|$_@IH;LW#Ch@CQg+^idcdfuO*Oqx`9i?-EiH_>(A|pmPuXow`G`DTVV!biQy#-Z z1A+O5VL2J`8!95ZZb7Do87q8Z!$VE@96xc0t%J8vUS4*4n z3;EDef@MB#+@2=cqWYG!8U8!YgcVdPdLR;ezj=~J4b%afwtUq)uNJTrLxWW8v44`S z2wa`w`|o2Omii1GFY?yojKDd!t;618$va=JQJr>iG*w4cMNwJUIbmoBBIwVQ!fCfJ z(bw%Wmv!#R#7m;DKal6a)T25Rs=vV>{kk#qCD0<$L|lt=juW)y zRWI`NuNVm{lPd;XPS`2?$5$e8E-6Xt%O#GlX1iyLt-MS$TN8N?tsm%7%}{@;CUE^> zHAiMi?#(}YTA9R#Ng+xYS49_}j$2R0A@WyhF+IN0@fI^SN_t9V>TO>env>lUt?5rhRoV@Am2^`14uc;mX)1 
zc`uB5^QxXO5?=wVC44uCGHZLP@T~@q(Oh~c9t?9yVAI-8SXTjI5XF;tKUjZ?PPSfJ znW65g4(`tUnT@?bzyR5w{w>tV|>&vNA-;`I*;oZb(- zq-C)9$+roGkEqL5MaF^=ZTTu}lhgP7<>Xrymgd&)3@OOOZ-Xmejj3ebK9~7CkHmPi zEOUVoK-**P`Gk(1A)qsya2LZ@NE7b4E7^yC!b)2n8;Xe=eUZhkHwi5=j7A#9I0WVG z`;q?mT9b=8AdEEPBB-kiH%Pw-rfCMT+><|-te?am@jLSQJz8r6=o_OpvT85lLi<0=12)Ive{*O2IU0(F91( zpoME#10suNb-LD zrHUZ0Ukdtp>w11icngSsj1hOYr!k*cB^yB@97}%<)XRZ+k$VHLXtxGpjp#?GyW&%Y zbM5W~+@}q@%9QUMZy5BUdN^umxgS3AOA=Ri2=a{^5{Y4T#rPW^)=kZ-L_0iul2AJq~G{+jb+vn&tP+Kie#H z=@DE(FQVy4#;60~UDW}sk@Alsj(qf7h0XOCRs0LrRV@j# zoWIxH`5%%rF5sD);j0-6Jy3aToUAPJ78}oMUq8U&Z<}mHA0~B~<_Mmjy*o1AJ9^0Z zN&0~&o$(a%pi%P2B+*-db$uqxNtJla!{D*b8oqVtQlWgwqtcx_4t+A{{XQ}-CAO*>i>6=@3xgXaQMKt}bS1$oNS>*9?@`~l{y~n}Ru|~L81BEhy z8pX?@5)C2)z}fN`kQ9@1*8=7AUHuGn*PrT~(5-Xzi@Uu8T;5o#>DVu7@7F@ljT-Y_ zj3s4Rs7c?6ejlHuVURlDIy;LuaQ%SqmZh%^&eCYqv`yzmY`G5i$12EDxz$PY%6x6;^W; ztk6XOE2(d7NYP_ac_cVrrl#^F6l^vZkCY?H%NgeM-%ggdG%J3c0;@Qe&EO%})VJ|_ z&9?#R9jE0o`*2+Q92I~zkrdzf5zHI(@#v!p`!YIwvWuyAYa^2+c~q8#;jiX3H+}f~ zKd^Hqcf=4d_~tP2T_C(&xm^gd);ozd=l)N$@ya(x;NpF!*=ZdFS2Y@b{ot{?-#;e$XaATGR)&U~9l-vBAsr7ABYJj4Tk=N%uGN+lnN zY6HM;;a_MzL073V|AmpiWfnzTO-lbX3v7?@8)KM>206A>sn4Td5l^0n5qLtdbSEO( zOh})o!2PU_Wq3LTU_4~a%tx=!7uMtsKqz#2Ij6{grAO0;1uCGgwG#kBvy8OVU?}f1fnHgaZlIYKK}{` zhyE?9{+bp@;H=Gndof)mH~LoQcVQid*%Vxu=j}L z4`7|Xbo{#tr0YT|ZK+WQS_RjAW(n|MK#Gow@A*N}~$| z()x=k8<6@!F7Sg5-5d(d*b0-p%==&m+rmQMJfYj9m(J0nvTG6;MJ>dO0{;f168DRB zl6>j7U;W|Jgv~|TH(RlId0o5ju^Vk#F3-f|WsxxUp|K5_$d8pYO^Qg2?gPYnq&rR zjQgY8T;oPUjngI((3(h<78u%*q_D5T1bT9_`o-M7A;r4a75m_Lk*+3>P!fnxQY8k~ zs!dR}-_5q}HGz&^58`EgkWsfbNl7fk#VTQsa)Z#6W{6ZCT;xc;yPFM9Z``H33 z_>5a5M^TH4(xP{!vT3OuYm!lU*?oa}4a9NER0C#!V2vg%WXe%e(!EP9xXFi9Y|-ye z*-Ehst!J4=zbRqwn@p}XehRcXeHd({%yi4UjMc?6P7*z9Ui&n2xhcC&CShSlv{WRl z8tY>C*0FOkGLVs%^J=j1`D44-ude~#)q0PX$tl&2QUsraxL7?zT6(FsJ<)4ecKnT! 
z!u`)xD(94WdDs?)x(AIp`67W;X zXTEsjUg4uCEmGP%v(9Cs|0-ZV-?MMgjrtqmO8ByVe$+#WTCN3;^ebySoZU)O^zkxt zadANvC>u7y+J1cLa1RG*TKm_4W21vE{To>~ZRv=WIuHXTb^hZSU*W4?HX5JTCJ2ZF zb(1Zetl`qtk)6EVBo!3SoReo^{SlMFzTq0QoWVZuy@?l>nbT%A>&E4W+>Ey5D<%yX zB;&(qGI;@nM_yOyiUsa=+BN}D$|tbdogC)d-G08baiv&%;Js$&vQA`yC8GUc%-;g$ z=U2#yJ9KEK8}!LgnNTP|StJ4I_Q;Emsntpl7;y|r>d4DA*ltpW=i^`LvgPU*=mKI7 zs8`xYsVSaDTcqE|{2BZ`oY@_KQ0wo(P4LHaa^Eb5KNV!b^aDp4_p(9D#jh5@t8Wxi zyE$mL4B90a)Io3qb@}omI+z3x7;d~<%|SFVm}5u_y?HhjX7&W)ZD5XXR*X4>58#qL zv)Nir2UY#5Z2fL35A3xt7m7!K`y70*VKtG*w?IHYu9tU8P=4s=cyy4-*B^+q76GBj zN6>E9_<4>EQu6Lmuc7ew9(qsS+%$`jsY{89^*XI=pQdJQ(g|cEh7fIp`T{zajo;?7JJ*Znhxj}3Y&^FAoS!Gs7}d-|3>o5i$YMk`>TmWn%CgyPT{{?xYI`^&j5N%WqV8J~7?RXaJ!fnin3fk@-*&a=vFP zYHfkuX+{x~G4H7`V$yG2F;npHNpbT`wfALi;%PSOGdNV|PCo@{p;39$4FYwY^a64i zVcV!hhJ#P(`E51#={F2yvnNH&B##Q|=<2FwNrHw+0Vi@c*e)=+_)7l=a@>o;u|ZnC zS~Q>fa3))|e~RW=D%D^nkTa4UXS-!_-plgwFHP3T0B|oC!Yca8gx0{n$nPv}@S(?_wq!{KxYR8(i zZC_cSU9RZ>X&z~-zQ35_{aS?*q#c)o-Cp;E z1lIeRr>P-b{Fgh95gY3-JNZTgNx9I>1NfO@XG>!?M~8T1)F2brkT65q*Y=-O-Hyle$YQ7SX%0K0e|KzO=gbJDEi15Q?0C+YoQJ#JrqFN3~C z$j|-cz3qGl5$LxYK@~ZmVS>XVKpg!*Y#LAP?PE9;wGCX|>e(6(8FGO7LGhIqQbIs* zCD6>%jIM=37+Wqnz(Fc(RJPlH!rK?h?Y20sWN0Ua2It~md;-`D$q^RKIVioWo}KvM zF?8k1E#NTAVcc*e2P}<%vHc%>Md}dSeL|drO`%tGmn^c5BA)-nCo;KSxr3PdvksXPJb%DTNgQKkDR1H1I7{LY=*7`9qqxF>J% zzV49r5vqQux0O3;Rr)nmc3$C;XjukS>RE&Q^Vp(~+;~LJ_B7Q@q1PjXv?-lHNNK(; zb!CS|%d@=!Hv4mhOtJGlvs*>r-_|w9X!5#i4vbG^-0O8ExZ;L+aJpFo1}wxA>zUhP zKG-Q8XN867I`nK+dXsIk2f36)m17RhL6b>TDeC!L=ALmeYMk{t? 
zcYvWr-t{6JGf1K4SGwhhb?q!Dy)k&CZW;D)R_V4WN_fr4vwS`UG+>${@abt)#m=>04Ax(j6&eRTh#NDtC4VJJ;((&*sMR6n-*a_bS z{8-d#9gzP(fc|lqZG3F>c~`tb$~ae|$sO$nVaxLmJdk9OEVToSg4Yba9B$ z5mZaBmvx3w4(c5#Mz$0 zFr*y9>-K$?3rLwkZ(w)4%f)USm|LkFn6TW z-LzeuWT)e?M8=BvPuExRNSiV&ewMJg#g>n>k(OqPKuWNK$fS{97l@w5fY_qV8L}Yb zU)y{8U7(bR$Av1WrP#T$#IFoa-cEyMP4Vy*7rk)4YIjOrHl^vw?SusD#=CDDCi>&j zu7I=BE4MT;x?mIE@bp-fCQw6N_b;e7aBH>eQVndLkzxs=AB!St`C9iJu21HBC4yh1 z=zXwqCuxlf@)Z%$u)IZaK;~HMPs(qs2_AqvnMok!)q1AIRd$nx9&XT zc_8H27_d00!R>kdrGtJO7X1K8-_9CJ_*e6*i60ziY8Cv+m;+x-m<6j$u$lCz^3Fm4b5l$U7>wgK7b>mNmKUJjoBMXZ?LWpz7hR?zie6XpS*$OVYaU;y|-3GZCSf^tx(8_cuq=Pbud z)Vc&(TEPWlLYx)+j5(r%Qp$cd11;21?KR`>o|4VBqpBdaom`sAB@|CYbo1gh(ABFO zK%g;mJHO!_ee?=xfhjX&%3zS@*{>lX;b7uGXM6x%S9B~Tkd<9FUwQIng;v%W95+-^ zr1=)lzGBBDhdeC|F2a(h(aa^Cn$0ACXQe`<-++CiII`{eAT|4;dJ-^Kt=ks6bp@{V zv)w{axuW80QItX#w+n!R^g^6{QJ;YS)@q8%f*L~iAmTJgt zz2Gl_HrD899}P-E6LB%K8Evg0wgCqd3U zUKc?~I03tPx!#@Ukma~jU)4R0?-`{<#L01ko~iftr&8cfQ(D_ADdMzTqUj$9A`FW6 z)BtTSIcgM-xieywwAOzHJo=?EYuEoy};8$&(wrAO~@i-H8rU8!Isu!2lB#7;DW%i{;0eu|i{C_Y3p?vap< zx_+UO`;D6BmFk1vDbQRlpCkzIC?nq8nI$`?*^D#VNJ9Z>X&cH#!Ge z;%Zh4CP^Vgz!45YpGM7dif;%$Ii9>+5p=SONUQ-nL%iSu6zx%EBUJnbhPK?P!@of7 zT{De#76Z#0&dPw+d#PF}+%9{g=+|u~b35j@tA0j~U!6Xa1yM);>36ilKPv77{HmOC zINXAzoPXIbQgF!?oB6wu=939ux9cP#(zRBQd8TFiWzdzE@4lb?V_Rvrj|bK}g+D1? 
z@8&Q>;F2#~%kOJ~eRbVbIB?q83N7E|o@?feIhP^cSf*6t{H$Ij6(mgc#kyF3fbokd z_@Np2O9-e0P&@%wcbcS%H|VTk;kgUuPPPc#)YT9aXx{YtQ~0}`eVWNi-Rqz`)cm#S zD6yko*X&H&?0=KaQ2PY}16hM)S_GHhtP&5%|FPI?8w6-a8;p3ZT6U2&*~p1Tw(|iT#40+Reqh_Z(?mKMcyzJp327ZIm)x2=^d7{DB4gJQC2?J zgjm1m?FQCHj!z7ey*Ds=kWqZS!jz8ThitJ1gh~9DZ2dc_j+8e{pIj96R7oG&$!41V zo;D>AnTHqL`4Uz!_~Uv$=n~eav&w%aqgqQIwk9{apRDjta{0mR7i+mS7ww#LRf-#5 zHJV6(j%TQX^84+0`YoUtj`Ru>x|3bfJ8YOOIa)3D#*02ZEo_guQW!j7JgvnlE@wS;RNo@bMV4wyYUi2E#2zk*TTB;}S%ZRTS{MTsB*U5GKJOVg;CEiMaWKtkh z{*wi1@Sv~OTF`%=6aIL7<^NAV_1VWd`xpdUH$r&`q}8w=5{%PYgszy?YR>N*la_jZ z9tbMDo|OGuL}F=TYAwtnSTv6s&va~KpYOVL^zgE1UN zb<4y$#YS%{!D0WNif&Z8a*O>KR}3N{y=1WtB^!$lyXHDsSJEC` ztUiyp<;r3om0-1K(Vh|;RmQ@E44?V+ru~js`lNV_e=xLvXelo3Qm@ag3d`VQbHhz1 zjgR`bQ%V-B>)Vm3Sb-}L?JLBYto4O|U*cMnyVZTF%knEl&Bn=&5F!vs3AL980b^ORXDzoxW)i%Xe zK!ZLh2;Xo>_j2|a%lyoCqI8#`md+7(^_9Yn&m)lj#cv?#x&emuU3gCoF4TQ?f+!4f zGqG{WaE^vf zV5Vqt@s(?Cdlxlk9>8sX=8Sc0&Oxfi7%i|`k4{*)X#Q&eDoD`LzeZmK!d{Y-#fIn( z2asWKBmP}R>gvF7jFH4oLofdm4zrxu6VIz>>)AhPOy&a`c*9SE!chw7X0wS9fWXZPEO?S-;772`SbzaCx2L^mVT#XOBK9oU6Yx*OyZa#=Y07$BAnzDUK{n%FMYCR==Sy3!194I!>iK+5@#y@2PDR;_8mIl=2O-9KAg*`Eccrl^7_8Ko z*3A_7-+(Bm*gMWl5$+vQQ#WbCV=T{AsSp31{DhQ)Rf612(1G2uw|z?#OeAJVHX=IA zZ(d~3GjqR+jicEEyAshXGepr|-Hx1YgIHAm+@o*QisOKa%SOSz~$e={}7XO)7#F`Yb8 zqyj0Z{J;sr6~U7dXpc$}3n*#Jsn)_Y+-bO1JE+I(N%JrQBKTDsvpwr8tR)(dYF;~? 
ztss;+qf3c*gWG1gJU7+K7<)2O;$%7yDJ17<6m}F*fzY4y9(Y{Lwg-bf-zxM<#O9fg z?&~>9TbQakrK}cP+8SeNPCYwCOIug!rQx?&y!9|>^Heew&;H`ky_x%vu-Z<#{)z`Q zX!Blf%s#t^sS6ho3#hU#ApL8@reKmYNgD_aDJJ27BhR+&=zIk63_}9C3OOt z(8C7Hpi9o7Aot+3j*dO86%`6B*UkKF@qksN{KqP$u!OpMfrjb0{UqLn4FUmze8Q1N+g!#r=+rqFF_Lt@qBFAYw~#DMXo znO3z3e;!}1Xm1u*m8Jny)_8YgcOPis<6;N@=Zlk?Z-HkR&1c!;v}ta<{>zxbZ_lE3 zT06c%q}H8s`v+v9OY~Gi$0^ zC9cV!L3gJ_GipxGo8P}ZxEZV5V4t}#aj{YL>h+7K=o8Op%L0y$wtWm61I*|fsvf)cVIB*RIRTUmcxUQv)32=r8d#Pu;-qAh^%N+iRNTf=FXq0mUNfL^0)_f z!Q;w33T^xB0<%;^w7<90p9XpE5$^hbkV0)tpqwldB;534ZA79;8xLz@>p#>%I(Z@Y zQ#r-$U?E+qk2m6Og45Tl7j(vsV%@kl?Bi4XUM%pShf zl|tk@or9Hz`qTl%m^n4~{+f)fjg%TIHwZ)CuzDx2rIsaCO4&Ez*tJqj03cg!MOTj5 z*q;`>m5ldaWo@7}Jd7)EzX&fW*lQk~3^Zu@209+f}Q_%2;s< zfVwKxyGJRs^gfs3vH1h;E$Kx(loNb$T6<^1VlM55k>x*$?4Jc&@yq=cUV4SnA)fs< zLBeK(F^E8VkH!Rg+8Ris#n$g&GF_uiIHWjrA&$1UEvVMt@0~I#q~ROS zUh^H+>CG#;Czv|Em{2kld`LkF_^^}}SM8R?)sjIk9P3UijK1N+ZJ@TY6#h;R^jOaQGM{ZCp$`PL$`^Ghzl4H-H*G% zPVgTp^uqzNVMAbT&Glv<-nXy3u73a1%HZ>DM@g_b0gTq({KgJUTyZmtz4GFt=wvyp zh^>jxdD|MFq$r{{-xb|?e>uU(Zse0C$d0vgp;)w)r<h?FCLU3fL^$rcwsBuE=`F zc)0xi^H#$>SRKTH%gen;WR%qTsj+!Y2Q_Kb`(>a^XfNgXBP~0KrfeXW{tYedcf6p} zY}G72!li9Yld5IvF%YTy^DK1I_{mj6|9Punwfi2@A(JmOap#v>pBTT5Sf5z&ob?^x zzu}a8IkL-lFt zr9#7}t5}}|X+a2eLsf%4WyKFWXiy%eY9)e|(xU6#G|w2r{KJ zP&+uEdfEYAw4${Ior}8l$;FT1sl*d1klB4Q`&VX*U4A1;V@^k@w2v>t_3+}5YDovK zkt(y)kgU2`5$JZ^@eKiUc6E#|5Y)l~^eZzIUxR^OX{dbcl1K;Aey*k}Wn)GDNt*cm zNL|2EmL`xXrVR-V$a2iKIm^*%dhYN*FNGH*AbKvA{VUZmgB*J?<N3u{-<)U-Wtj+lxNjHm3u6!Am04*XPYkSHUkCE63Lsqk`wOnzy}&pe zj3Q5!>8RO2UC(s{Lf zb*#HAuvF(V;rn?NWn`vijU&Gs4+h_eJC$-HNM%rqV;liTgU0#7E+FFtBp;8&<^nU7(gsnZ|kk;z%teKrcwW z7?h}t>Wn=@g6PPd40TmC5afvBY<9~i;o8-#?=d93YbcZ#uBdM{|M(Niq{YVIjPE@9 z0}{E&tZqnFmn9VI!!1xS9p#M+G zPHXF%dX^5^$hSX|*LYz|?R{Go{{#Bc23~Xc6H}3KzK*R8!d;6Eq{!yI8DEHW4vEp5 zFB5!511$RnZ3RHSJdPN?a!J4KfppUe`2 zlANjdqhGU6$mvxf;#XN#CojO~+`b)jQ1hR59avx7Hf{TC+eI+#lDQJh+SjVKotm}x zhBbG#JYNwJL7D(4Y;>>ObN%27?PtLF6yF>4EBg2>v!1r`Jj1^%Nf>o&eRKazPMYf4 
za}~ie(`s%t_JIL6HE2E!9zMSTd%dIko@>){;($J*IQgZQPw;e?hk+enoT+ zZyQ{pK7X^GC0=j%e5Q9W0e;**Yqt2wSF@bM^Z3i+^$ItwpC`9DA28(DOJ>iE4V};< zN9cY!a!4V0%X5@c6IqmbG3B7;+`F!)Cp1*y0YfEHy`!KILPe*u zu)WDc(~?2jHfU|?Gj%{TFjLXchiz4Bblx;pyf`qsHO2BZ(Ub0%XK!(7Z9`k~x6%dz z;ORrb0zx{8fDmm~0*(m}2>~*35L1<~U4>uyZc$NfD+v%q$`#F2?>*-r;) zm)~}Wh4M^xIH4!UQ@OP-ZlG6d8_B%dz90UA`QcYMh(76?5rpayu z$B!zbF^1OfZ3Rgv-dGzA3dyGapm zBLaKGdCU4`txJu~q*K0C7HPY{3tdL;+oYbyh6* zz~T#8U%8{jVBB0|>I%(lMwSYc=rQ)g8j*&NnOmELw z8>wtdX3Tst46JqaWI3drvgj{Y=DTfo+lc%YifY)A8}K56PW|cqnU&w{ZiJl?lATAm zFHqV`Ew?ehrMz6Pmo%@dba|}`DOXR@O&*ik`aWzxGHq^f*a!;6_8cN1N>vUwXY(^L zHLImzE?jrE;SY4rRXH|p9`Q!B1no6gQ+l7^XRJ4TN}IhrN8CTiYQ8c&v_(k>dB%<% zkVN^6mNaJ#rpegwI?%~EGtUcfsWj5><=CPU`BWc8@HbY{>kVyuaU2^tJ@_woNt*E! zT>OL$l#zKdf=U>!2w?pboH~%aX9*FG{uLanIe#iNfc&WkEtnNo%@L*;5Wc6G{!I}X zlh`dKTDF;AkJtMbAt{#e;mk!`vGUO8VW&gOxLCWCI75nQBR<^H{t~+bwZ6ilYnG~;|@7+f@7plI2a|>@0GJ{71C|P=@lpes7D>z zRg?2b(Q>pDTyMWMq7`DU4bLQBSA1_M0RAW=`~Z><${kZw>mp{)aK%$cjM3WH zvDs-O4{#;s3)8###W>;AKzx;oLhYHLTygw6r%GqH8f=M}xuiYhwji*a zMqH}oFKk@Zni8-w|8!ECx$->s$dei(fmulo1+k9OiEU}5>mHS9Vp;Q$?ya3y z1=M48sgo0+2%DARKu?35o1~z8LVKwvJ?3y<(TJHlyf)w~_6%;o$j$u%3%RG~qygo2 zmQrc-FQu|IVAct3^ZcXWR0$cP-_@x~fVhc6rT1>9`$Hp20 zjMhiWEOgh-@vuZ}b8ngDYe!G7yphBTu#PhqOPnTK$6llM96W!j-V?*UdXbNlM2DtO znHNWbl^fwWq$4daMQ2C7&}TXsvG4q-z_lv6_+9}qbFp6B(qzM9J@-!gxAbNfn(XqG zKolxFY)wF7rgI<%V&+wCw(&7-$?-}QGE}>BvE30aB$n1;i=k2Gy9<91@+x)t4OcpS zX&Cm1^X)3jry+Cx^v>!d!6d}CuDfL^%sp^`qPFz{8Y_A4wyCaS)O(#1YE;)0#ihb& zkT&_^CY^4yg58xf(b0$cvKUE@&k%@EKrx4;#-!b|Q+hO_ zd~SqgK%;xAj+?eX^O}?x`5PyyA|Hg7>n~5!J5jWXZ?wc6eG^!yMRSJ(bjut<^o?>x z^8#$^%|m70>$n}2G3qFdAw`Sa?9$%-hFKQx#rGoyCoT_U6Ry?)7KV}i?_hiW4Ul3p zK&pRwoJ?v)2t#tg!Mf$dMIi0Uq{d-Hx;sa&xUZ4rSTW4~c_EO-v~JtI^~rXeN56RY zwPwUrbZo_U0q8qV4dwK=t8Xw>>mouO(T80yUH8pBvAmQSx{(S=(Yn;RZOH;x(99v1 z{Tig&{dQQxwwf{)#!xoAvroM0R}{#VZ5P`R))EDqGTNko^trO=h47&pUZ5n@{2t&c zzE}{vaa5T~Vx=mA$sNqBRZd$LY)gpvZ#CarlqGfoWKZ6?<>safzECw@j=L&wqBw^+ z0#zn8VJV|LU7kJy^`pum8*XJ-75Ns@nt%O#Ufzlq)PS 
zL$hUIVs_;>8QaPt+1l@b4)6OBu%{j^uvddDUMpx?ZnFG?zReMtmpe&KPdZT4Mzhir zvyOkDW{@-}wKuJCWwRHXYqj#9x&*M;G!l8US=6eRtp}_tf&vYf>wU z#87mG9SFsqRr6bV;+gS+97Ht^5Ms%W;%jHV%b~PI;sXO=FzCB~P-8&cmiM-CmRD4bilYQ~-Sp|lw5e~hKw;~CdZDTn2XJ&@igRD)doVJ{45)VT(E>n1Vngp-Tw$t>4 z0K=W}XI=Ra!OB-Iu-(Zw);}h7Vh%q)h{z`yW?OkEe(H4y79L}B2zBFmL@W<#*&}`( z;y8<>Vr+!Gu`0-+WhsHfc6h8&7z-Qj{(L*^?DMGYWPfl5V%}|B_=0<4=)`$p$J306 z=+kYGOPKMq>HO3Rynr+TV51!m`A@CexBVqg-=!f$+a5^CXr1+df!r6il?DA6PKwcj z*`0pj;UIraY0qlSNnZzU4z9h(T+obHR# zLf=o>6uwS5Ud|H_E+q9a@5V|bd=LVyih5Q=qfQLQjfO?GbB1T`woMH2*Jqa7vDWdA z>XEeT^-nwQjNoWfSKBRKVU|2X=I4MnsZ8R3QH!n4X?-SvX^Ci%9bvv`e(ExM_(r() zI(8%fZS*S2X)RZO6}I+tfVq_TE2Hb^FC{U);&xoMgh??MUz&GHtW}MzL8JPp1slqs z)>2QG`Tj-^pUYmEP2+nFzaw|VE@`c&eGe-`EwwL%Vfi7o=(3jWl%9=eUabc|W))Wb zy}(T5T3flctKcmUxb(Qmwf4V7rQFoku>#JB)g^wWe7qmMht}Z@+@D;wju!%^6prXc z{hlHd(MRjvfm`-J3rTx0hP54CAGKoCZM^1?+Lr$v zz4AVGz|Yo_Ye9;w6XvqcwLOV*pR(sNyxg|h6d`G2xeuu{u6N2ziB4nx40cyqLcR5V z8ei%xuKc=EusP)8A|6F7w(AkosD%WxLL_P;%ybrz{t$zG<4v*{`OX1m?cP(Zn&?||q@kRW*i|FJCklI0<9D|?Ug zbQye}=wbl-eu@PIoYX24sR0+@G}2+i9b?R+noX5q^&Z zS8vlBjYSrS^k9nvwx1&f$Di$p%uSajV_9`I^NK}$4aIYe>&sy_eJm^h+=)i|f!R2Z z(cK-jf-_{V7h^w)PCpLsne~WOJLl$0o~$4Fr$vg1xj{@gcPjTS z5TjT#?dvV`Yi-WJ1habof4ISD+EvWuGI7TG^N_|io!O^t)mW|z2mkk zeZ|%&?wGYY?4!?SBa2pjj|1l7j0U42xYo_NY10PA0nX*zUB875?J#u9U z?4I(kYb}U=HtI5M_RM*s9X;Er6lpqurR%B4?`FZu^UO#$)T}FOxEEX^eOB&jPV2fF zzun(wO-MVe5;EGn9Qphc^Rh@f2il+U4V9;i>*V+WXF^rh@m)C?bd= zNEZY|L=+H|DqR7Qj!N$$y@b$vQxF6M6r?w$_ud0Use&}=EkwG30MZE|uoK+>o;`cc zp8c|CKkWJ4FP9{jnS1Zdyzk62@B2LL4O4H%2IGq8qQUChR&*%69aY;j`_}3K|70V( zAnP_hxUUpTE~gWo1LlKpA~BcZ{!hSFeu+2OY33i@Mzyeo5TX{(p|buSXE#pDaC0ifjlbR`kKSe}J3eX5J;+6$hoeNPyUS^b`n|Qkk9LRV2o*rvrSlZ$8hs z=uzXes2ICb#ZARV5xX^4<87ncX;712i7C8PT#xUR6z#ATNyGhi{X`ZB>Q;VH8ZE2dlL?Z!!e8ADR{>E>YZTZT?CC{mEVPJjpkc&j3c+h3kH&f_2Shk0R zhb`4i^|{Et9;W`HUaVNrj=%QpL+TR9rPG&gPPSyXZr=Gn1_J&Wleb}#ZLX~TN}UJx z8lkg)8oxPEDUF7=VJI9k71aYz0$*Pm+_|2 znu*I$+>4x-l;?eMj_NvS3mV^w=^g!Cx;|cYvZVo}S4{ud9^V-cI6eeahE_kT0jM@1sa28jp7Xy7YX-o6 
z$Pm@1K;qvJz>N$z4Jjc6fQ@kAuMoHmccuU9GvPZw!0A9ZpB;mQDc*m5fcTyJfBe(_ z8A)MfotdSmmV3qccMnqYF@yHTykzxV59fwz-pW~^=kdF~UB(Cx;_rSvPZDSorpk&woZdmx^>EBZ^M>9t|bIbO;6 zp2_|YJFrXRASvZjC;w%tQRMT&6TU)(y-I|??c2p-ud(v)?r|QLMDCHXy4r94XKXHF z_;u|xI~wohLBy#>TsgVuZt)mATzO<*9|thxTs?LX&-lGXQ0Vrz-lvTE%X zWCK9tsV2C~@-B66pY&poJ=E78U&_ypp7@-3Hx^IVzgNtpXHr1(R!r;8^5IQJ z0+8{DWmRe0^Cls?;pNAx@nZ`MN@kQw04F?SZ@}5qcCI4iEqln;9v7}yL?|%-es7^7 z^|1=bIGdPl_jl>K{KHUcS^A3z$e-t6kRPD+OrTanwFBHC{H4m%MqfQr1`)CbH0%w-^SA7mBhJqB7fK%Chwy-6ZA%qBoM|4c5)l1Y=UCL!KR zba_}mYRJ^R-lalrJ-&&DZfK5$BExGvqSWjOs=Plln+P)TiqI{~=Pu2EcsyK@9-Z7! zsoV3h-b0FwCOk(fF@7S9FjtM?4=sn3NaydAAAUfG0_-}rcirylSm>GF*pYCVPhc=` z({&$EZ@wT%72lAd@s@RgN3Dh-7uY7_)dXd0*yZ`OPT;r(A!3gV>LV|$~?0o$*B4htATu9v`; zcC?^-jwv0v(AXP(Za)=ygKA~I8WZbcXDIK!&8fWu;?WP!kWNH0?m|UqU8ybsPX4DI zK1N=%kLA}Z9+9wG5C@t>J|x_hY%z=qX7knqZP%rNoYV~i;sIZ?mZ(DL#5N`PWP zxm0qksD*r{5zbk$=WlHU22=N)pO@3BC>?5AZUoKaWbGMLNd*Ebma?Vr{ukb76)?-ed6%y z)CJ*!NSCKZCB7oi`48gv_3N6aqIvWc>JDhuHyY^zs>2fco-)Ty@gO9Go~-W*s^9g% z&U+!w+dONX52ZfSukoR_46~xr^@Xk1cdX~W=EA*o_47bB zH#;j%o(#k@G-7|UJGl$SvW~0mM#+T&!ZteM2UL4hk0Q=n8abpWt{Cz+8~t)*tE27P z>qBm>_G!PxN4eEF*PB*!%sNtaaZNV08(h42;{A!P<6pY@!FhvbunqVIY`5$t$%r)p zrFHE&K+>3+*5?ce4R`pi8xQjVR$F)GkGCSO&kkmYKe`6?2$2>MFqRyX8;cfi20&_f z<~4aCg~$y}f~-xAVP(T%MaA-qf63;#y1d#i2Sn?dbB*pLg*oMT3K$DAEB9Iwz`H(# zLcxRK$f6ZOtf5%7bmUnAvyB9W?z&JeZ!4oiiH|-ZeA4qhms<7PMK9BlGx3`ohT%To zE?dElSr+!qna74d>SKe-QMs0gZ?|YO#=7Xj!dmWGxjGs2H780WM6W z;Bx{r*$BNg^1YzhwjH{#M^12J>>%`wpdN1I1ThJKO zdGT6Y78OU<3n*E6R?@5{cg$J|6VK&Dw+5{NOqEzY_mE-Lk}IZ3lpdzqQ*YnPX`PO) z(n}lM2+>OY&FFFA>Ai?{_hX5WBx|%R4EuY3HH?}w3a{Z|NdpoqZ_)%tK~EBwrBOWs za<{8lCq|yJ_nAyoq;z5!+(RGbY<(LOlk+pC5@O2h&fHJ640yp)TAxatkBqQ2c^$$m zK8zXBPJejeF6s($!N=Z13nq+y93J{);osuJ$8 zreTT6;#qy!me?PE5BC0T30SA_nmZS3hze?Z#jfU(=j~svowMo+!o)WESuW$G!h>^@ z5AC)v5sh11V`$6i-K6Z4j=bDUrPvm?m!1tzCLF!!%i%)S!_qJV64k4wFQ;L(VyR<- z*M-LUXXToZ+JZW}L;H;pZ;c!nH>B(*nmT@Zz6i~Bx5lR?-;BkK3DN9>m{h4sYabCC zEnc${`?SdxF}t_s*1Tf*9`V=b(qt#K4QYrX%JmYUA`_n1OJ65o+(*9)EA&5|%I#X3 
zTP{7c@3jaW8#!;MhYR*3B3LZ5_Zq31t3oXWBX$*RMbzMt>*Fq9u0+WdUan z-V*e|Zb_h{GsQY5B>f@fB$1WCH96G#?pMo>oYd!E`0_G?8;c5X%?E(;G2W|k@n3TJ zpq}WVFI%MJ9zTUS0Sanr|rr3w2CGP;G z?Fc>P&b`F;O{#8Nk~W3vv;`nukW}Wn+?9*4bsH679wnn#{!tLOYDyc3OALQ?H=( z!*lzs?Ns}>HEvJX&X;w##9~^K@8NQz-zO`FxN$3?skym}*TBfE zV9(LGj@s-+>(?f8nBC^8o+1-scdxM8YuZketisifhvN#0ziu^8k&`9WdwxjhehQ79 z(ps}ZUP!X=xGUAe!(U5*u-UsF#&jU`pl!3?ZrIc*pz-FSgAy)y$Jl z(kP+%~+W zvb%iiW4UCZbYTONT5Jl3CUh$2?brOiO6{tgcjjYpg8t8Nx*J!kIA7RLwK-W{&i%32 zZF0a2gFi}=@3O!7MPJVrHZo!E$tjP+KHDLCv1iJYiz4ksijuQk#YzViZTj@Tt`oJ0 z1alY=Bg)rTP$oRaJ6r^$d!oh}6O~h7cp4=TAu81GmBE9TEyFqjfuIp>FSjqKto;og z?XAl8{^h|Y|M7rN&x^tkbOXd0eV?Jjc|-jSw#BNY+PByF9v1#_M^5DHz4WYqa^cOT zLU&npBqNlKbLGN%rQCR9d`WIMgK6!RV(r^~6JlLh-+{26%ylCki(Z)HI*m2Va%JRB zRf1VScRujHCD$J7Y*oVZ{a(Ut@Wm-NBQTbep{UHGdK!=;57c2^c&}lxSNbqDsMn*} z&%QEWilLZH4+c4=!sm7b=WHqDV2m)TjC`5>i5$o+1AMJ>x4GX%;1HWuMI~kr|F^&n&0V>57sagU4ZD4fQhFS zN7Yl2PF13D+JCre>^-+p6;ze)AAW;dBZ;5B_Ip?p0v}gxUa~&u5s1_?!kpL%uef%v zT^B>VrqAbly0>fTXi23Sfo@bAw(1~Yrqx;sp->YNAV*JN!9nV#uZZw_$oWaZKtZL0 zyK((LMZcGOB$nNp3$Q;w<9vk9k^KGI8v&9aZQV&EDTgg`*NpvIdS)8eYk0na1&!0( zzPnE){L)*wY%E6>0McwYVN)GNu@S=cF#OR?6W8uqaY7>cx?(b1Wxy7d^~$Ocz^z5G z3_$ZC2TZxCS%k0E>I_6X5%~KWcRj#bQy{s*hON#XMYPB{nns7j|FAOomgroR;fjHq zA4v?_4mPa;_SrGBL;$>i$u5vR*8OGnQ2EDPK5vC5!rX}5%#n!lU-Tk^$TTG5yA}wO zJaA=U$J^fdDOVnF&uU>bM+oO|S9+Ae(w4`|{ZZa!8V@KNURjz?cNMio9NFR-mM4uwdsL3ADG=y95Y3FE#HawCyV2VI z-I7O9lHp^D))k+(YR}08`Uw31x`4dzEvke-Hg4y;(>OBmM@21pcavP|_>peozbXQ2 zdP*EP-fYI8=O4?yHQ2x2M7w!n-78)c1M&e7pu?>(0H#kFZ-je)Lie%|Z`7(W!-Bv_ zV#l}O3fJe`@N-F`vg6NruP3}Q@K_!pi55kgljKF_OAL`;7*QrEX{^c(8#DI|DP!f; z=P}e2Ij`|7z0{ZS2dcu^;c4JHi{Ewv-)dYG;U)Gfi)=mzMb#kqwD?c`oeAN7Yf*ig&p`Q|X>~*|z_s;e2m3 zUb@cwX%PEK+*UAEVV&o$y^kvo1ZDBSmj zlz|c9d*i0x^skKX}h2z}&`0^NeZN{70Ld$l(6nq5J%l5XrB zofk#yH;nJm7TN}29xHGD{r0D^U)^RpzVyh=LktdYAzNuJnhd5?lKtkK6yMX{+->h+ z6=2ki?NG6V+pQQ5XdzD~-o($&?GAp$2B_a1+bp{W5s}h8!XplgrZ5Q$Ti=sko5xOW z^pxkmo_cv?@v1^N!7Ar|v0wBKO`!=+K+B;4W`7g2W-`ks+n1D;MZ`7^&yirL()BEA 
zWHJ?l`^MHK_^huVJyHe)IRS`v_fDjVex2mwbn}$FA-kIspTNb8>c0u2K&^mnMJFmmu=d zp2(SJ)NWAuNj8fv^3z}Qkn~VZ&tZ8{$Y~%%<9eiUphPC8srR?aqa_db*j?;%h(#Zr zSEAHVbLPvRUw1!8-jIkdB1h6mmI`gUa%A}Lr0wD;DE2h{;zQ7wjZ$h!p&AuBKVu~~ z2`;|!t?%(>rcd3x#90}XC|2kc+Y*lDU#u>62az1yZ;?_*CKL@hgXq#czWwIcvlibA zX#^%GFI~uC{k()5K=7+9`Ho9{9iBwiWP}2BDoqdfIoeaYfH$;OZ=+L-)1(lqI?I(T zx_etKhp*)bo#00(Hb%O9Y|IM67*Zb7Io443AdWolmPZn@v;c z-yi&^jV;Mi`?R?sG8T#%rLk**-PK1Qm5L-1xFXag`133+o}n;o1GrHH_T*FR#N(X& z)A-qaTGeOCsk*i|2ESRG3Vxp6oT~eiz2|IE)T4hp?Q1IIXXSHrb*#22g(Z4=H4rD| z&8Wg~TtrQS1UJ-l-G?VvA%{*0)ii#j(zp7@K+$x#zhzh!J&4ls$GdLaSFso#IJ#NEeY(ArTv@WD}`FZ*yliAtr zp>*1WXnW)&GWI9=0QOha52q5pJnp02lxHr>e@31UV7zaFo1?k9so%Xb^h<|6zLr@9**Pv&>neLbCMSlohG;x?TNm9;~>*K~Lak zN$gisU1zkpml0lW&6XC|+$1vJ=8I<4wObQ_kl)g}OB1s}L|%rzETn_-zKg!<^ed8C z!R;OK_5wB!8tH z5k)Gwkij+ajDeLMY*kqXp97)*dz1~o__#Ysz-k-Y^eOJ!l&X5F;lB45NV)q$^ubj= zAjuc`(lFR6$AwtCI#d^-qoQ)9%uEypdF9`#^lJUiosi)*gzW=JFU-EY$~}yVH@O?r zmNfI4gMIu-F^c;Rr&{nngZM>ALVTa12c-Uj>prbKcpS*)1+$=n(i6(o6Veq@lhr#_ zNCR&dwrmmS^PZ$^vzxRfN!N(00Uu+5UbonDLK^)0q~tirmd3w~`1kJt`zJWwzx}^< zj(PuqSs~|TkeQ+fI~}lunuWL@u3J;PpBy-W{0*eO`V#losfkxyLXXm2!L};|GW$uG z6L~KG&y2xcjA}JWEH$T$wfd37=W0uNvZQQrLd>G$7Q;UTUhavmqFm?I?3eJW;U&AE zj_^oe7b+*uM|DK*?bLLUXcTN<%=bnsm8#-8t!SsOi+@ytqWj(rqC{8NMpZ{Q>qZ99 z?8pu#WeX0@DrDR<%{|+Gq7~_H&X+IuEAmMe7KSgq?h;28q#yJPAxqZ>Fw~lE8AHs4 z?ocX8a3AHitxEImn7I)F@yk!mh#FLs?=i1Uh*HX&3J9(BeUKbx^=|a24S8-DmAlhE zh@NN93z2~-tA3bpyCp6(ZSUl=GAlE;`&=yeO@++Y=XY=P3DT!Xek`;te9pl1XR}y+ ztFn#SZlE$i=j(7I`O(^G33j|f&+bH#Vf!nMWc z*u%?Na3v8HR&?oxJ1G4}b@tT}VS$opE-{eG_ggIQdPS%x%C{2;V^)00CXrI%%fehBU^97=hoio(j7$c#N{ zY#!Q|Bnu2yfUiU_Wo`y*_q#We2OuYPe^&|^HMmQNQ4K~+ZRg2+<-nP+ChN{xa z73jgBCCik}%_0fk29o;s72oeSH>7_pm?Yu#ZrY=e8XQFl2x>*WmL*L)u(DLw7ZcYK zWhzPfRPm;~tjN9G-22aSbVc6*E6eI7JWfp$Iw?Tu%s#-!4g*oxkVIKs#hLxhV!6Pt z4bW$#ZPe6k!IWZ~H%PU-4Bam1H8oXjzdyE?t~@Y2gwU`4dQ~U|H9YoQWAXPD9xBnwn4@#U+#sE6mvcLunAN^y zoHFemJo&4*Ll(AnsUd+*5}=ciKw?tvFnwc++@ 
z1yPok;?hu;0rc22fOH+|Qq}Wd>m~1QbnF^TBa~<;&CXh`8%(%TMZ{%6vjeVwy`~cq0$rl>vxl694?LT`494nCqS{l+g$9vHT@&;Y_avZplVh%DAj zdgybk%bVus=+0pJuw9}))wkTu%0u1nhMpt>z5}fJ49;_1q0`G&jyKb6<+1Jne^i=J zUWm7IvjLaal2UZt>7WF=&0FO21@g6?< zAar+-D6PYYFYEd8(6cmAF)>O|br1Cfz2xizsYIomM&*0vF3W?@z3c08_!C{ld~S*} z%UJi_=+CudOzL&_*w3p96(~v*Zc*zZ+*y+bDYuP%U!6x@s7pgKae*x9R~MKmR|;=w zriP)TrMZs99}5c{m|9fGm{@%HSF(u?m)1SO$+PJTsG9;U@zAyLh;655bT+_z)C+Z9 z3>Qv4@$<#+`7VPjA%T|+%oF2Udf}X(?^KJRq+bHJD=x}@z@$ysiZr}kAoKO3E337q zs!4vs(U`2F2cYlx(u2A68S5*RF_7Vh_jSUl_KkBQg&<1(siLt1E0Kh-aR?;y=KuC= z&O9?FacV)hN>|F1W9@f^xYG3{Xy06gc+3VsX&{iud;c6R_|yK2j2@_IWyi`y+GNIG zV`op%gFG+>D99}Cc(I@HM+e>|{r~)Iuhs}SOdyf#K71{jO^$2(P{jjc2xH2pv)$vz zG{OJNd4%xp8Qa{VL#wbg!p?3Qhuxg9Qh=-bTq_V~2@Kh7mpkk%pp-eB7<2S67-~Kj zxOdiE))ML}!9;w)o6U4GqyhL?%C}8afy!@y(+8EWFy$#JT9D59l*NVUo`0MrPzl_> z^_4QoL{|#1b8czQ0j$6R78JSWmh-dDm|m(@2z~Y3H}4083+v@1{KSGOr5ZWV{)RLi zWO17xh~3qsYVjym9^>OhdiC|mPp$!wiN}OH{QdGT#$4fafKtYDUZ)LVt=cF;-+_d7 zTYlEMK5zK)DMY(N(2|7HUn(TAS%W*SKL$RR((Uvpd-^UNa?d3V?=c17658O$-t?^ptj!DK{--6B zGNehkn2jrtqThLd?r-p0`tghU1N7g0mA=l`#-hi+{uZ4+LZ3hbw{%+H(65R}xhiLx zF0CVWkka*QA-4yVy0JSj4kR&Tto`ghU-YGD(k)rFHmiqn27Gp-<*Vynv(CGWGzwT>Fl8%kN~@6S+$M^YEx67~=0IbP`;>nsZn1E3t$3lc{2+)e z_-_Y&+it^9rzRKL;eOjG6TX!wC_qqub!U=#z><|wa_=3C)esgPKW#4ZV;fvku z(FyQg<&Hk-S}2>vm5rNit^_>VhEv=)?oq$|PG(M;(~k?H`<~Eu9|w88zI8lZn7!Ru z)Vc$2ja&M}pyNO`ut_FjtkvlKm2v|&{I^|hD9u>9quhCuq%Cr@(4L1z8}(bv^P~z< z>F0jIqHAoOIl8Zk=HPO)c1%yDOIGx<|MN!Qxh2kh9gaAQvXgEdMAJS?%l597cfjVK z0V+vl2u~1U%4&UEJ>%*D-lqO}|8g>lHXY7cZDizN<`>JDil9x^CVd520)&XYKZ# zmtE>ZiSF5gM%+aFohiyu72o&W7nv%9k+B8a!CGKnHd|^oj$rD(C5AU#y4w6Qj;s_j z$RKptmS+{0+K>&*&`KTG{2dl)tMeF*E+`&zcRSoKb?mBeT#uVeg17P+rn`>wnbw{5 zdz}6*Ws0vK-W%6UFHWgF9;R&3@LQWioX-8siP(g%6P@O-#@oTBDmypui%SVT!vl4v zI~3I+j>2f_i{E4jmI}1`0PWl)ymg>|{B~Rwcdurd;4rAL3f=}UBi(Gcg;vwB(m?Lj z^{3u5Y1*eRH8wSa81kzSfi(c*fKM5j2m)@Yj1>HxRwwI$1xJ&Dga<@A)NLcG#2m_TwJ!EiXj|t(YQ?OOn zf<>Lk4y{}cB)&v&Ta0bk%X0>NIw#HPWix^&N)|ZmWknBHw^F<#Dqx^`90qNK!v?l> 
zh!9H~>PUPk*1;2Hlz|({lPbOmdA!S04o3GNgRvj`40aC-TlFt=_L@h%JHQ8J;~Kei zssoOGP;Z{jXUUo{;lC@dB-hpiEG{7S(9r>Zmb#AOk?oPJ__AlM(q6k1h;2;4uv=un zZ|dmNN#+56v%`WpEmdxk-6mr3WivO#6bq@39a@m=XVWB?8Z4rxuxwt!PpmP_7ej6 zALq06=_^$SWXpXt&@|;}C766d9cRt=pYwm-!+-k2f5yXq?7;skJ78L%$@SltW#+(p zP|9${otbqAL@_yua9ooAXUV%89p_qmdwVSsjPCqfGRAx!SV8&Ypu^t?_+4Ny?MfhQ zs#@`1aO~|nXUM50mjz+H0X$N+2+CI=W5C28yY%0ELzt+?#tajzslsb#xi3Y8i2(n7 z6IbAhQ);{n(QLvlr%0b-2V_Y*Oyk&s0U}CR=?WWi#c|P)Ju$vS895H7&zQ+7wqWkE zrpiaf{AZ8hx$;r-eEUtX<`oB}H@FmNuJ&-3r7~+@KbuwDapw(8}|ucI))kab6K6TTKeW~y-<{>Cw!;)R86i_=GEK(2LB8m^#A|> literal 0 HcmV?d00001 diff --git a/HybridRAG/assets/img/hybridrag_retriever_architecture.png b/HybridRAG/assets/img/hybridrag_retriever_architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..b4dc2466fc1cb36a4265f3e65c19da767bbd963a GIT binary patch literal 99359 zcmce-g;U#M(={AIaEIa!r4)za7NEto#a)ZLTY|PgimX`Rj;|meW*Vtrz zZI9O#Pywob`U@R1)AmQ`qmg}O{T0}qYrOHQai#-Z|NlEX(R{1+w*PZ7FbzHZQphI{){_ zBAF;D9ei_Zc_3S~?~*X^isb*VSzR-K{)j}Q#}2G5lAgJ~_4W^e!Q)Lm{k|PJ(K@(| z?48l$D+doBNodQ#|5!19p+TF3IaZ`C7}iwuY+=n11&Fl;t_|qMs@?9NqOM&sDXt)w z1g=4OXY>RhSRYE^fA*ccbj}DwablTJr4wF53=S2 zC)Rd8o?vkibx7Qi9lSc=zxvsr@!$JIg>Z0`PYhOcy`nmf?d3$OHO?(~saGII!fAw* z#8m`XA(ih5r=61f_*skFdzpd%vkLl_@b;kKIDc@)!8VVRyTtfhGaI4F)dQx)7+fK` zjT8mGXni&C!PI?$eSirWig(ll5q0S%&@K z;%m+91CH)XVguXX{J;Ob^QV*dpS!Oi6jG`32EDhUHN#aPj~Cjf!DBKv?2 z#k=2^8FgY1GnT9u7iPUtE|UpB!)&#=3~IaMItP2PavF~9-$zu6Y{FOV|S1%ee@LsuPU0ZJAhQe2uXbe_}r zH3_ill!qOOUNi6rQ}@b7byIsUM{*kZv$|}dUuAc%tX~Q)cl)0NM)9oNyRaXHT}IQK zi!b{jpp(#hN63{gZGvL0#Q+V;z;)uw&_brSx>cs1FkdEL{i%Rj^S^qC0}*Q!j%vcj z{Ll}LS}%LPIER(CY>ycEVCP^AE_NU?)puu$DGB5Me94T<&2HB|8km|0!=Y>}xvvdK z4Ud(ujUY9K5Ys|*@hk>ZBq1S2Q=@?I`%t`+0K$A~Lz(=b7Q$zi*tM9uL)vt3{MAPXkSsa;?q-b;^jv`(c0&rCx;p1-%gMeC^j^$0 zO}aS+IRibTQ)A7$d-7+SY||)+xilJbCeL~7rv0#)6cc|G{kA}XAY(>2H(v&_;_Orp zWc7c|XNyGwhXR0L#P0$}IsWxAZ*(%gI)^D-!6itm$QU3;bMMUOHf85`#~W)g_*<1r 
zKJ&dAt(qkn6e@y*V-2Z(FO1i^F#~$cmBIHJ!uA!5q>Z4L<0XTngo+T)*<(SkKTlKRMMVmD91)x-MdM-!AghkABobS2(m<}oItqM3Qy*6jA^dfez!!Mabiz!>CQ7^1 z5*TJ**{Tp7VZF0ycFL7Ld9dAg|Su?23D6T)A1DeJLgd{k?8ETjPo5cfA*D z;N1EKg{utldmvMNrr8nUv`Gt>F`9`MTq=+~o*Rowajy#KCBZOkC}Bpyb|U+xoGNCt zq=t%8yE77hWSiXiln0s})B>1~%~!G)kA77S{+9lEm<2v7WFt2JanOJK)6>1ow>HK1 z{p%iE{;=j-dqad<4*$BQTjKHEQr!SH#{G?ZV_ES$pX4-P!(RB|w z^jS&x(42^b)Iii0e+ys?Dt)+TFZ64 zN+>oBhe`i+tG0YY={7PnuDBGdpomprS3gCkmu z*Vq9*=`(|#Jsnfqnp`($`s5$Dm%^^?x_g+&c+Nx&N)@;r(kbeLn&HdZ*Yl=Nm2xK8F(17U`d_ShvEC@I%P?U@{ZLY%{FzEgO*u zKcSZKxJOFj!>e~k+?`o~@gzT$Dr?%NvEPnQa5H#5$DBF78fwe`!3h;93p!1yV+5sc zxrQs}|8n_|p~j33r_$S*Ku?uI3(m|vgfs?mZNG|W-j)#ju&=1D#^)nJh7Xwv`~nP6 z{r$5pokm&oq4pMVDJ%a553Vh8r$xL zT1rJlikdVDr+z^I+W+G{v037#ovxGwG@S-BYcI}4;TxAvj=*b@U^$cGLjvyr)r{6W@ ziv09K?p{>J)$HDfJ-6a)&faue`={}jmwp_k6)XQ?U*asE{zj<8uA=ATg z{;25e0}XnKC%D}05%(KdT~;*W!_|();d63{mMeOR-5+m4xU!zZGGSB&dsX0uZpX~w zjkP563#RjVUvQG^srD3S|?=c&aa7A1gTTrZD^O(N=; zoSKdTn{uUX7k$+f76W#~i*8+19F}Nt)UywO+Qo&_uFLLhqL8etd+`x;G;Y&EQ2h82NN> zuj>8#l84jcAfx>n>(ftN%~?6U&=1z`3X-@IvVbiSq(wt(@`>%ix@IHzqDvFM!=c zB)7lEu;Ma~YG%`Bpywg*XBX)6zmK@`sfL_;2~D6RPt;VKPpBT&`xD&V;1`VOMc7xg zl1p&tf4=m;HdAsCYn!{N%FF9z#P3|$=v|Las~$fhCQG`vC3o50Hm{uunG*H3nA?wT z+G}V9!K~?AV5CBu@Xtr;oxLBuDfwil_W8|J6n{|dtt4Z_uPH0^8b=cII0>4gj3qiQ zhr#>ksG5Z_&&vNCmz1|w@h;;knX?M~cwVHLUCZ)%W3CRPZu?9bTCnvf-wxW#xgxzv zNkoX2{b$HTGAW%^*jlmP>`F=tZNoU6UoVD+A52Q!AZR2n)@y6TXnEM@<^iG}U#%+ez zI2h7Em?Gd$lZ?S41kvtuxJP@4-fZ})=3tJp+H z2=KHAEXKvcM@6rT^i(Ta_C=Iad9W-B?<3fpI>@22*aLT$2X9;45-fsDu5)9jZW{WY zx54y{76WF)lWT37Q_o;4Nk+jkY&sVEBh5{BXZGAKyYgkrO*mU6+VQLT6k;iJ-`ONY zSuq*v$Bq6=fppEVdNHiJD=Ir1(=3wp!7(+~R6OB&#L<{L-qkJS(gY}-ae6z!7n31`Qr_zEBTy-^ z=jww`o}$sAK|MvinVQNE100Ig2a5VtdBq<0%C-<@V;`D_Lx{s{3$>1+C%+ZD;RiLa zW3cEkYp`x1tR)+w*`t?O|Qn=lN|nEQ`h-Yrbnny;3tqfj1uq#_bXg#+0`VI zxU*sU=d|Cp#yi&&1ec}C=hf)=MM)W%)qYF~E`=SX`#yg`Be>#9iW*B3DpRVn_SwFo ze^!CW6}C&@fml~sge*K(xm{9xW1|gjT!zxJ%~bTp*6x_kmo@9Q+KN_m-gNFx)-wmh 
zcZ?*9C7y*Q1Jc$8O%QfI-)MdyD?i;U0T&1$H_RG~+#Pxi2Yt7<;ceYq0*+#L(P1_PP7CRmHgJTjANe3ODK3|Iiy!FFe*Q^y}r1Gws z56)$>@TfxrlYH?XS&dW>TrT7Exo_XT#cyeS;pjWj#4n6M;y+&1LDX7Dv-yvM5E(O;PzV~tm z-n=sOo3r$3x?;4MDCT|epAq-Dxolj;c=7$+IZ0EYrY&TyQo>H+lyCXI3F+$}_fx6% zf~Ku2|6grytS1BZg$=#7JmF459!LoJMZFUCOrI-XKHDV{!IfKy$)Za#s-HiL5D#cY zZ*RatMDZ|>3{n`pma8izkukW>&(PyH6B#d5gn?RBT3S82f@Y9|}edN(^thdu6}RB;BtjnVSA=I4YXS!C_be zAG-Sg%C@8&G3WrMT_4L#2sm=90uZBW``_QCHO!I_VP#E@ruej69YvC^1fZ$Rd-ug8 z`+(f=7?!g>OfO0F+SASq;PG1J8a0krn=Z-bQQZ+^=~T!pHA+bD&}3N*HZq>xkls(C ziy`I?B0FFOd20vVE7>Ps%&L^Cs@zSm_xYVdeA5IO!2HDLNt|-$+T}b~h*O)DV#*;5 z7{UYh3EyBh*(L$^I7tam^o7PFRzltS$7FA(mGa}A?yNwJC9@}f?A(X~R{uEz{!w4a zkEF~z@E8g7Bbdt7W^Y5s(N}qz=p46)eoI{V27UW6XM21{Go^bmYi;;vs3?-C=>3cTLmSQCA(E<61EEmhaRw$yZ4$6Xo^6@r6Z@IxI$EUQ0xvs-&(h5LmozfiSf_ zEX&D@!R&X3#$!H*3`$tJQw&uVGiKEBJhr}Ruua?$aLgCxYP*LUp4t6`nl;P`Gv&J; zpL(e}84w`G1heq;Yc0|2+D|bUyieHvx}nK_`X0ShzmY;$L!5KmM^#lyqy5 z^v0zE{!SN(U${$Ur#2o-wL8q!>yL}WN}Jy%wdBoHZb=@-u6GoHEO1&<(=SZEQbNJP zZQvS)31}H$OTS(yaVED~fq$Hb-R5%!Ls9YDJ7_-0W%`lCeilaAordxdQf-DmjYmZ< zBc3aZI?@Oc8~U3Pm=Q!7yYK+GX^wlOfzwmQB?-xSlH|liMQfjPXG0$fS^CZk; zf8bSe02(K_ju81r(tFl#Z#PMst`%vcCA(OlZbd<%+i0bRvYl@PD&uyjI=9!?&ZXMJ zqF$dnwezn&esp?qUAway_Y@>`ocRCJ<<<8aGI`o_*KE#1qT!6D@vofO&{w3a{)bfh zrsp4^HI9jb8FUC`QqM)cP+U>D+uAey9Xz^u4p@3B{X#QgdU_<+941|Oca_#^rWbk_ zE!ul`4>0JmW)r<-VIzEau=5H^CWhFmgGGmN73wX2H!03dDB` z#P?E@3_-ur0%GcCue=zT!%LfBg1%Tx%XYp(H2JE?97!ZW!SXvRkC)TFPZ9uW;(vky z`S7)>yx_JKO4=aEtmzTDVl|c7QQvW!yfzC-aLsh)o?}F(#&bazifiQ~e!GW=>=zqipSN1YdYUiGjWzk7KXxqE&~xqIH9wu{&{*hNem>|S3jeBTn? 
zedtlzh4*RLoSS;qRT?87(fZ)43~Niai=OFk5>T=2w!P#P=t7t9z-g(fw+XQdw07Vp!AkCpvh(WROF?_5rnV?TzY{w9f!Jr!i+4RUfffI`PN>s# zy`bMe4!tUcGNc%HkSyOQfkE%@=CJX{EAWZfm}`b})Xkc7VPj02R<8 zqq{NZP$7Gl4_g4XgtKyC9HBBbMOefA?D-MAmC7v%T}t_haWc2aJP8s z9H!Tj86!1$gi{KC`YEr1-E%;&>m|GiAr%8ob8ZhT91f|5pN4QI;kLfvF!Ca8Bh+n5 zcWQzf=jG+ovFye-Y-nXynGSMaJXRsKO!|>H4r|Nhwle-Y>`qPQH>FE|hseim##MWJ znIZLAI3$n?c8UHl_hqc}5iufmH?aAzF9vz4U?%@67-sJYt!GG(Ftys#=)!vSev`$< zS}}@W7>iBdxC`tIz?c)B%}R928^Yh^tyz;F?y5*8>%;stk1|M|BD`+sDV7$$I(ewWSP!@sNe zJ7fvdE)Rn8ng|JC&wKXHy6t!iems{qK$$OBMRVX}cA(MoZZ&lknj}I>QyRh;T(Gp= z`KpHTeyKa%FAW6L7M9yz(*Pw&wvH!7ealmRE)0y&YoSp>;K-bi8u!aFoyhcJZvA=; z^^>0^M5z>2$R}w|ry7+w6^Sg3to3-fU$Ore+iU+~TjL$zPR^kVip!qjD?d2+k)MNf z;p&B{b(^HQRR;h9zUmVqu=QjFC!>4IzG!3FL$})MO-e*I;3hX0A7we82JPUhW4My35fCMf9cPwwIAF}jBGpl}X%nl>tz+Ui zgzW-1Xml;GBmiG!L; zch(J-7!dJ0ShFmti;k1Us9Q`TWb2!0&kIMl%m7C5qFr$2(BJra4;O=DDvK+)Jon-5 zc;n5-Nf7+t-0>-IP(H7PkO_d^wSGpQDQSO>x#^xK{*0^&7io@Y6k3R;Ki$12$icZ` z3rM>2Nz{@wfl3csPL5i(ISeTHhPX9wk@I%KF-O@Xf&|9JqIZuBo&&1U-%_f9Y{Gl# zgT_~Lb{isVd?jL5NRb!!K*bBJGtW+7KU++IV}i#EKcD=J?tZHwV;_u=zM*a|k|m#9l@wII=VA&@!PszE0X@m0%}J}FjGKHWvzDvb zxBHo(zW5u&x=HzJ4fiByi|^c= zg^sKps6xEvyk<8uVIR#It9aDiqdHs=uK|#Zz{l}s?WI@`nvTS2U(SDmj0_TNuWS{p zH?2`qy|n-On;_i*c~!nuPg=^y(bjA8nU4F}23OIr5DWgh;JhevIMs z_f|deyS>R=er9tcEVFfX#82X=blG?)PMp7)nsTtlybH!EvyXbUnK$+!@D}86FIy4f z-#TfrAqtSWIMw_Icf`u2o&`U+r=+hGx_F!3z2W0B!Yyaj?!V4x?PBi!wvzS?=(3%K zhH?n4f{7uyZ0GXoWO7&h(VlGh##ls3ks)!s;dm}%kje{#F_{#!;qPj-M2lE`>~-8+ zYo!LiZ&I(a`lDP|af7a}#Tq^xD<0L(me_qj*hTtFpi4gl3kCkd;n9_n7n%Hk72r0v z8vW_Vy0fhLs{iG$@+;om2Onx|$61j74wqyLpku*S=ia)7U|a)=PdVea%~|mTc#r8R z;VbSi`M+xiJJtU>d}5R?zaKTY6&ai{vZ2eW=}G5Ylfyyi!tE>9qd zK&4)+YpZf*Ec;v{{Jk^A^yFV~Q|ALL$`l-pXf*Q%DR+Xz6EFYY671kz0j9 z&{Dp-?Hy}=STR3tp~>oE;k-~c@ZON3JNP2>g?yyen24D!5GSO0^hK9*eUDvZ_nvFd ziEB3zb>zp;@Lb2yf!{eED0StUNaZw@pYBVwySStPJJUP$#d=Upn9H5tV}!3G2y&ip zTCnn5^e|b_lzU&NEJSvTEk|bVvyDarAw?2rEREVsu;-0io_uyPT`+1PrIr+GZ1iPz|0QO^XTQm80rsiRzAO4Qg2Ywc! 
zt8a$Vz8Y2G8e%UB%%|FJlEvMy2`47jhDS(?#>078h&4L0KC|@pn=eo55dxI^XU!N1 ztfS@V*szRQc2t$S;V1p3y@@I?c>{q!5rM-`GT)5o_>FF#OG(vrJvf!V3HgM*mkUCo zAAG#$PydIXo7LpTpx3d{t5~aePP%v;!wFKAa`xY+LZ{wVO8a^gpFRuN{Y5Og!}T$+ zliK#-Ssn;(#JiggoF@?CM#Ic5#>h-i)*dI_&08sT~_= zvV=qL;azDVzM*1P^4Dh>?dkcTf>1(aZqywD&GX@2;{8AWKRo$=TDWovxq(h1nQ~r4c$IB1 z$rELKuYW|Cir;!nRPKVhI7PQPfO7rllS>|TkoZA1po6lb@BPUAn*ZTK6@gR&8`l6C z*%PWHq#_xFATKl?hgnt4enMpk>h(A2NJL5QqbRM#O~mgA<7(_HsOVZ$Y}kP9$AO(N<}c{}yBj*3 zasV%kLCum#2lrH^G%fZ6RU~VEkufm{5+qBI07*X^Y~$KZN{%($Qa%f|0QYX-D+cDT}y+VR)ukx1fj3amW9#@~EZ~a-3_6F3V zK^TK&EIrC}F^(99FjjS>3(pdMHTT@xnCu}zG#ePqwaXI^su^QekiO6b2aVs_RFFEI zya$tR>XO~2iS%OQ|3&)NhRnZhSX<&#QFJ$sXOv}4%Qkrim;m{?FW(61#gf8fh^v;{ zxNyfz!xR%-Us#%Di3d5JDi{CkY7mc`ZF3V2*u6&aa-0V^_ag;gi#4Hd>bb?t#NtG1 zn|9Zc&%Q(o5*5ejD0@Ep>JNTe3bKwiE`y+BZ|q7;5ECaZ2GHXnN~|fL+Q1yO;l=FQ zH?Jc?o4&$41qorDhi3keal{a!6+W6G6B6*mt{O`d!?p4E*EM5}ksw&8{AIB(EEO&6 z8fgYE6kTfFv9@t;FvU$uLQ%Jdze~-z!Hc97>U?2R7tkaAD2Hk6i6IThhTUv*Hr_=!&Y&!8bI7(_@S9!g*Hzr3gUhv$-<2lz9#r@C z+p@ipG`ir=T6__d#(3N>3W5yL-+$|RZ$8fTyUdUn=~mIkpqOf2vm)EH)1vQw_wA)r zo2QO~G^ZhUbuT%4|`H3PejVO({g$2 zzdIC5M7_h3USnMS*%~^KJ$EskY5ztJr@Tz!(c3a$7{TxH3|8)Wf9x|^l<_?`YF{>)Y$w;96fL29?U%3dpM;R6-`%6Gg$IXLLyp;_$bZp3 z^-7XCL8TI+v<-_UUsB^@EgAdG-sbiv!?b8wMCIu`d2@0(W-UB>QZh=7@>P}S$o={bs_e4VUCZ`99SM7RI$+XI5S&J(FRPTF9#;cs%GUoBy-DK$m3 z@5rN{SRO}OC{;!lgkou5ER=H`T%4FoIU{${!j+3~SIZr!Rm(k?P|G?Etgc<@>5e#P z1<6&0mcwJV*m5ls&ekz)XbFO^5=wKX?(wp_-_#SdJMkK-GP9@O$c^c&pnnlaj;B5H zwCKEB`k`QI5qG8$@Xk>w6|m;_G415+fSW7B1761sjEAL! 
zKkhF4XMjui8+CVb4NXHIK&aFyBt34w@gaA}>dG@egT)z1RrOl>4j4D8r}l%{cxe@Y z_q78t1KTo6@5Kp%N7zB3NFz5+pfKnh#xTi9Uk?lKuU0KV2g7C(+c`b?;h{I&9C(q1 z%<&NZLY+8f)41^MJcwFKZ>4J2ma2t4&=(QQ>tCD9XpTC|`9G@rOtq zmT;R4wLy*;RnE&{91{UO>EJiqCAJ0^f#+{7e9mO*$_#O6SG0ha$DF}x$1P~I%f5JE z!az4Qj7hF{n3pFkEQuT5OOa~FZKRjS9G~!xJ4mC4H6@vmHUqdOe^lX7nyDul>%Q9d zcQ9#t_OL7Td>-P$tCsca2)FA1gTZMI)1&0_rT$D&bWs8u3d{&N?9|6Q6@gFdRo*}G z`IpFd(YlK5O9}d4^gcv~Z(v+o=zB=s3}USoup1?3B}pRs89j`0RxN47=Y-#!>j9HV zbUUrMMse)HArr^=Z@r7X;Ahn;A*fg3NY55{!MNMJ+mlJq{>Uc6pgWYHN1Y)M^iq&t z=Al3p>DubyKBklIaT#>?nV2=F02FALn!VDwGGmQ%HZIdO^OvdKY2;fqp<^8jat>)P z&H>EUA>GUYFK;4TQ3#H3donYT4PI#Y*Hp7je})gfREA#}z=@!QpgcM-2lDCMO~g|V z{cG%~%4YsarIsMtK*I?YZ4hqbusf zu#HFc+11F;TyubvY$9J&)_XtXD5|-gKo!gH8nV zCx#aYWwISy2{H_r6;tJ}#g*F@-gmUj><}bKv}F6;JZ)iP0qocjTIHqgZ&R%(#WVTo zyt|Dxmp+^|!~VilbP3!{L)&I&bX7V@g?zZAp6LGQj*AEakr?c7$kq;IzRqR6&b_?H z`)-uKZ$o^_joLONdAV{D2X-SPztT*ZZtjpv{rie0Pb)-A=omKd!uLgUKkxfPWl<_weett7krfPm~F%ick7`xmoXx+)3GiE!+XqWGG%TQ2XH6Vqe9 zZ7M6%f@vyVmZlOvA3V>;!^Hpy^!Hh8JP6G`tj;)$7hobkhbne)Y0zx628PiU`g>3Y zRdxtbQp|a=zDzh{^)>;dT7tf{L*7lte|WC*N}cK_x)20?P+00l{7xPG(z1CBip0qj zbl&=PHQwD2*ulzdG&GA+cQYAuzAE}7eX1W`lz1U$D)yUF`tB@Y7d~iV!xGeK^0ccD zgamvm;a+vWRRMOef#7Y%1?QlQ0QhEVy*xdP4c&YlPpO}Efv&`HZyXC;W z%9)KfBh%%{;+vF^hg(syJ68>M8Ud2!K_U3o?Ts17vQEsO)+0{%kyD;Lpw4+u#35OP z^uT&73_9%fT;U!A`$e7;5zII9G%cMn=uz(C>AtcsVgB-_g8~9&OA$N!VwG1Zx%a)D z)UL0ot!`S6Ta-{xc>~iudxVt{7=a8fKbexK{1v<0n~Za1Rmx0i*FK1h2H@;Uymjon z+%_nQ(c=wxE&T)Clji=A*Hbf%UGy~kQ%S(|Opz3$=kl^q-2Uw2j34+f%dJBJD#gIpCMUpUd?{$I0kz1)$4+-J51(Z;6jVzUv zD#~Lbz{-#2`3xZZ6kZ@D&c|jzL)3EhOp>&Vq^iF3<_HDWtcuP|H!LDI9i1f8oFx@x zRw`L4BbmC#A`ooU>fmlg&7T(IAo@CY9aq?Cq^j4=Exmd7yBp?Uf-qz3_-;5rZ0FbY z8PhcJQxb18?!HCvNMo(KXF^b^x3S8%Jz2X*VOA1zFB&&ebhtWQ7fYgd&yAsczN-Wf z{roU?`|Ru16Go|h)w%@&=1y;R2}9zN0jA*BzjmZl-53I>ZaD<*_(*>~00_n0+JQ&? 
z&;)_^OLmQlRrnm&)oA)+cP}YcuDMgSNWjubM86{Lta0{ORX9 zVUiH2mKJ0pBolt{!Xb@rsVd+Nn04i zRY#x?B?#`)wR2-=y)s+Kgq5^9DrV5k0$xEtSb zBz)!VKbQ#r3lkBxoI8=9fu>^|-}heD;t!7Wrs|z@TVMEq?mw}{f_Ydd z*+|&?n!1q*n%&XNE!L}C_x!LC5NV zGH!PhwL*e(D%PHeqgQ5vi)%s78eIOEt8xP@UCzcO>;>M(AC-}uJD1R6#nB5H-$@G7 zZ3(G!Tn}yOG<<3vedpB+;Yr|1Ju9t2g%QGtcZXYmh%Xoy7!EU>!5=Q_>UeuU)kg)Vm1h?R-=pMLUb99CvsI}$w z1M85gD9L84*pv)6q+4`q+Im1MaMuMLKMWrvUZoSd$Uh$9!W{*kiyR8k1)9`$&_s*^ z!)+BlRVhKK_ICJLhs9y~>pT_q)_gCGD9Ra<8%@ZM7)x?In-y&W9z%7Rer0Uu0K{ssiC)9Y$sKQ&!}wbZK7qlWT@5~vksEXD@al%Q1VR*xf?m|JdB1z^(X zcVB%bHU^+?Q@sj>IrWGSH`q_8Qg7IF%rT$acEeparUVXlgafAr+im|d$SeNUGyf4O z0R>MWxjGc5_-kCoTo%_VnyT=RGacKbAI9Si{;puue{>F|99w(b2%{22N)zm>c8di1`ijRAu3M(tgnc}+lJ{T|*%qd8Dkt$>Qu_|fo(&xmz@t*_*Zz9WEC|18DDmmp^+BS&#UuK4 z|G>a!($&YQ^QW@!93m(;&?~o~0uxx1Slk};f!nm|Id6nTPl01;ZKNRWR>;6dhk#F)c7ZVJ|#a9jOdTMsoGEN2J}?~Xu1%O zmcxZ5o8=&8QuAq$XQWqIH85RXuV&~biI#N3UrwjGBrN0Q*BKj^<`&2DPjc>RU>Xiy z3FvKv-qNwYqR2cn?6ZDPC%{XaxCnut)ewv6Ump>Cac86(u(;t20+A=a-&6A<5hyo_ zUW48;up9s!_9PxF9t+Q(w$l$&!2~cPRLzsvAA*-)*>iQvUwIIjD2FA&3h~xBX&28M z&??GTOWUYfgjPq}Ekijw2Aq}FB>$+&2}#>?1OU0rpT}Q4N70@*L@SrAej|X8gJ~(f zt_rv1$IY8P(iO5}PnCE}U9O}fIS`K1g_lkUd*j08*m}d@0AX)wdqCAsV2Q*f*!tDr zhEi9lNkGk-B(uY6`84UVd=;9w_5rs9 z##tamw~UeNr=`J{s<)*(olgt&br?HOa1WkI*>Gz3nGSi)cfL9*qT64V%Uu>eb0de8 zrH1mfS;8CMsS|6wpM3Kzc!!tUjC%X*1O(xyeJ~ZLeIovEUi_E%uL45!UgWuMf;FPo ziK>u=ec`Sb-VyV)Seb3ueY`th1l?igdu56nOr^NTO^v$djx4~Jqxe}vnu z1k~N}Odpa@$(Hlc3b4teoT4TAwl9a*hR41%(O)pw0Ml1HxSEVjs9!BfgF4kNO z{;{yokI~p_5IyAF_!KmBeLd`j@0DgUkBQ!fmY+_{V=!%QH%?=to==gA^}g|G5#fj? 
zKr??AuEmOnSc?yU83E2Iggc={62RXyNDyxaY!R_Xp#C?u@M+*hIp|PSo0SXS7&n7K zF}M8iDIo0imY1mairA7rwp ze-C_JXCJ(PbKjgV7wJ?J>M!Xi?3YJWs0StuS^jEw^Svn1#E5?0q$>!exC38l9ZS%6 z{ulS3s?Q4t_lxu(s!ZA^?Z|xrbQq5WmOH}O-HxnSJ4$C+t%dlEi6h6r9Ajo{)s#2P zpuC%bY7b8To$Lz#O?G$mrR!xmfaD2P{VAn}xdG#v`hu-47W@v&DLcrcmWjhs3lf5w zJ7h>~E z-V*bZKF@x$>GZO#s#dAFfXr$UTesNRf;(M1Sgm#jA#u+ev2qtYHM^D-S6ib20pSrf zHeC42&2BPv941ImV_zb7+1OCaVQuC8NsQ>>#3VPZ1Pr(%%JG6$MHDXZfbT>Pfkx9G z#hMElea^a8TnS84rQ=(f??A;x%KTn1s=?M`_Uu>pyc+#fkhq7dUuXC+F8U) z3NjMlVBAP05kvh+x+6I)@e_Q+WA;@|90j(i^a)qB>kmu^-C7bJ9#?sJYGQ0w_4)Y2NP@hcc}4q8TUhkU%D~!KvR^B0nwp(+?kou-OT2N5N6=C4BIrK>?IW ztg%*1Lr-dD$?oZ7B-!+;hL+dzuQgq;FKYw4VMu%Fba#slP>VRp;%~HP^WF>%Xx`pGm$ zFH}SBq&EAk3=^}!bnJT61}(RqK_^l zQ6`cwL=91*x9E%#BzlzS48n*y>I|dYBfo#U_j&I5zgPFDw7m z#oqvcXPf%2@Y>{-)G>&=i(wW%=`ur$&Qe%p0mAXv81{WK4 zY=PVScBv{M;r={i3k0x;>co7Tj}t6A9&JJsThk4!LH7M!jh^5NyY_oekDxcYHeL!0 zCQ$p`NRDHPC(Y}NB5X~3XA?J@=3&9Hl>)Z_VhDw&R5J+Cn2C+Zg$?Mi_5IV{uah!X zlu`*fV8<_pQ@ggEnV(1^^}wq}+u$e#qqSaXa+#xV?4{1e?yZ3OiW6&YHs> z@)WGMsP&|$W{2PEXMa}(eiTJ%Z+#JMcKuh5vP&=i5fG3Z#!_PIG)L0 zl7{Q1HpG2NpW0=bR4Bi{4JdAWav%LQ+32FE>%I3jC8Kn&wE)f*5A%oF z0dN>X=%Y*-&g8NEWj+%yW;`aFO_cY6fTcPyF{Z%hoal#?SP4diyJ(dPs>@|FbJE{MQBV1g{aoOuq=viv-ioBk`gzuOY=kznzvU;c(jR zMpn+gy|atCc-n2@aIN0t`^)&^9M{rm+)@LR1a1qvYqoEgax#T@D&fRXB45kXwHUp? 
z9tAx?`BoucS7k=5y(}Is>&|t2wn6`(j2{Jbu#wElK8K&$cOSmC`w?*1Frd%7y{z64 zXc4C8{^&Sc*eT1jcvQ%47A7$fsY$W+OEdFLH$CIe+8Ykjujz&bCe!Q#X$dV74C0j2 z_6&Y5?#fLfLvqI$>B6ube8Ez!s~c)Lrm^r~Z7t2~FVy9^E2Q;aO%5$(BYFOxb-OwG zj>A~!8~|xi;U~!hocWjZZdXWjzwvj!)tnBIeESMi3?405DwrW`7$_NeJZm1o+ zbeQD;*Gci@mzxBly+{3~CGl^%8VhA6$H_{(P)y&=zq$$>O_P8~&cRD_uVxry9xhNv z#&D`quJp+BDr54{ouWN;rBhK~`+iyW{o3*X5n=4TKK5TdyzTzTp2qNTO7!tEzfE6$ z#$<~U>2}SXk{PC#m&j!12Fz!B9#M(M5!9^K497&6V-ODZL-I}pr%fD6T2znpePhV$ zt3k?P(p+}L9y8#@-_&>0J9NWl?66-_-El-B+_OR}M1utH0H9xYv|fw(XOC|7FG_3I zn!a(+zJrDEo_uaL+N3BHASV1Ga8rD91>rq@C$;-UYPzo8K-lsN@-Qk?9|S4+j36|R zAOrya>#*XD%A&+TIFO?^uL2t;o<3;#IySAxg|e2UjRJe?t$5f1|JHOKtssY3tLqHW zmA=_X-=tm^o+IvhX@aZqrD&6|NBs)^@uN4UJ8;mKx#4`>u{Tt61TcaGPX-#Pz`Ne~LcE_8fQe-e8%JR1G z7eNoZm*y3W;-?{-*Pk+w+>X;hL>GEd3{m9^vu9U}R4@HRmX2?R@}-S`go&h477F$n`0C{z#>dst{KR||r@ zv<_Z}Y1Qk8EmS~0CJV(sh!RhyXD$*YVP*`?f3d+r@!L#W2?C?KAqOwDLXG;yf)ewq1Lm|FHTZ2(!lvTIk6WMZIm3kH$dAY-c8xg3g*z@c!z?&#nqEMM?PE0h5&CmP zg`!WCetZB+5IzkaNY)k&2S*BYs%1ETkfS#4M68lGov2^?WIXtgE({IB7=}M5UcJ)h zr>=Nfm~8TEs&ycd94XHpWG|w&5%WCfSt-k_XO;JqX9a_;=SIj#*%`R553|wWKlyh4 za!M#`8rP$cU0BiP%<}4M!E9T!s4{hadvfyY?`zlNSWk(s4Vp$> zVXd`3uMx!6Lj4}N&4t%&Mt=V;M*yv1Up@(rZ+OpdxwAbt$=BX~u#<0j4)zM z&Rn_uU^C8-8zKl!pe8)4F?D%UYmHE5M*o3pEjHs-vHL&*oib)>nIA<3yGC^SDV`-< zP_D&1kUrdO=IxSChihH?b8o8Othuv>bsw0>m1qv512k94sY+OK$2jha@Odvyks{bs z1-`Kab77*h5j->LTitxw995K8LFXb-e$R}CcMiG65wB7ePjY-x$%a3zf{Ac_Bqv%l zxB|&6Va&tuiS00lix6R!Gxn3#1%luO+;ydz?Ki5CF;@5BnI7&lZw%fGEX_!XNT6D1 z#Wy4QQ^t;|9*CMpwCt~1I|FA7*CS!g^ChY{$9c+XJne(Ak3Qs&EMUpQJ{-@DPEO?8 z(C6Nq7yz5r@cf^Q7tde&w{5G-qk1Zk-@$L`sx_2&cxO}HgKIjC+(Aur3D~qWG_R>^ zbT@xdvfBH;BhhF>+s}z59@z^?lG|HU|Dmd_96I`L1-G;ueO=yBJmmTvw&H7L0NfS+ zU74ItIH`gB49Q=SVWW8p3R4UjRI};2M~L}F`%A6IYkat&dz1(#w75_A zb|qbj5VskVCW(HaX6;yQ@Awo#G|;PmFRjYpXS1#99bwjRS&NXDF6Ff5S*$8`#Pf>y zw+ycLKT1;&HZUD?kVQ+oMyw`nLvwXkl33?WILtUoBN=V@xeCkTx0ugSBf9`%Kr48XG@qki+c%y80bJ>p;(t$qvi z=}QS4C#=Mx+tbpB1(dv!`sKVT?9SBsId<8b1s{48%)Y 
z%m;znd?jgeR}yeC;f`Xb{cwQ?F(|x0O{-OzcUEN<&smRahZ47Wcym}~JkMdT8iCbV zyA;s~l55tN_Fdjw_83!(9aXFAI9vR7ah!e9t!&q%TaFe$!Em3?ghJWhtcP%+d2sYT zq@eG`v`fl-6DLH3=~I5Q3jpn*SrPhV^Rm}bq|bMpvok@p&k~E%uW0|k-AcgbfM~nl z7J9yx=%}+r_@YRgBtvLE#y4FOX>Cw*i!odZt^k}_CfatAj`Rl=@wP;*vvoiM80bM8 zGLxje*!4SvT|zb@{~q_V2`y05{9(wugVERh=&o7ZPXBRP1E*c2;f{#if@a9oY8sg<{9H9zB`1*vi`OIe_&ImiI=b`lc2IIrHr8f)S^HV z-LYWhd4u~crBTuDhzN9_$1LO<>AUd>=hD;Zl9_n3T|=4ox5aHns5R5XIeg*7H08v_ zA;w{X_j|Y`Qr?`2a{9`AZvCzADaY`xs$%rUaB?~^^igeM=KGJacZ00y1lptccud+L z8iXOu!m;biUtm%OS0_hQi9&J#NApP`|7e}EuPATrVlk-DqEcZ!NjQEri9@-mC$lT8 zTcpx!sAIt;5KXqKk` zQT6~N+m$~?KyY8>mcSuPa5aecPkdBF8_FT2ku<^Eu00rrFx|;Hi@;R@;Q~dB} z7rO&|1!T4sL}Bm3-6lQHbkpr`FckbxZR`HOYFnt$)P|j-IJU{rYJ)%;=$^D%cSn*a zS7s8Gk*}x}L$~6?stmH&Fn@7Au*T|O&+o-tahtL0{i#x53I+m@r1w~CLvkE-$#esg z&OGmAHBDP;2tgtR#AT9|4B7~y{xRal0NseEKi%iVfI*mzT&LsPxWzsWb?i){eQbNt z>jfK+Ts6RXP}~tVduYmh2&y+RDh>|^m z)g!E!M2$K4MZk$F z%oISkb%?kkL17N6@22 zI5_wqZYU-rad%xX>^>Jj#wO8vb(6`Xx?uH-PEf%|8cCA`<|n8U*%49i9g1T#l;->) z0n$^E1P6v(u)ThQV0oss2SO!OT253Mq$s(70$cW2J{pun)e^M~^d+n2-nJXQbAxDL zCzN1-14drzJa(_AC-cEXK^C+QZGI2SWNx^viIt}n)s5SD+{^N2HRJkm3xF-PUcJ4YcE3gReVwjT}^+U5kv*|SMZr!7dv2&qS4 zargx|f}hFO5dr|?EWFfe=U>EM*FU5U!he`masB(#6nNNf@<#0e+vYy6b#WJkgMI7f zP&gkSRa1%EgK5x-uB0tzhA5h%AOF5WqqiIMF7fRLZZz5&-U-jRRu>VJzdAV@U%om` zvN_<}F%Y~c3NtmM&7%9>C?|TR2*jSxMxz2dz64Gg){Y}wmaQc|?4uAOF_&pzv#_bY zhMiJ~%&~zb`xhj z$valV!dE8%Iy@6Zl%>w4ZLxm-S>Nf-Z-u>az|%pUbN2PLaFv*GDVCt1x+e zMG_1Y%JY*o%qT>tK)Ffp#L9>KcN3Vt<%O)A(kb2aFD_I$Lz}~SOJby!hhckcqo}v< z$M^E>>tQq7VRoI@4vO)jACuygHz<>a??ohcuV~~x`+6&7woVD=>3e8kOiEu#gr$HX zmCt>N5*{2rSgJ-?sOAEd>qcF^n&$Fb#)hr50E)DGO>5>xy%?z*6X62vM=g%9br31W z@lO^%=5kv%eijh;sN3uaRI1qVOGt6DjnKd)2ufDR*7l#Gtz^ z>l$SgqD0AvEOTysKH(npv!%8#C;!vrMW9NvLfcU0Pl z32)SKxlK85npBVnp+(#Bzl>HoO;fw1D6ntvKj~Z*l&rLyc5z8ppkgiP?--n-stj7x z{RU)jdNS4PmzIGVE9jI#-Cw3kR3)uuM_+PXJ===OzmBhcSzWE4f$;R>-Qj$-D-X&{ zDaz9kTq;!DStu9S*I)JBIb}+-0Qr_Y{Rf}E=+cyV{PPz9E06C4?K#1Y&YPY}+ub 
zpp#^^r9fef8cNHaYQ~m@#$wvQUI~|EY+UAx7w{VYp5KfjBfvgrk65|p011wH%DMNUMs(N< zHPMnk)+FPq_xJylc>P|1B`l2|QAe~AK(L-~2E@I49~ZYZRR+IX=4KjCwz?-j>hVUd z8{tt6ClNcUp_b7by=N5S`{ITsi#;V3bVv4Z>l!A2M>c2xYYf4Mwd;N(5{LHD=fdpOWlt zJ*nim*+WFs7jpMe*}AR86a>$WpDG(?{E&Tvpl`%(O(&OsdZOfYp@Gn%AJr@v!~E9c zfw_yqC*0XrrrP%?Q9`PM?HY|x+0ueeo(Z08cLcuUk0QmzCgOc~OuCJEBz@dQ}qu=XPaWgG|&8|2k1{bJhYf-+a`{&T|Jj5{9>HOWqEd=0`v9!*%`-8ijt_u zt)?0ThwUA*>ozvx^#B}EM2Bd!=}5UKL-2MtG~*OVpFFFhM9G746p6Yx1PHS%YWNPQ z(u-s_Q*}FQIPX;uV!N448>rhQpv`7PUHn`c+3kCvz}haU`0^<^lC?ZD3jSuPEaCX8 zY~@2Sg(Um&S2K*i0$pNXS?uc6L&=Y#QccIY}G3>D4Sqs@x)>p4Hjgz zkRKNBs%Ld7{Z{p!53!pXYUNXZR$IOuL-~lP%Jx~U!PVJ|d2~)c(6pOW=BsjixEGJv zxpBnA^fy3T61qI815v>lFxDGavcFP&zdfgxH%ne0v}7$_GF5>4of$N|6EFR?S&6|s z|2KI}V|oaoIg#u>=E|B6MWLkPOVo3)$$ieMoM+oI!z>yxAd2o<3F84@uqn+`9O2E% zPOB6rVYODIPD~F!;mdfGd(61o{i31RtU#>VJ^a&g6 zpC5X2wLSC;8UW(kv;9jaaueu_Gael|u>p7>NXc1c4^4sg|Ab_`>$Q?BIg%?oo70!ijGbdxO~9YnY5Msef!b*v=0cJ8rnxV>FC5Fe28xYaQVa>z?PPL2LG?wq6Y zdJp$WkM{6gc_1`QimZujLn(_x`VCQRcp4BjJQyKot&6<0n|3Q!UycQ6>cL1ExS^o zNP~0cHIFy1W*3bYAbmM-jAQHNTB>aqQ#vbLxAGY&>;NG*chb^qi8bQq*k`wc*)NC_aZAzITSF37< zyt)fp$BMbKRN8mjpn1nvotthx&yfFT1hxO-N5AB6!BBjr40+s!c5I>&l~6i4SVo); zpPn1zK0g3`GztajwVpDVtPZ>s@KU;M_FA1mUQw3}n$Ikpf_Wrv|0bb#9KF^e5#et1 zN1ABXdCpOzrM{OwD6?3tm*V7!jG5@&ExUIUG1=oU&-$dT{z&KUPHB`ccD`HDMr!g= zhwRt@ye^Rok7Wcb!J91%*XhPCuA{0sVI7un-(yVHmx39D>S+mE36Is7LwIAQ)b?cG zh!GNR=%X2MXCu)I*Sn;5ema%-Yy(^t$;rQ_-d!F|fhMv~cSTB-_|xXTgiTz3TtM=f z#JKa*NQ#(wc8b`qN6hXe;F(b4gan}LDaNiZH>9Bo;1KFu)5E%Ix-5Wy#u{?dF~#r_ zV(%b1Uqh5&Kxf0Lc#;x%-2^SdWt4`{XL`_Q+Z(^Q3qpC5ErM}}4aB;T!oe%`it2Su zsD(u)3I}*Q5C~SMa<3BqchU&tspwf>}=SnTpZl*j5%$G?DV1^ z@hW!OeRe^z`d0%_PX|bOzXce}v$vOHIpH%YRT3iE2D&@BZ$j+BDP|rGr{sYQi<7Lc z|5${+eNHQ44<~ zJ6zvK-Ja59S&%Ip;AhQ^CN5+boe~BH+3}tS~EQx$VtMo?7py*RT z1Noi$0$!HnoALa4eT$>7ZsFEj0-v6pUHw)vMc^JmVosX{!8~t-$)@~KLNlReHn)e% z^Do&ueYh_-AE0_*9BQc9B+-6WA4{q^dRp;eG?N^`L;s|5DDBN}OF1Gb*;*mWjr{Xh z4=rHP?jNPP5Izga69QQ(O)<{dsw?>6LQUB*Ap@4fz7JhczDG-)L61<8O&UvQP4z%Q 
z_))TIWEH)m?k4~V2OzEtilfW!a;hz$*DcqeJ5~>hwJJUOJ-)d;BdZ&ac%U>Ikb-sl zO!qV02f8wqb^EEHxzxOk$IFnrmpsX(RL}l1zg1Il4J*gGYr88ugp#wc$`2L=D^z(D zZi7TN{rt-)P00iT1)|w6=1dHwyO;;6H>^haH_aNU(;WKS*E#uZeth?QiW?yBUzesE z4Ww-wgDkZYOEe5kq$t{guEAMiRu;6nr~fEf^>pyPTBxB<-ZEKyJr~3KMH;o=ww2gQ z7no`^Bzs_&u|K|N=9-pD307m89~pOBc^T4g&R zN@N=_;yH=>p%)Hu=%f!@?U! zoOtTo_BZbRTXyy2;H7W2w!C~iBC^bauT^-DQ;9x+kYe<1PO*pHnZd1vy@)Vb>6VXy zy4mxa2%?}$*^JfH{&(Kn(*%K#L~Ru&2Rg5fs~O%8fm{i|X?RIn_Lsc;?8^VTMu0zg z!yVw;0Oq`!?~?uC@(PViZdL>u!7pFWzge)@g>+_M&->RUmvtEeCT6Na|6yO&*Z>$i zfS361js8*!U$UMcmH+x)jg74F?QoNDqKrsDT;*`+-4PlVNDr3cI3+0Nt0ZT z+Lpp68sPVq1TI4&oYLgqC%8x_Uu*r^W9I(=iSaztb-QllU_7EF^CV_MMdm`%=QJ7r ze9iHv4~FgXnSm&>oB|9{dmKAO9uM?4|USF#_LOSWeokV8EI^2UjFbuarC7tySyC!e&_<2zWzGn%SVs{?~=^?o>RHpT?{M# zw!S8iPyid{zpm1Kz_kD`#J{dVp8wCoZ=~Sfm7e&%3Hbrc*yN*21N!@_12gvJ18091 zu-+1NgI9$^y!n6CJ&TTusu|X~Tr5B*=x^kEX_OgHH0Cq&&Reo6PLQjo406DL4bm@R zUy1)rO6lm2y1|!%NzgB$lAHU>hx)0;#$EdV*I)*MLW($AfCKF6rBT1Suqpj#(&Rj^ zS_I3h!smfmGWquqPQYF=EdrDFex1(V?bbfG(l$JIr6$=qWXvF66~Jje6@xhm7jsW0 z-Nn2vvLwnsVSG}sz2fDgf_Srn-HQwJbRjUYY`WW_w4OdEJZ-{sETj?BBDk3|_$h!k z_LlpJEf)6z&FGpoc>`_o8HX+PM$Iko@6Ven)8%h+Vg<@|wH)0_p$ z^@Tq-JKjFJZr>tgRNDFN;b?{3#uI~ReE)5Gc6-kLv(Pcs@a%>mpj&$?_7)U>`tVh3 zUD1Zny3Eewh_3LwKEp+U1_AF?(QmT>R27atb%l+qsMDm}RbdeZy~m+sxJ5dr63!js z9s0=pmc*ay^A({2O-!p0(ILi~7n6DA*2LLV*r{f+MSO_6xz=LaPkt5R9g?hbOlwN0 zv^}$1(=4H#x0357dLct6bHm(XsnWG6GBnq;ARh1j!9nTG`HW1eSTp@8kM-pi{~qu; z|9C>DGsfGCH&aq>4L6dn2(9%2CWxbNPTuC0KUKJQZ82^Gc90G$wilxB!$GE+%O1+byz$Gu+Xk)_Z{mJdj`KD3 z>x{48%}?{(@|nnWX`u89fl|;}^FAf-;D4)S`CE@xl;Z4~(CEuJeU9po`K+Hdt;}ht(dUreajmX~h(9yLNkdIdXb9CSf&2&(@ z$%@~M=jQcDz4L^b+8xNX>lO3IRLO;Shs>csW@U^_-<}`PzgV?%(5_xDRzrVE#MYtn z_4lfh-`3?z_~+S+OD}P5oH~5JElbe>a*jvJGY(G#Y^yfo=Y#z5bAFhmKC#7wrtOX{ z#3>aZ-PEX@YNi4i;`{EMff-En0G~JTX18F^d+KpZu|Mk;lu3G{L^>GZ0{nUG@-g^H z+Fw5!9l=!r{R%y7TXu@}vcFi#^9y}`eF~EqquFp!oE=(je)6l+wZr?7Ge01QQGTL6 zX|Jf@mT7YoqjjTb-ss9j>*gbc!!b$!{jEA3Y<#2t?@H)y)<#(uQnZQfv65?<+Ft6& 
zHyEq~0zCx3*ZpjEB=PXXw#`o0ORn|8>fP*4lTM!VT^}~cxjhxO{n#3YX+9Zb*;9K%Xg-88TgWs60 zp}<@FI&Z7VNdj0w7}<{(L;tf|hnA8fMU)gZ|*4iY?*f_?JKkqb>9T3p`e7=HoDIVr)9vR71F#c@Bh@bpcvcB4?@j*_jQ;iTERpP=|*XYBv3T z%b!E{>Dj?zSgvOYUD*#uui=KiO6TaV;Ky5LDm;scwC_RiGG1d(sn2|Q`8CtuWHcT1 z2HR=#c`%=B4c2Z_+nR=QV2)=*Eh26izIv9Z)1<6MNKx+e?Y z_2b+d5TEtvd|ZF>@wtR0%2HO0%g&*sj9$BMzk()5Pgy_H+EHz$Ys zd-VvkP%DL^8{FyiwFqIe*G=1^U$Wt0$iM;8X3SD?{nnAZfg)%nNaKUbGXDVMZ%!E; z>&X#o7~q&VKi+jO<;wg5op{th$#^UGVj|MuO+TOn$0UO!G%`KrY&e?P^@xX7*l%^) zglSdN78{mBlZr@DVo&SIMd4)TNLnXmr^#j?Fh|jO#cbq7wl~6!DAt@L#l7WBbo=TW za<8|TF>anGe6(P|SUrP-G3A4xo=TpWS!kz-^Fiv_fjVjz5FOIav3+LB(^J;ED%a30 zuZ>UaSqTv{&>|gVtUWCz$O@HTh-VKaLhYaLyB7mObK?tFBJb_jU6*luHJdeZ)1+2` zMao&YeTahs#G&?;tXQZJ{yN?W6?#ET+v}drMQBb9IsXiC9)mqN-!AH1SkoZF&*+9K6_j|Sy`gOZDN0l$NGyDG^9s#zLsIZ(j^$1E4TklAD-{9=7ave;FQ{i zPmMXPLz4a*B_2+^8Gc~7`27Uf21)oYVgN-e0pnWd`0|s`mtyz>wVuHY?OK?uX-JoW zKRN>o8fl?Sun1jdH$5ru+Z`MA0p%mRi_2SI_lY4{w3POniSFE+lHUo*&aU^{DSw-q znyc$m*0QYz)B%EE(bq&~Zec`4D*AqU-#f4NG1o`a4Kw>!a3uJ8b4C)c`bxWu>9?H* z5@H=Nd1G_+4<%<`jJn>MM5&*B>Lgj4E)@`ce{Sk`Hb8j|f3O8NGA}J&|8it`#bvVo zO0HTcylUmvQckR>j?+V+rMlI|&+%JI_8$Wt9O$%&yUl^i_Up`|M+0g%pi7I+emhx_ZA-VvcQB z9<*V5-H$#JK%kgoY0Ksb6fM71TeiwxT2j}BduzBrra#NFfaW{YMhK&6l8^;}+eKo} zV+!@q0lWpDiwXp!7rcr=i9F&#gQkb9rN%9Wc@K95DR!i@j>JiNVY(i2Oj>QyuhRsj zh?ZjXJb7I?C~3ZBEd|Ck<0CpBDx3PSJ@%&|fqKQ?M}qYNo?ck7{aA>;&m8-^nT=8D z^cf>-n-UE7Ht@6bRuAXyVKyP|v`-9v96R5#d)P*ZbJ}*P59VNlI6pA_WW+*!rsOvA z=g!u!dto{vUT8n0JLwtJ!*2Db_u$=Yad7}GB5 zPFS7xd0sbqz)3#$`0{MA#0@mTB3yQ^RWz@!KlfZQxM;USn0?AVNH%eY0%`z8xkExm zWop*eRUge_1O@ptlG-C)#*z%4I!iGpU?c||7MJoUUo0etg?UUD>=?aDc&%DG4M!Y37jp_4f5k5}*5pIZWH!7JoDpE{vGH>$-j<)LQz+3p*)?37n2e5v(ih?*@I z8Jxr{PH?U3%fs$58lff5A7(+1R{Qb9ub);M5-5Ghk@TB~`SjcS6+WM3`U>!mnxpdy zjmpD|Y-Wx!ZqcFCAsZj%-NPGWj7$TrM8mJgcS0$L0eLm&gY#BAXGE8CGeQ4Vfx)rP zkSb1jzb(Ux-h)EF1-x?nldPaHt()f;#1LXZ?}JS29O(4(Z+)Ng2KbHyHJbu?USD$i zD;*x4B7LXB&!4w9+^yRPrY!PHX7|nfpG17En-7GpQy0;n3uFbVojo=vGBr43-^W^M z_c3R>A+p~JTDjB}hFn`&x`HFM;5g&EhTLkRf|iz(XP?;8!kJn{KWR@Hc&t$qSQIJt 
zRw1W7Zq~{rY?i-?un;cc)`<^liyx&S2@$Mcp# zUth!*mo@9>T*P`DuOMW6Jn>TYqK0DVj{9fejCE)v#D+l+Sd>_zf6Z(w zBgzzi=AzNK1$IAX9_&sl{P%}DNfsQMnjQ5!q1s~0^RPZw|LtR@-I2cXqG!J3XCPer z{ped2d@%YqvNDa}e%j`O&%|7qdn=Hn6`YF+Q#K6QFEIlsB|=G;+N()dDXsTQ&{uy+B_SI>|oaBvC zTo!NWvR|>EaQ}wv`;A+fVOo_7(7cN!oqql z2sI-%-!t-MtJ2Ae)>F*I6vzBaWbNg#-gJ&TqOnlt&l6K&JB=jKmw&25^bb!%Z)8jA z_=Mv%XgRu6$yTB%znSwA1gY6idC$GX3x}pLtuTFWG~IEp3VC|!?T7^BucuplYi25P zam=uD{MGK7{P;s)gf(((GUFcvFNfCWuUNsmmd_25m!QKQd7TU@G=V4E})6GR0oHLo^2 zHxnu*B3{)1qsVcZ?%Yw5MStg4k z2{jDKXC$+nCgCIyz#F<&pMzo_t3aS`Qx2=0`+^UixSdP1amqT!MW}c1_Klbd=zBeA zaURDTG{}0RA2uxK;$d=a8x2dn#p8P95j>Gp3D*&#dQZ$c9kN2ds3nRuo!&ZJ+7v@_ z=m@Gy&-xb=!Om%5y9hzKyhjKlCk?pbIlxD`@#$nJXm?lOaOev?j5z8-;WrpV@dxDA zifzx{S!DT&`SxDR=J<`lx?hW0$g(j5*LdMGle}BI=OK!gZz+eR+@^CP?vZ9w=i{W+ zFlca6?Uvm4v!Hu~kc()I55g~#OQ*Ag;O5SOdaG+>?{&vUZ#=oe%IYA?Qz2~zBg|SV zR2X%N>e)A1_V~?y^VMqtn(g=ZyI-Ao{vf^ZmN5XD&JuW=H7c&K%j+nVaygJu9?eBY zd>^?fQ=xbCa0BllZz2I#5YTQ*4+aP(cZ zo>j3N?>;8+=P-F&nUJ=j^BvTbBwcP0GM>TD6n**+As7bL}Gekn=}O=;_9&g&0X$0;3Wz>w!>90 zsP;aAX_H%|T#Q(nVVPGJ<<JWmL$MtsmKu6@xm5W=+_l+kOjr z_XtDW<_VKdIv`%4tad7V8|qtnQzkwor}L+Evdy40yqaw^k~{;Xb58)@ndq1>4aPML z;JXt2I)wCIL|@_b5mrTar@85k+Ip?)lH2D zRG3&&l%xc79q4aLINFlW35ZR;<@o$p^UFYs@K7qYadZ%w=9f>b4~M0l51uA5{K|VS zO?d2h==Xi!Z|Ai>KIj0aIadcLU>XD*pEhBR!Q;Xj)NgTE^?LMzK%^wVfR8<_yC2-B zO|bBmXpiSuW^?0BZj&e(Y)i*O-%>q(F>OLX<1mX@=A$GN)N7(oO$J_M97j_1IYCW# zBJZa9Iu+Zo!8Vwf!Uv=EC!GWa>9DmuUb}Bs5u_S=y2%PB&Rnc;VF~lI2&B7cJ+L2-?PO$;HyRq>ua zzbpdZvcC;T&*IYU_Zm0c8E_(Byxz&4JQU~C|3xRm_l-f9!a_3$^<8dMSH^3ohB2WL z93~5u?TbRABwt1-cj=4)K|F(9Z?X~3D(K?k92;7H;=Gy<*L4NfTzh~?U#tTBgfGiky-NuMZu zN_58uIta<4#t+yOw(dT3ycu6N>m}p{s1yS_&S@X1GW#?{8>qiJE0xXDqZF-C9fDlP zS0cRKK^IsI9_j*Po`OA`sF28w-@W=$C3!a_V)v&Qsmf`SwWTA(YEkx-b=7mU1fS|) z@iyD_!|4Df2t+teGqmWw;l(sn0 zA(J=hZpCyWvr*a!Pr1%L5;i5dEb`!-^nIa}RYs__?KIDCx?mYw0|6GF%FV1M3p6ik z#zpGGO&ub?iyf9tvuQG9J(Ooc0^~%|IgFX>F`#_XB3*2AsI^sz>7nsI`1z>*!BMa@ z3VgbGMiVfYh#Oa;rxK{^yo<27aS^#EUUmCCv_Z2#_v`F}zGx#$swnlhf@+EpbW!Mp 
zMYvBsnhs;c(V^-Mnq&mp(IP^Hu`A0tSBOr8J8RPQr;sW~L+>oP671|ap%`!%CFaUy zsfke_^=&sjY|y&%k5KCEdu;-m-k(46i~u6T_wybZXg9uimSptg`;9RmpGGoklJg;u zJ9)VOPAmoBe{S-t?=EY>bP!IAY%6GWAuK&6wXE~%wS+-$(yI}7p;HvDFmB)P(hI%F z+`Ow#C0An5)k#e!{=*H$Dyso zZCsI-gU`JEg8}*I50tzqJ9Bx@X$9YxYJ7zo z(D^}=e<#C478f-qndXy!bKe~0aZzBM2; zuG{9mDLv#Vh;op-N_?yJ!lQXx(tDQ_c4XH7)2AUJ7Yavt9Nn=Rtd%(_)=T()!9J*} zvK?|2S3s~EtW&Y8$oA}zEukcUe)3fG?JzCLYe{VnabtoD>D*tl>TF(LI~Qk50h7U!Sq_Bw>Dvcy7y|ZsJgifs2rLR=L zj9Fbw4`kht+mD*Hi;3RZ#PAG>_{KlT^tnA9dcaoJym`An1)`{rw2lK{ekFQ4xkTmIT=<>huS7?6Fm%9TofXUmoo~z1{Z%l2xK%c3INtddNTN z0eY`z;brbM=+Z%ZJC)8Zf53w`@I2xrks2uh*q&kq5LnO6zmYC=1|=B=nS#fpU1m${ z0a6=Qo&vT?pQ0%bTzjGC0^BHf*`tVvT zoyi8)8NW^amDD4Zn+|Cj$)b7<(4(iNbb7y4snaiF(t`4?3U$GhQ_>5W=|`o6(bj5L5`+k5+Pi0M=7H9-XF5o7SjLz zGHPFhcuoP(bUr!!!K8fb<*u(^6G#^3S{?7bV)1eLZSalL%rD1vez()w2Fq8b`QB>d z?-LBQMYys`VI`DN3+cO9O-F~}Ugh3GxND=8-dR$}OnfI_$|3CD6_xB6bNPnQL;2TYK@f8@nc=*K(%B7X*9LK z1CqpdAInA@bVo-RXThHb_WfxJI)A@{^nuA6vd}5^nX<%0`}GZ<14L(*J?}}Yr14|C zm4?uoGO*q@sro^wDbl(C2>u(;f3{)uL;-+dS+at>R(JPVo(f>AY1MAM`boR3LGv|9 z5WNR0t5%qiiX;#X;OMJy$NJY=3+YJCiv zr?*yaBXUt5A2$^nKEL$V(9zMUU+NXwUGJmiQsq+Yku@av=-_})D6!UWW&JNWr4P+@rk9INYj=8s zKq=j%&9z_MZn6a(TC$m3bHSp7ZLI$hXn^`e$e)hi=-Y!jk>eSIY3~%axqfaT^ zL1rJtyZE@1vt8Z1Gow@U~Fdj3A{Nc9xK}_uI^&?2hLnZ71YwUys{{^Uko-mtv7~I3 zkXJl^l&*VE38mAhgb?;jr@D$^vT!_$#-`3l`v_ zi!Mf+<}V@FGa5N|fqVx+e;Bd_^7A|+k+^>-_HXSEpM4ajjBSovufGbV@45k-mvqO^ zt})2tIT9G;xP~G_V870jRn&_~f0v%jI*1!{kK%^B!?-b*dM}0-ov=aElW?)Q%7lOX=$CS zGJ%EQXu>A9IZ~`3OKiev___(LrcID0*qS5XEN|{JZGyRZK97fku({5qGqirJe5eld zIG1J{CcK;5oW_z%n&+7N!_QIm94pUzaJ=v4zQS#Aes->#=bP7K_KRiBzJ%9fUOSiO zJ*G{VH}lZB9{xKhSPhk>&n>_|v2TCG3}y)JgnF8g?-Y=yS+B+I6U%QW<(v+`8f~|^ z0qxPoFeJ1Fe`{kU& zbMD3w*A%JySJj5n(M%l8T7&}2^C;MP8L;~nz}FQ8za^dm@6SQNpE+};n)AL>yEl;U z{Z7GsmNR66zI_<-T_bSNJ02_Nb;q5zUynQPxCwJ7wL-oG0eQBO%1qJ8b`3+WcPw6e z{vq evh;dd&4u|NHWMDfgu;mE{=SJIsAm zKMx;scs=X~%X}f{w#zTTD`|&NGxZ2+iEF1ZUe(gl(kebOfu(4mB*=0fj~PCe!>pjz 
z@UdgEZi213&)jd8Gu8>i#yXebFl_EqW?5NUoIFR$*#wqt2H>e}LYf>?4zf$+lc#2Z5V2vIsnKpE5^+LlDUkW(VsHEzLU3daS$_6$l(ZX0}fo{V}hN zaL<0S%!xMdd*P+IZvH;>`2zR}gnOxj_spg4xgSmbu`U5D)Uymb>x+vhsr*_?hBf);a zksMW`Ah<}NUf_PYz&&&A&5?R@RK76ao1^)r&Hd&`J}tq)kst5HQU7XbLIR23vT?-S z0Y~>4`Pu_h5|GM}hOdQ-4zxc-r)DMyXV59^EqvRY}xQCw0zkw^|8ze zpn_600TE#;}!OOW|j`&lml9#i+gwtIK%>(UO5tETQm0)2Q`+FDuL*!O zfiT>xhvyjQvF=kr;_)D)=;LSZ^8xS;`xMwScv1lF49f2iKtSL=uqO`Q-FxA;StW>) z-&g$xa_<;SouXX@|LXT)SqThX4p}-RnB&tS`Msqrx81ASWjTx62m($OG!*6H0QgM9 zVK3EK{ozy;WKY6jOKlwXwUq4)0e=gQu74OB9W!q2~vum(=vvD<>x#8#isj(4j-osdHzvZPyNc z`t()lH;IXfp;AT5=kuu#bJTpGB$8vlL7sbrfcq%10r>RK?$Pk?`363wKst$Z{-8 zAU50rw#b%aIZ`e+%UCMs8vBbprwHe;U7nLI>tfY5x5cV$wquqR=Z4n9t?HMu>Rhu9 zaaLFz$J&>5)h7GLrMaJVg!ScEJ!T!OKhzgF|8PbYeoalo&uMA+HU00K##Bp7OY84Y zQSfrE*dZWBuuD**w#Gh!w8bef=)!A> z?Xalwyyr|~I(fTR1_0vYL79>AqXG#z?vP;6EeP#|XY8d|HtI(FIQj-mA9F3{ zj=KpvCS8LJIbLACd>hKaKL+@d1n>u6f?qd%CT$)9ehZ@Du$V8{s{WkW{gayh|6*fi zgIl(2!Amc_g#TP~jT&9Q;*yJS`6UuRL89%|5A(O#5(TE2uYKuFV$6&vy*N zFLLgg~0d(z@~YO032?fBgakvd($U5Cmg0t9|Qc8 zwJq(0&Mn9}plpD)X*s5jhx(%Gkag9EBI*zyWj#DMJa|ys;ecO6J)t&_i1ChPJRnFf3E;su4K6O$D;EikE8AFccA?px1mu)X>41*6mBn5c6h%HATL~>@mp}o zQF^9S@o&P9IH}j|PnICThZL7zmh!tx0AQ7EGw0uc|37vu#bX4&XtC!z&oR;luf%G-sU4<_P-HjGsSH#C(Rl_@jD&ws|eecU8?X8|{(b2sXwj+#a`OZPi_nS{D{%FdSK)(~A3}y@ zBo4Sn;edd99!Kb1Baq_~pm%(W?W_ADZo@#8r=RKk7Fo)}($9jtuya)#;!K+7uj|?Ot(PUsSX_qf4!^)Xp9UDC$!1$_rUS`71MWjm8Rp}FFLj;~;Qw_# z9{}I9ivf=sQ4hlHN`htIUUYc8CfeR{JG$L*E1H+O2V3UMfzQh?92@tV?>>aTlVF-N zKk;wD8T!rmQmxB&^*gzpMP6@HbY!xsjW%^79MTcPbZWz~|JM zM`PLu;9p{l-iHIaIA!|97&-hpH0WOu?+>^i^@i6(^9j$O&D3YnX38^YHtq>D{N@>f z{zs%tRZJXx4YJMPzyy3g37B*de(V1)9NI8UAoH-wxaY*2MEf(t`@abAj~+dW@4g#@ z|6F?={&($V=<&vbnBVO+toq_b@v~U_*;Clq>shSm@iK-tc>=Y{-Goanxd^S=w8r5> z?DC1)w{IVAyZKf;|41pM#tp?m_Xy+*)aQz`+{2OM9f73iZ}D!Os_4?-DP$7x-2(Tn zVG8i`eWMVJAA!anJ*HmR?a{rfL?CB+Oa8cy9uUCI4FN1uT26p$0a`W6eH?-dq+Edj zZh-(jt-$_wfX}iF228kTorLC)!$M+4vp#Dhi0UXMs^Ke95AbmZWl8)cJ ziTGLm%bFW&M^20_%vNNJ4_Mv!%@f3tWPxuJ_*LKK8W_+V3lg|L)UH~ZHv1E9DI?c@ 
zRMvkJqq5uu6LlLZtzVDupBjf^<0CYrsF1 zB?t=zRx168DKy+Czadj@m=ABf)GV+q_cP%0VE(8P=v3_iv=YD{`pU~lmhw!|QZGKs zy-sp_k>*c=*DJq?{8s8kY_ZejN6fZ8h+MlF`!;OB{tcV4YsM^etMM?J+-0~lM z_5R!NS}f|R4FfnE|DL2irU@0@|Ds*20slY{N7p@KjNZ=)0XgAbpgdESBZuCAdS6#Y zy`eSGcGC0seBlS^yRs4buWp2|SJuN1qsa zR2wV5sDtI7)xlDM`lYg5Cd*Ylp2g~(vi$4?jBEWI?zrU|+;Zz}7&GQO?2X)u!xEvG z8b4tIuKxGGFrwdk$WI!L+`wq$1;*fje=PDnV~{IOwT!{gFW$q{5e?K7nz;h^2i&9a zvu7ggTfafWkDkEA0{1V!{4%n$GD9{0Jt{b0g046O!N*Oo=V&)az6sa40@=AZoKNS- zdXCz64Aj+VeCV8GqvIwVGfzLv;kok1%FwKX{b3r-K>_@O0{8hioRy8kNon|1{)?Ww zb3am7ZbssQ)o}f|2u{<}m%#DkVmN0mhIigdBre~GoSo75)su*$0^|iMxRCl)KSRr7 zeW!Ume7&%ZV+!z7Gmx8-id+Hw9G#-0rKMHOR200N8+Hp2+64NXYBb$x4G7>T35*Mv zS-i+j$-#ll12`c6PUL6iAy57-$&-Iy!ksVsPO$v@!DX)eZ+EU*W+E>q9l5z_$j!+> zp8PwTo0W@TFct9ves(w8H2R}H{Kb524fw}&CVz1TW+0(#VW7E!BL!Xpsw6@U-M#$wsfAsGE>Gi;qZ7e4uWuvooF3?^acqQz+Q z#M9{T)U#;&^pj}&)Z=I?fkNv$Z%6k#Z$Y=aZbYZ*RpH(h3Aa;zTWObRT5Q{0#1ny2 z`P>71_Q&*Tlo^SF#U%v71@Oai?hntr2y@0>gQf#3py4+)(PPFN7_fp+FK{pBa=?oE z7_hV+1}~N6l26cS(o6VY$P@TzU<4LTxB`dyVJreZ>m}@eBlUh;9!FCn)hoaM2;i?< zw+=Vmbdwq#zwC-D@bAkn7r4I$gX=wkb-iB1%3ezP|6GBYW7}ZaPpvS1WJ7$} z{WVM+*%o2>2DG|18G^)Kz*+9*zp!A0gZ;z|U19>c;|vA}vs?!1_!8KfGo4NqL@v{+}I8Qlo@*?qKBu!d^q{&N=IB7Bb6BocW_9u9! 
zE<|wI24wAv!!Pnb9tG*ro_acAwiW81=~Du};iK{o!LI`N`N^rs*C{$$T3W?WMZwFt zV5Z_&J)BO{0k;60TL9l>3m}mrubXya?eMW!)cb49>-!bvfAtmS^y`Z`Uxit==l1J| zd4)V*ys$6bN7lc@+^;?te};Mezr>s`KgWWBgRo=nd?ZU?;r9?ejeLAHO(x+~%+cO( zxYvMxavM)#xRiS+5Ur*;wF{Ix#S$o~ZyiWIoEno)NfHFQIjZh*ryw<$g0U@IVNCt{ zNV3_Hl$e6qU-d?#du~OOoBx9rWy@gKj0N&rCBw0G2Rhby1`7v&1K0N5ux;HA_nzHw zZ`y#b>%NM{H~k0AZ@wNAKW&NRc&7v%Y>!WfBzTkd_zp%<{Xdz{Ex>1AOl&@Rzj#C) z_0I)n-C)dfZ;rMLkUOSciuObA!H0vZq1)6~1>_sxYXSNGD;g-kAGo411}?9!;C}F8 zE*qlnq7TsI`$zEBH^79Yj$ww>Uj@$l&%PzhMCGNZnUw-}>GBZQa zc=YJs3-~Ihov>>{FKv#%)4#}mfqa#JFW{RgV4j`{q|pNE8QdqwGNtUXbRM%Fx)4C0 z;P7%Zf!l0X)-&@^kerEwaSkLc-w4;V1@KIqgW!a@NSZKDJYQgbfp`&;1@Mz6EfX(A z;zWUcaq`4P2uu*rpEMti9~Z&Dd;<91z&o($dl@PAUps&IJ<;`Jp4uh3^%>_a%6cV)tVIqGkBzo%hh>uDkK>)mP$; zt1m;{Yp%d+*Ib3VSBLqktMEE~x#?q z?hQAfzE{;Tt)l4{HU{9;rSp;BD{{k%`@{fUJQ7%q^&vz)) zT=sJBTmtwm0cooMb%MZr96v_J`{DEp;0I(qAisM67TL!#es>})Yd53gqmN8mfX;+sJTM#aL-)GO(iGnD#s@~`8b+~*G9vmYkh9|=u=xk;FAwRk0vzua)10QRAP zl#=p}kplFW8#CV!#zR29eB8CDKcEU4j(!4PE&K=rm(<6=W%V&&c>{s_Mi{ib2?nnG z1cR3~#gwgI;k)&n&~N$2Xg~8sd^GGqH2k_GwvGL_F|!WM0Ak2R_^s#FI28Mn0r-d9 zzyax7{wnc`Q2)5xfg?xr)hoE+k{bTF1`Qgd0Kd%}wXyQ^*RZU|OIX?SIjrdRv{>L? 
z0DooAXVu6(Q*@SpSqHPaK85;^m&empOW^YlYvIRsPhs^JFJkTI&trI#TKLbE7vr_p zUd86kn^5(>s)(pm3C}(E0$R6ji*aMW$D;X*(4%_~)T&VnrAn4Y$r2?|;$CryQp%-E zm&Id`KZ*YRzlP0X{b&Dx@{0m|g0=uKUqnq6I|Yan(t+qCV6P9@EkD$5kJt-DB?1;X z77+L?67cDhLB8x~o9ZQ4I@GQjz0bpuluYDD$$x1Vu7Q2(e0ZiTKydOx0s48eoCklH z15@XQJRklk^AVUlAHGS8;hVTbmJ8H~zGLEC_!q6i&(ThSdZ(GZiGH^!vKO)xpkQyMkFw8l-9r-%GwBQCkWF{U*Ut98TE8#TrZ zSq^*o71X=^zxcFNDJ&g60x7-#yaK0g0>0gO0@xo1eCeBldvhwxqOQk;`#%PfMY*^D zzWJgw-=nbdg=m%TB=9cVE`fW0QWAU~!oBSCB}#qFi)R@ZtOD&8k4vCF2~z~lzpDQM zQtf`s9xxb9@4f?V@3;k@mc1J>D;Hz_u)+BJ_1EFu83RAx)!4lUz2A5PElS*pu8-9~ zA25Lzy}Vc&&O|3B~h^ZW?{3NfIT;& zKzwBPZ8#hpB*hO2%Z)GqpCjxc!2eDB!?0WM;h-95J?TZ2hBJt1II{d&AYTFgipKbQ zX#@PQr9Wa)w`0QQzUa5?BXpnt78;Cs1aA*0k6FX7z)_~>(EJ+iV5z&u)%Y#uC%I>b zaMb^q^yfC|-wimr_j=`mz1N{2sv-(<*2+~dQ~$W6hCi+i8#ds|tFFdNRsV}+Jzo-_ ze^J2x83p(&#jCnMi`BiJ!>TWy$MVmg!}l#~;n~Xn!{rxUtOA3YWpBac){kS2!2N0o z0%r86gS-ECH7Z5ihZF(6w6s*Drl#WOpMMFp7?S)^0}HJZk;MYzqk-@DiT46O$a0)`tgO%459|`yPm{WG zSzid=rH+D(EF4Zs$D!mD9FgVG#B>yx4yG9nNaU$3#{;P-NJ_(R{zPQ#iH3i{Y612I zNEA3%qw*7$AxVHedCCkVO&yQK>4T8`(`QJT(@oqR$#Z%k=_lDY{d*)5{3k6E*k7oe zIB6mL0{w~eHz0S91xNf6m?UMYwjpgJVD#r`5iXEm0~tC0$KF}MM|E^nC6C)xVH&aSVN-A?GBY1fo%-|(@u^~X@9#29HR@~Tq2tBKO zh#}QJfbY5uNDS~pghGkXAT=Tal!#2{Xn%5m0x|w>ENjZqh zU-X;v?u~RW@0>k_L$l`K=-fq6-ns>qQ3tJ_z+a;w*j5qrt0^eZKtu4YxOokO>wJOB zzi&s3#)5s5Cu8`>HE?a~?+Ctf6@$M04qgWi!=N%kr#Hdvz)=h+R}B-37eVi$1>w4M zCCo-Efjpgq-z$|x1QJ|>FBn@JQ;*$d~m*(f3?1@zu#=RHTY3cQTVoYZR8_`C#b*@@VDljEVW%lv$JIyWVtG;H z{hEl1d_L&C>}%nCeE+JR1^8U6AC-tScP-Mk5X7(Y2G;ljYiY_}KO}2|mF{qG^&>Fz&KFd8!~Q?en(-agWxAUA+zE zffG;>q+7QhM%d;91o#B&Th0>L??%+VDTq7X33sp7#6z!wc;HnC4=6f(aJwWDuh&4# z@$LxQH-om_D!|^dl~Z}x$Dt$0*X}=uxElc!@zRUst;9ngm6*Tpr@>wN!++N?O{9Mq z;^`lQdm0l`BVt9*L60a$$tbP@NU-Tlb=G<@YhE_IJ=-I*%xVLLQA)udzX|4uwHY zQ-U{x$_%r@1S_4-Ol2`rpkUJ3Xx;*|l7DMqROw-&d6QBvwlUMWOmwbrr5ZcNO+=5< z6)?ALCm8$#?a)aUP*2w*AYY?(Oc@N*`^kIrZ#Xm7;B&q^KU>j_rPnQt{kaZ$6)B8< zg^FQa_a4w+xeU$at1x(a)3YMD*D7gWy4UnWufyv+hgaO6g#lY(2;Cbi}OcXq*^lEyR+y$yxOf@{x 
zEvM&XYQSgNDKU-UI4v!iem$b;Lz+JL!)d%s;5}g64eZPJL=FA}^15Y3&~Cw37`wM6 z7M|*X#ix7Xih3U|tB+vusUcW&W-zWBPr+Sx7^_bA$AS}GF#BX@jM~=>J-7yc!AID( z;$5UN-1D(FkY~Ffd2(T-DNhT)PZNFM-zVRrf;=E|=b2nq&d%q)@}CqP;jf7Yv?@`u z1m4b(72nnM9KCMy^h4SPOjcBWsD}|*Kdn43+C&A}_Oz7HJ z^p#(+@H=SpMMbpvydvJol>_g+_dX)RL+R-D8*ZQeQDmOu_{8ZyKCC=j&LjP+N_3{F zke{x>=NkD-8hefXDgya6caXNmhwLewy4nk=E7`Y@c7R}C7v;$5j{)MjwmTm8D5{R| z2}HDSFyaWL*$D*m37$$MJz)hB3GNea1t8|iZI}+8ftDN7?l^*wt;Z1g`$^a~orih< zNyMF-jO1Hokmi>i2&UIS`G?a~4hQAJJ-3=jJhcL$TTj5Y?Fd2sK?)FDX?hYyvSt5y z#9j76l1CsCJpvFdfFfX~paB{T3coO2hKTpRBkOo$s@rr-ZD+^4hiHy=Z%Ga^!J z#?7re(YHz!jIZ-80(s>km7z1@MDX1(ISZK6X1QE#V z0#%44fDa1}LU^zOVL`!&45YvH1B-}LJS_#CwHl%JVB$k7rw^I&8g;PLZz1VaS)dV>k>M-QTZ z^~xAsx+o@=D25RgD`H0D2AJ8XDW=wI0?z~cVK8dxnNe_{Hp65NfydrGnAfQdY#Ie_ z9zKq#4I4n~?JEF((r=An@(7|}%`DzK3xT`h%qdLy?i*ZQwE|X6uxJ>~K?Vn%&c5#z zdMYn>k15vKufzFnI>%gi`dxMU^#XjSxJ-YP>zO5taR*$>06QJM=#U@oalwXVh0$*A z=NP`D2^J9Gb1nY5OXJ`fb{5w(N8qM80Uz6StUf=Q;Jz!Z?}Ayb9WY{7BXn8$HQFqw zg`KMkAeDm#{yMOl7;HKnd*ORX3-zZ0F~C1rMyH-tRyxYdiyzMR9otc`U_p^`lipbf z+~0mD7s}Gh_uDt0V&U*^&>vp~`0ql>ojrJ@Ah`56fO*6EpkkTQ$dT3FlMe5jD_0)O znKK8esf_Q=3wg5WPN;YKSr?XGFS5`?pEV8bJW756Brwp_Rlx2Y3Zrt^ z%XZI5Hrc25sl1QOp-2rQxaa#5mw@C10{rMd06#$jzLb{=in9Ukk4O)cijR+&a3e}qsqBoN*cGGskyq@4Ol8zOj(;{52#$A&Z zzPq=h|9e$1_J{A`ca8piFmbOddIV|+@abFz0yaaCKlUtJic_1m!5YNDMkqm|4gNua z*uQEi7I*J}#T{CaTVqkjR#@Jy1y;0fjuo9dz-RwH#0CXm`}hgyUZx@zwCx1Ta{*r? 
zrf)jnGuW#rcw>uNce)q6Of+2Ob)4SBUp&?HWcCd=D&95QB>r6*c%KZx4j| z`QYrl1r)&5LBQopFly=DQQBZMhrxa4b}a1N5fLgCZXY^=$xRwSedi9Y?LUGk^%_C% z6(HV0?xBZMeZmw$nA5s7{7+qgS+AjYk)M%0=;T1tIi7)@%CF~gb6LsUS3iGo(ovwm z4TA;XbGg&qfBQ3dO&-EuP}&14(k|3~0`OD!y5R8Ig6J^&L-bfx2eXcJ#Qam;G5cgE zthg|Y!2UASQNGxGWd`P+=!w~eJjk$dUVWl*J50gUTf55J2k?WH7ets zyzk)T$>WYQ@g+U^BJi&lf%`x2`7eqApJ6^d_yQ|$A#~Pp>~A|BQ@;2O zlRm7Csnu&^;(N6*xz^<(pASE^yDKRlfjfq2Abeza# z7v`@9Q3(Q^MIZGvy&J59>Bu=4w;hFThbyexU1|Op3cc9;W6y~j5lYb1>Ek|M6bQn>`uHun4gwCBr<%xfesjQ;$ zaRO2uHTVqou@P(u_)=0*nM+TD%U`iby&!X~ty;^_O90HkPhhRl*br{CLUH{XuI|{3 zs~dmE^^IGE-5gW)HS(2BPH&~{w$ifAxV~v4u5MWmx2>yjWWq2Es#+G~fBFHwSFb~F zG7`9H3BpV;65NODf^lZ`60|5@6f>K&fYBoWdVLg>>JS9_`k~*iKci*wBIs1I1Ui;1 ziT0&RpmWJ$=vtx}niVdHt>eZaF2En#Crm)s(&aI~O~(uYU*y@-bxOc@f-iwEUB3wM zxOUtT@QL?A06yP4?($TRARPfU1#Nz3PhxkW13e+=-Enu63j!wK-SJx6Jvti`?(&(_@V~p9^2=mYM5IyM3InfQv&JDzx%fm5``@}!n6SK~C#kgZFFlfgw z=)0y4+HlvJi%P@1B|FkMMQ1H}z6&0WBL+J^9!R-oNBT7S|CfMUI?Bsy<;sDr{52P2=K$VLkvz88ij}pZgecp$3HB3)nQwAoy1-5U{NM<%?$ES!+rXD zk`D0W3Gibg$x#yUrKF@Xm!1ZfzhWoIF~o6hI?rnq3{O^sC``DxaXW^5_dQw_EsB=; z3!>$_h0we}0rISCs0`~0bGQayA#&aMUUABc8NqZ72HM7A(!*>q(fy_1 zP(jy5+j6daNMHbFwQYxG0|vqptU`!B9Bw=JVPU7v2vsX^~Fp8`u5>G4A^?x=VAE zESwitb}S%pJdGO!{rPibMaz115a)RWrYn2#VbzLg`}60B^*M^(9hxE^0lv@12{^f6 zut?YG)3;BidI3`J-W8+NJ^}X*m`x3j1TF{Q_`s={QnE4*Zq6Yte zG_IDCl9GDe^fb8qRWk&MoNKbE8C4n(uCd_O&I1@&>r1pOQVip3*T#}IZLy$DJ1l71 zn%ole+BU`9){QW)O=HY&+YIwtx5WHbZLpvfZP&s+o!6!nmUL)^`9FP!!Nm$;`j52{ zbnPOnCJihK6)tbyi^V>2RsuExIRZMXF$5vB zEKFk~TM?==3rCtl5N5C(~Jy#1B`G5Y#tm0F93?uJ1XFiNDo{%F7o@3ij0Wj1@OMG4j`X z*fwP{%v?SK^&kTW8gxzqeCMELNWXR@Uz;}bR4z#hMie3YiU7rno|xP7uUkzP4zK; zUkgk;*anjhwZ()3tuS@=OB!kuJD* ztQV3~h?lt0uW0B?Zslvtm@c zU-4Dtl6dExJUD*B)p4fGq2~cUqGJ$#;u5BO{w>yIe-~lj{R#{p1~gz zM^AvyJ=3W5FdjV*Jwdl+8v*{7V=xiq$6fRUEW90+HC{!~J{YN-Z~rvpKLz+{cX9(E zTj@HYk;*mk5%GvWdlTlZ1osU1TaH4%l>#@q#(VU~L5$e@^nGyN3kTdwz?YJe`Y$LW zc=;>lR0akj1~$%RHfji*1B0=B$S}0YUl7aMwS(^BC4>Y9!bt!A8fa=FTLS_R8Wbd! 
z+XC3Zumve$p=l^B3!!D!Ab*4_{NS;EHHK6uhsoc41Ld_#Fl!YE4_0A7-6m*JxjJ@D zm;mq1o6#zFZY*uy6b27>7z}#oDd;fjH3+j>5Mv8Ptlok+T?pdzHZsi{%t*3^z)Wx- zL)&hjI2k?5R=}e6onZ3|6yT|3fTioSf2sHb@Ok=11HNcj9YXin0xR8L9{pFR;_faP zq6zjznhW1Mdv3nSe^+r|^t>Mhf;7)ld#*dD&(OQXwfGhju5I2ZQgvcNLUEY^zf&iK z&^FhOx?)1phK~V%a{UI-`rHv+nTBYzSlnYE7WV7|O>iI`%S`8`^Xur{)L8`JbB{UB z6dhZz603UkLHClS(6wY4Y#%flVg4G?6O{9QI~x(FccuY+qG;yn6x;_M_ygu?NVBd- z%A-W2hMz*}@e=kysT{TVJg?p9f_Y2cLif4V(SH6X=(y~2bYJrw`fT_C{eJ%uJ=c7T z_DerOCtBBg?k8Best7`8zcg-G%c(i@Xnu?f9?hUZ?0^%YKi_X{5V z&;;l|0@2&IK}EK0+>D1Cw*!;r0v~-#PlO&W`94{ZB@0&Mc?YAv{~12V&LW-wI)?rO zj8kcmpwuBzVGxaL(<1D-_uQ>!jF%sb`%l2Q?XVco%ChAMtb0$1!L9_{N8cldAk{Jl z$()+wPj*h7zC-qP%&UI^B5%;X^pc7E|9Cv`Q6g;5Ns)tZ<=p-)hhf}D=Lpo&c8L@W zq`MDJuulj0X$1ES`1d8?OG!y(Vm%Emf5jr5flZLc^J=x3fZ7V<9UrXf(h=QC6^GmE z6}V?605lTd=?UcYCKz;PShN<{G-NdaBAI`i2)d18oe35KNCN?x&1k^QojWnON_9-G zT?gu`S7GIWkb*RDqkk{lj~zpdQIDXrr_nrD4lHd!fbZo2gVhW@!M#P}k7LUhVe^22 z*xqjdwvxB>AAoKB`e9q&epuhXAKdrsLA*wd-7{vOch&c>sAJbB0G}HUv$-}}0=@(A z=^5#@X84{wi-Xf=z;oYzgsRkHASl# z7NZG1$B$rP#}0@MiNNKpyD+CS1HJ|~4AAJVR6?cMv zJJ@HGo@0LouQ%Yw#KvO8$PsjnS|lgi2<8_dHJ~O^Z)8E5sw>hSMH67(M_TY?0{a|) z^oh@~zuyJnJF?)&%6BkyQ3bS~Ujr@Ye}Go=KNMYIwp#cxS}uMcW9Jvd@zpsH$AJNN zi@Aop#NM#>{@Azi==y4c%VY{H3`kWpLaKXKVz$qS_cj3!qeVIGck$Wxl8*S-}0Zmye?NR4Ep9f_|i6y=(s77;Yf{$rk9R$?&&-t z=bask=$qa!?RJHkpgTnLTYnglu9xwE{=-jAh({_zHt!R59%;r#fT}$GD^<>JRT*+I zAeg3s)d9s9K=?YO#&K#-BvQCnAA%yr zNw5R{<3_d;@TH`rGMO@hmzT8@@OfS|P*j;=Fj!#-^v9as-O;B~1$b`Sh&Yv=pxi?6 zXNFp#g_dk3@Cwse5w5j~e(AZ-dbN_^k8GjiL=gN(8LddLS>d;LABI({iD`A}LV3v@ z7Bihktw*@p3L6CmVKy5APn|%k+_|u}S!0-Pdq8V46KHCn^>xRfpT0%QLiy3YXhF0o zR!F#g(E{jDBtL$BD+kt(8Hogy0z0QqL$Av3VPV_Ou-zfR=N?s_0(^T;`I`gy+lZ64 zY}ta;)IZu5GAtVmW_TPwiBTVZgniW`TIVnO?kB1Px?w%wT9xhp~m@NXPBhAE93iCX*1 z2aaG|LweSJ0SLZ)9pitlk2}Xsz{2OH;6hF3QfsXCDOte10DLnAnpOndyn#{Qev08m z-$nnTg)!~7CNO#W(Y6LCXnWC~M6|oGf4#AHrKx}I0Y8=iKSz!nqT+c_r_xX#%7zs0 zVo0@&L&}3dq>BFXN%up6n{5d4v)c3V8P*+sM8L1xkqxKUzJ=dczk_uv@?+Do0@%8u 
z051HV3uc1)WDW>8I9NgEn)@j(xHmE@?jITm+#`rjOQCZ;Kw45DU>r*9Toh?H-a(q8 zF%|Lz-ACdnM~!|)qNDzO&6qJ0xw5)o)3gylz{$se|G@h=qHi2T=+!-lxOET@d=Dd; zq2KEu;OB~q8|UEN+&R#`Q&%Lv=C$_Sx^<)f7!8hJPwJU~{~!(Pm#)P0!NU=7#1$}T z;J0Hf=CrJbK@>I58a4#ywyj6J_bs{~UciERK!Ku<0iVq&J0avvxpQM=qsEA#Kh}sL z7>HKmr@sg3z~1Sn0Y8o{0bfc=DwFAHc>HT-h+^CGthv#x$q))-a4?ql=!qVc zD&xk+4TvYO;u?7!fqkfwp!n)FxUO7@U9+a+(&mjY6Oc#g3G(TfdfLt$pu&NLD{y4a z0-RX95Zn9o#=x@WG3|$65Pb1EbSCbKQw0MB4hDq{Mneb!&Ynd}0-N6n07Gwjz(BzR zL#Ra)4EOzeuyMq2Y#KHc8_Bul;vayo6kz}60RD&(BShySAAR%@Hf`D@K%ap*%o>IRv*u$+@nRTXIv<9XDvVV< zdm|!9D{A(2Tx(8nuhJ{Y-0;>yfX%?odHeLv5YRK+YeaXddfJ8??3xMi!{C1S1m<+? zfS3>)u58(bd7Zi-Or?SQk)xR0xIO_s_k({Bo zPTS$HR6KCk^y$-)En7AL(3!>icTB>glz75vdtUzI9&Hkpz=cKx>e(ogBW`2RciK^_ z&)sOI@KmJnWC8Zkl$MG0jvjL$YVNb(-p=;KlgyfYaL)hd|M6TBQe#|^rf!K8&pdeK zT@ERU7pW{f0M@?<_@18LC{(y8KCV^?$_v{GRu3c1_YidFwxP*)pQ2)^68QYX>hM0e zh}ID}`tC>6^*v}*=PTsQ^%h)Doe{@zj1%@+%K4Bp0{v$K{v%-A;33#KZ!Ye7d%n#EJ)~!bnwuJznECAnV69aOk#c_{66cMuH=r;j-S{y}+ zGzCxNC?X}Grw~?vc^aGZ`(rqNKb+P@;(<~N%ifcsku8J0Q8ctYhByy@q(ySN5S7V! z51s?~&TcX}O-BO0l$2CvQ$_&!@@BYa_~8MLxaSlTtom?R0@YaAy)U{|s*0;yHzJ$> zPpi-&RBgcdjT_PT%TLjvVkz{lUJjkg7R9V4^`SX`7B&KBlfj5RD^{aLrRwNNP~WL^ zN%W(ChXlr!I09$(5GlY9G*E9(FDh4KNa17 z4YXzh!88S7TpLa>uhf}DV^}d#F+G!Df^ddyJ>5qwZO_{nO`*7c)D?3&af(hTu54z& z?|?{pR<{lx#U!r5_w~p10}S}}q4N)B`{ zM!Mk9-eyQk(229yZ9jerorlkxN>G`S8itgJ?RcOcj+6()T=o)VC_3^#qnWd2BMbdU z)9lx8p})8l2tE$W#Z74c%V+qsdL=aZ_DiT-H_-AEh`zQ9BRe(}H?LEdZb-OCpeYLf z$3;0cb?Ve7{=;}(%%9twdN$yvVN|bvIJSH#9tHTob;WYj{pt%;E>{*GRj!WTzWf?n zXHCa_Umw8j1~7C4JrR0A==e{YAwQTrB7XrKTDTag1n7t(bKmoHE&<^YQHZ+X3FF@5 z0`Ma?A44cP!u1Ls(0{Ni#=7Mo1R zrdK+eg8~M32LEV0Rr5jvRa*=t=^< zl$2CvRYm~$vbN_<+iOTUeLL&)(=#xiYS9*^F=kW0T4P%k4WDS^Eog ztXKxSCya;f%2jL|G7PN>6vVpTePQ$QLzqqroySdVrGGy+4I75NImvMi{*pp|p?L$Tj#D=rm?A&=hxF$?uPm0x3?p;(0v<4sJ(WF}Gu9M1@4) zA_4w_4jmA|fPe590e(~He1hP%j{(0Pbo5O9&tAr&PTdef&r_qZQqVxA@^Kmv!@Wq& z(d!Uw&_Y4)45#aG?R=~;4BLl~$Br@MU^82w!TyIIet@s9uVblQ&jb9) zgNIAyijyAerDkjQ~A0E)n)Yu3`}98vw(h)1v#z$W5*S@Gbk#B7yz`=3aKv5@V5? 
z7*8P2FwZsmyf2kMmO-DU35m4ceq1}`lT5vZTU71)#mfxc3@sfJ(nxoQG}2wt-O@uh zh*Huap>#J4E!|Q>NrS-94QJl(@0{y8|H90(_r9NXuk~40cBb>A!_#M<{YzhJDMhij zeZm5Ofop&Ir~gTWE+M2nxARG-jH~auCW+PiaAJMT)d@w-JYSeX6IBZ7un&SQ9HVu*W4@dU6kN6^X0P%WZHu(k zRlm4$;nDnB69vicUFBC~3s~GP)d`BazrRuc9%C{$WI1(pz9iyDcQcuZo{wpZXRBek zk^134rx1g<@k}-p0kNBmg}YqGAJTGr|5}RhawFOqg(%JUHxQ2s>}^Jk2Mje9r&!{E6aPB}Wk5Z+#@b#luEn zcu2zPHtf7yrv&T#C#J`Ay{O;1^eM&iOof7aCiBV|+bG(X03UPn;Tu6& z(EaFggGQH!TxYHrs}c!ILVd~T-)AKqt`cj<0W2zv)QT&na&=cYVqF>BBWA=u?y^Q< zaYcO*6n3GY21$-5c-|&YHU;~wa!0uO$o>Aa&2=gsdvI(@7s+(Z(JLkuOx7#@7ky=0 zkSU*H!tWmIatFzSPAD&AL)BIdRY`W<&o}cn4;%`}uVjGiySzX981YiH21ngZg~!1C z2&Vs^9|8Wdej(T-`0?h77D7v)7AC>8X*-|xw`X@TlKuXBJRxxj zNb?G6+vteW^KIX}wFGj<*)s&`^W!e}qJ3JgV8xltPJX^5Hh+fnlD=F{2{yTlvB?w7 zqaXv(TZMF@=WMX#Stp%V5)u&~%*Di}MyP{gOu|vky(>yHVRGJ1XB*g#D@_A;hJ3Xp zuDF%X8-&{qIzy%6CUwnLjw*NmWSovS$d6+bIyp>2Qye!Il-wkqv2I1<*ad2Jv?M_? zcp?6S9$3$XI5-n|tlc|q8+U-5iYf4v^M`|nz{%X+1e8SXr)~Axb_G{Emw=ajgpIsV zX|~UtSU>f$fFgNH3x4f4c^S@hoL*8FXe7|p1pqGffQN1i!jptIUw?}-^!LYUCt-YU@y(Xo^0!K_1-$l`fNV-=2M!_#)`}qap^sB$= zFLs8`y(>}((_#qKVnITe?w>#5$Q&5>W;Vlpv#e~ZCKgLuakby{U|(KZ>BMnFGNvj#;Aeq1KFR)&5Ajw_vk7SsY(TB;?NyXCyP)~Rp z`!Pt?v^(;Xk)ZQsPH6QK&TnkQ;XRBnC={- zU4Bl(i`0-UAeA9~@8&i77AjykVArCMQ8_q$w7^ySN$QH(1S?lLO}b^)db(R%)2mt= zziTW*=IkR8ccrm67ONJzm=dv^UfA#?!)HVMTxAle77vD*7N<8@?0QK_TKHXY8L}<- z40a}(UToJCOjS~_fLyi1Tow#_?}v@lU4rBCb4JCV9r(8@~HHIMbu( zPGQ)-6T2SqqKtp%yb{`&>ld0h;OKoCE(SOC7dGpeJXTiXT&m})>z7R+BJB^;n-`6hm-s)kyAH$lf%~tux9hH1v;#wx( z5BiJlT!OS;+JhrJwz0Q=Q*tf_P-9ifY=Pn;=Nxo=Om3fpOjIZ|2%io2dPDDb@x1CO zCQp8cdSBLCPA41GW)+C>Q4WYqG~JARIwi?}ju~Gyp-FyfB%T)tOd(d)nAlHFA}}$R zH@l3qU;L2ShN%n;q+D)1S+cqxdRZso%Q2$1n&O{KhC)Cl}pUd6j|o9;e!o8eL< zI{lxXE^m$Wh4WYfEH^L;Gp~as2f29;j zT`0susq%y6G;~oq9`HOMJmV|5TRn$U_fUE~3fv3}-v#>4q0%KeT#Y ziJ&L}Ma3D4@X~QZ_tkvu6L&1+_YWQ!bo zt$ICzXDnoRP{*jQ-CG%B8p9k5+0Tkul3W$AqK|Pkl);ivM>wBYXzt?l(~`GjmscVw zU7co$+N>T;KI2_niZzD$r;@)*Jbt{!i&cFiL}Ts_gDezgZ75~BRl|`)bdwn;8lDmT*EuY)3(Qq(EQ*mF~z*E>-{kGdF-6&TXo^*FHn18Eho%c0A 
zc@uWs+x9l67;r_N8W0MJL3zfRzMNSHVV(J1`c(Paz8Cs8IFi@AQUfYfPVnuR3trr+mWUbacnd;+2;xiT`$4q55o@$l;xg}OGM|i0R9xmzv<#aD8g-q2I}dgUbXf%V z{*8L_8#tZFYiVa}ydR zVvT6I=nRLtb}_*Dt_-ZB@OF1le-te<*Yy8Z|G;2l=8qH3DMh0I=0hLW-%_83W9mkB ztOzF(Cr^ObYp6_6PxhcNW#R#wk?3eS@!sOE8<-NzPYhqzpYhO_^@ z<$imIy!CtMJ|#k0ce>6+EyU#Cx+Q*eEs}%|Sv>KPD?hf5xMSt&{|m_2EJ3?sS#lzmf78)OG5dx}THwgE1w;AwbHeU@;`|+HKel)R)6aQ{;({og>ierUD`RMo7wUr9$57}f$i+-5Xg^E&ajtdkZ@!(nww}OIg zGHe^>1(rKxRGA4A5(jAujmX|#=p=xMq2F!uDDW$hd^oahZp!9H3?4Qz3JZOuQy9M( zGJI9&gD&^Y7b^`k%sRsYknSFb)TLYC(t3|c zz@uWbmuimxz;pL2;4TCr_Na;+NcHq#EHuefT`B{=OrWYCEbLwDe4!G7dGmdHVhI#HLG?GaiOkL8NIzUX)naN=+8|Y&C4IF_M6g56J_VW$w>~2fKTquOv0I z8z2;qX2@*TZO1f5(MC*C+ceUpPWH}`$}n3dq!30#;6shz)!1dW6u_=tFpNgRSicRa z|L6&9@4UKo6x!uwA~pSaO?vZiuJ-O;x@_@z7(fm4>4~#g%QCksYbRZ)ql)r&im{+1 zv-})A#OoZkbz*OljdHNqF8&nQrV6~xwK@Ya6`Q)h9Ier11q$U#_!8%bm?}%Q#Vhmg z)Lgx}Agl=kM#h;x<^_?8xd7{JC!(mXyN3{y#T2hsqP5SSdBtJ~^XX$cPU21zLu9Kf z4Q8sXNKy+WkPOz}#RV>O%ccJl8gysXE03XEZMMyt&)o+GJE567Clgw${Xm{Q@eu}( zf1&;dy!GbDG#x_D!)dj>M9_;hSeG}yiicpVh4y0;m=x+kOcdZ&U zL+Y{263Bmp2o(!9MBCX#@=rPkX;%Z4jF6AlZ$LRly_P@2kK`)&WZ}g*A@o8t9SQ9=HsED8MRJV7F6p`Ut?rYTYT8rix0CF1bck=o=rpN9Cc%7T&HT5&{`H&>l)>hu}W=u z_u9Z`u3T<8 zc-0Mvebtx77vE*A8q=`lB#Qr}y-=<~&2GJjy))=6=CLUNkPb;o8s;;GQtONz2Cbj$#Qd6nvO zeqk`4D(b39xENmfEUK?^b>%6op1=eF0PG;}U*o&Kze0OjOw#;~|I`sIN(VQsf##zK9NH8!_;p!^8+ZOx%rZ zWnvg4?E~!gUIp1Lo`yfBS_?{iLU|%~k^N__F8Y261|ds|PUlMb`b|ZxPUAZpND)1@ z9(S7P7~#ooS+x|N;Mn`0ybYgRnvm`sP&ZLUQJx%eAOfuOXqOiQsb(Rvk6#CCD>lrH zNhOfG;)#di(%CI8FG{z6nseEMO3)zJO~+iNK9pBgrr2vNdXSivjqTY7%37rJm{z1r zJy(m*x%r0_I@84;4X%EDj4}@0V=XjGyPYYNCW0=t6Zz^$E%ABa`KPhOwRv~<-@L%# z957NRGC{24XxyPb+Mvyl6AH^~C8vBY;-Jxt{=ViDhD`~Dr|}c)$QX?(n?1lY+wr8+oEO&lzSYb&V zS~<0xmdK{aRySAMmwwSHo9ULton%&>ug#oMTa8oN7&Ny0E(wRKWah%u`J%a_efzoV zt+b`H7e6P+EDbN^GndL~s~nZhd(c7M{t*=_(qZHxVar)eGt!+iwfbr?_V+S4yNdR=s~q@@^>i4s|5-UE=3=lhLBRr$IkN#RDbb66`PEz#2d(R7(15mPZnF?3DDOOH~Er<_= z2;aILO${NoiUnYEBxHswKbLJiL4Lvv+9=&kdy&rF6_r)qGSRY!k|TU;Q-Ly+9h~nc 
zlLh`?JVWOVU}r1~V8%+5zyFF5Xa$yl@9ZsWdN&%8QyQzJKEO!I!LNlfnezoy!uX1} zHQJycCF0&+#Ljr9yG##?xO|T8ZYZ}3W`KT)2Q+0vfxaEHZu!UA$eE_S;nwa~!9H)= z>9woA1Np?6mOo4fW)&zXfiG0|^jEJtR#=aJJ+CUSK`#HGac>iW9SS%KQ}x!ht_S8n zm=_eaN%5%5r5fo-X$e(Gn|f;x376j6v-ziTghnE99Rl(n12e#{(XtxDiNC!4nZ0yL z!}yj&oH3;QgDTHvP)o`6zI=L5r|Bvkww{0#9$BiEonlu@w{9+-unr~-&r5&v#i4Es zc9%k`UkOHp#fu@rFopN#q`Ix1Ge6#Q|1!2`#-;8xX}~`Bf+B?j+)?QNCfYnWP>CmI%9 z)A%C3kTB(&f1g{_Pzfv75Dh$gSW#;H%e-`U*bGg;fXj#y!b->UWXcgZUcLuv zFt(Gnr#)9DU4Z`e{X31b*QIKt8#sGN>LuzKmfJwQuP`|@^775S?u;s3*D6t zzY~V9Vp#vKlscpB=V_-lc1b5Q16hiPRiV&&^kd-`nZ>V>wVl=?5&kF;6@*>yLkeM? z3a}{=YoF&DBUB5OZua&)KhD31hqLa)>wX1VV{2K#-mH~pMs1R6sUY}$RFC6pjbzY0 z1~18!jQcs8bm~jYAII86_?#cQkvAjbu0Cak<`skUhyLSFDZ5&rO(KScFkiFkn`BdK zB}>h0pIWE|WD##k2=JPG{DOG&3VzYsT11Dv34HeUavi5_Z4Kgi&|?M{%5BJR1iD(8 zwD`1kTfq$^t7_{~>-Rr=|Mf38ceq<0;~1}ZW0Uy03}1_N)D;FPCooGDn3hH$JaO+x z%zDDp^FGNO3T|eMoI~2ov09jWz1!uiE>yx;$^HN6cZ#nb(V(~#VdTrXf7+Es0*&%BO+VB z<3?IWzXeZ|zV%0fc@>*2&pU{UF&idzd7U7;uaP_I)f$P zylY6L1&^DSY`#o`1s2g$PTo7V|0JI zMM+lD1J?fjaUMAMQHa7bNi7<)ZWb72P$dF60wIwjqxu%B2zL1MXYEWSTIaU+{XId= zHjE}MMvfg)eEX*OcZwocB>f-KI8qjw#$Gh~+~FcE%gfBd1H*J+tWTnQ(6N83>k3v& z;;Nf7rr#<*(nm?W3XFm3SgYv{<2P({t#uE`QWXm{F+d>lzNKgQN7NlpXAYT-x!diH zo8!K8=s_kqwdcmUp%t21jcHOS1BSWE2py?$BMoEV4Y#hhL%g-giluvo&(4j$Y%f%L z3rByci9UV$34g^#YSAZiwMs1A=PcoHBPeXEg!Hen`Bgr`2KxOQ_2Fr+Pi=jqy4|FV z&ld!xcwjaAb*#B8fx*UfRjv{p0u`5KLO4W+5&jJ#R^@84<{7tJpx`m&honYHSeUmd zks6dz$fvV}Y32ta<>pgg3G;{rw5tU#4)WZ&B6er5dc&LlaM%qSrIkJ6(|s~+TXyU# z((Ps&-;oK}135WsUL_zs@+pd;K;)E)e=X(9LCE4qJyd*X7xBT8;Ipqp%k{gGIKWw_ zw#$N1*Jn7rX5)Mvj9sqw#kA_=6JgCgX8qo8PRW;Wb&26~qu|y;?7@}i;S`Z6k>6Ua zz&PCptmO@kx6SVi(-1-+!krtg?CWO@7|83dC~(;Y@BX!>A}=?53OvSlHvvz7tcHq< zZ8o68CQdmr7YC8I4}u*`>_pmef98be7Yoj^c-IP42H|XaItQc_=E|(R8{F)!lJ~t^NbND*Aa))Li)ZG3U@aIK&W%yFh%ne&ywe)_~KQzPY&4m z%s`g14JrBl*v(~tEr;qob{Xo#z?+L@DW2bRNHA)0!8g_PHjsb`m$#qap#?kyWx4e&5m8^f08$oU(H{1b_pT23G*4H*_iV2BD0 z_20uNKC#t8``$o7L1z@{RELOcm1*dOB;h+W{woSauOX1TpdTU1w-Rh^7_{r7RojN|1POifA}^YZ*y3~w^D 
zTl+;s#^)4+a!p#zU!GFl<+|3+hk`|l5Z()dJz=&)pk3aR3Az;^+<)x3l7VX| zSM z?TaO|W;>QoPAe8}(A%L2Gn@=I1C68A^t%M}ZsUrd%rD-`+IG{pI*&Dj0YQs}7hrbg z4F_IhkM2a`iR^8cSrdhyrkr9VFe^621}~{!7bZWDxp!>oYt?OmuMq_~O9BE+^3&9l z#~jNrNrYfx=#=y$VRQKiz~WuQsjg+rGMV-M(PL{#pv*Cj{wY3-RDLdQ9xSky{P! zHosMJC`1v>%ZTN00{H@?Hhj|a69Jfq7*0hmQf?V%X|qGT z#YSLw%8uqnpxki}WZJbGQhU}j1U~B?suB}PDPJ((_zgNRswZU(x}nweb&B6T5wZM` zeWNdl*HEBr3tev@|M2p%B<2HbPpbsCIg9juWHBJ=2eJWUG~{*2dxJN@+X3={mExtF|o zQ&?QINYD9jvU?OaA=Y93c|@AjC_P~41MbD^YVj!kW(IqV1(DLW!y`G=ENM!gN(P4E-p~cWt=en2KgZ_q z@WAF{O}BJH>$Bwma}B$-X&SS-ipTw*oIIC?&hs3l63mI(4o{1YI{nWw^@??u6Yd(t zKtozF3kd!(6hR6+(M&ulQjL9rBPh_?iP;lE8I;Z4>ZJ8$z^<1U;UnfUT!02$xi9Jn zRo(8$*$US(8KZ;T2MxGx*SFYc+oB=iL{b@0u7~}H1Je=~f4!=x;DuksAx#dJ-Fnxz z({&Ek#-=?u=t>MXIOI{LCCV22Uua{lFBM|UxSRkEnZR(+FYW#!ZCX+N$P+83V%x)` z%+*p%tl%56C&?;ASQ9*{Rty%u$<7L|nSXO4m6Vi6Bvq5-#I=lPk103JC*VXTjImAr zXSi{1f~&(?*+XmGWgVXft|Dqbh&5PazE_0@KOE+W8;qKUpnsgXx8jorCG|p(VRHS( zVNWT9Xp`l7xI8OfeOSVK6vD$$-4m`3Rk&+r5lPF4w||`2nY%%U)kAsQVJ2*Hjbe`M zH4gbCTiWv+tB7q&qoPWqLPZ_=Dgc1Z9ruQ#x|Kj2Zz>Hk1$;Rs^CezK{i6dpHc>zF znihH)bZTuWcP`p8{KCIL z-Q>uddJjLzjq7&?^dtlz_MChik4GAhk7i%!)j^$$&a_g`XXUfD*uSm~qLsW7;(|gH ztPOQz!(v$w7mFcG*k1-2Fx7H}PnegFOZEXT;Xjx~Rp7TsZZKUbYvVtx@745smHk&D zBiO&PAGx>oS2E?mHT0{F=MA)c3I@R2BnT=z%^km)Cz%K6L6Lfn?y?t}2$9st?@sS| zp!x9(#SAzO1_viPZq66xdYvjL#xvFugVmGX+S$4#@1U~|vs;mDQQj}9)BetM48;Blg^HNd|u0nXikT=Oy{@bZHV;v+qcePtN z?pN!dk8vKDb#m$A2QGJ>L7bZDG7;9xXJmq&$x53XB7BJA%~kqWV8YELAj^3}p2kn) zxvOX6e$b^Zaw-wue#d>&yMIvBDauE8$5m74dI%c7ghT{Ejw^*YDdkxDz)%W3Ug<8# zgPrTJ&teuH^&IAAZj8~kuk{_}y<#&XYFPD-+DoF=W

Dj^;65)KHWwmcxhP8MJH zA1_?*y)Bd@Q%(yFaC1{o$$5I^A+A>!L*4ds+)ou}wT) zw86pyY^eV1DAXDJCB`6~(JjP5(y#A3K~?(Q!J@@bwc{|zz)0+j3XdlEy}DYvL9`zlI$I_#pMqWsrI91j#0;lggCZ! zncy1xQK`zT>PuB%ATeTBuU*8lrIg1-1^LEd8#v@|ChPjhGyiDdIuhSwjeOe6v4%Sh z;PccVv;G!AD_%_cuQe2E{DF}907O7H<>~Umh>rd0g)<|~s)(FsxgvMvswZJfqo$zu zHO?=+N4z%7^ub3l;$o#9>~bZr8C=_Y4+yqK-PAknAzpw7#us_UVsV_hLV6k$QfurmyJV^m#P9EHnf>a|h4!va^&SzCoIQWU3zCcj9S*OxEYLP(nK zWNiM@k! z7>f>gwbVK0Z6@YSG_9=>%8%>+D~D_7U(O&$T-G(x|t6c$qvaJ7=Z zBtH1qjkU*LlMIO;#+M7t0xpA1*V&uR)=d1(j`1Ld7YFQM_XG|d zr0ubcp;&t@ck4@j?pl#drFjjYZ>MntF!cX5_$pM;A>0a+4^axz zsxZ)1=8Nf%e*LE27zV+f+Ry0isT@giRfcqZQ`C%M&hgT*vFwILCg&eBFAn!%W=$Az z^rCiKw0Hk}Be&X7og7L8<@IYN=v)G5k4j03T>a;+`1@Qw_wD&$YIX<5?97vQ;bceQ z%1Hnr$ll#Ua58XR)hnLU+Zu8~G)L5{!85o&R3{pTRJ)k*LANF@9CJ_0Tyjw!RyOZ| z{*Pkqt#I}22Z_|%5QzvK<8@ni|hD|HDg=oB{B%`+}nsrF0{ z(w(KO(J4><9#h~w-_R3`T>e=*WAc(mKID?J^m&C)Tww|#KcyoxrCm0w2`*K@>+MJf zN*Di;p8CG7`3)53Xv^Jx>ML0P8B}S5p*^6Fb`{(c3VwYp*$2KvVfVymUB2J&mE1M0 z&}bf3(mtre(CMg=zvuVVC|ncOF5F0KuaX=*W!3mzsxz|C@wKB$_8P}ihwH z=N7v!jfS$CRl3$=4$zs#DzMJ-iadI&pQI#~QEZ7TjMDbi%CLDKgYdWkIQUp4e;$P> zE9CEWcKUx4La>80n^2CV@Cwcxv^=a(f+Hm-BJ~dG$25QElc^#W$3IbU9?>t$v@sDh zuMupk$x2wI9v->t;;_Z*E#7uI_D_-qKt==I3E92qy@!vvP-w^;H8>gVUj;V9t<+KS z2Cef*hX$2L?^5WQ<7>WygT6o3xIV|9vf7gkn-L0<`bi#t9f8v}w(<(| zZwQXth$Uw7zEM+Q&MhAc@;IX>;pT+J!xt=rDxAU4v zIpSdjGSO~!OK4Yb3yfBp`J8-DY3O*dhJDWDk8d~n!43=D<;FT#@AMF#D*_|LbtGgn z;?69*`lHY~J@EgCxRB0ll6dXrJhCjnd5Jn#RmR`L;*<^ynpoNwD??X1d}uzD4NY3e z{I@U1dZ&f(%$f?(SaES{!#L0?XuK+;NSJyH{%3(R_e@uaPi~C*`2q=*t*$#UwIKSSDUQn6`MAiEeX3+V_JaHUv6N+bOoEnx3&Py zY$@E`k%2ED3!bBi$_%nS>NjCF1M3P*jjf#=pxlBLK{kB zPH-=wpkGaAqidg}kkhKJqE@~vRwgd-TSx8%{n(5YczYsc_XVF=0+|h<6oG=& z>)k7Tb-%-!W7q(qh2rIyuSrfVNuD_}Pnda&VQj$R0yB;v2Mx>qR?rfkEwS`{yDWA$ z(v#j#_>oUz)the6qfRjHB-(Dt8y--#jbL&rJ=|-^99aL1vTeVtZ^)lsy8T%!VB3Ct z%?~{zs1Y_}fJZ?VTku8Eu8+X|<%0d{c#Z^-ajLJD(w5t0)n29Wc`jgJEwt$GzjsM|y@DvSsK!$lbf;dMQjd+sz3&&T2S2rc#VvJ{%urqZC^#{<* zQjgcWqrE`PF_VkG^39@x+J&NIlzB-Gg$?H&O@z{E1}T5@k(0xW-8h$!lUqAfeJ{TD 
zN%otCl68f$1ckbi0eRwRPnt0&PdugagLmufAC+igCkO3?jK5LK?I(3R3S>yl?-8_K zmN#(Y&h?6AaN;Y*5Q8~qz0qLHKS6cV`E5~SbLp`wkMLu@{SWe8Q0Nni3+H;m+Zhwa z@GkgV^|U}EwmBC3KMdCYtRO9v9&_;5xVd-)LI^t`xJGFg4c3CUc?>F^F;{%vn66a> zG;-)7Ij^%PyvQk8+MQeK&oyJKw1~>N;cafk{)CGG4s)fMMm@+jFSA(Rj`(^bP!j($ zvN@6RVbS_R3%h+#5JRfNT?o;xsxGKdbQ$z2qJoj0o;L!hoyRhUcfB1`3Un=|U={~v z=@8gh7CeeYwAbPq&2SR{&s~^Pqd*rBN$-Gox6~5vfrZi+l!a{Q1KA@Y}2; zc0HV8hwfze1s1E%_ZXy_UBdG71^4yV+@%izq()5`QF`Uzx+^GZ(A_(zT29y>^Qezj zMVI=8g`QDliLH%(tWgX1=ptn*p+`eH2V$!`SOPo`wV-!h=eCH5@OYJ;agc<*L;4^I)_DlpL| zy)0eYR^_F!TEr+5SNeDG`g=J>i%KzRF!%#XBj;sf(l<>H!%&fK--U80UVBQdEU-c1 zfRfSh?1K^Q$)i7{(o(T&H*6F9@1W@i+7XUJ{IOm&XosLR`WR3lP|zXOjZO*$5LEHB zrMMy%Vzfu$0X?GpPh+G1mG>%8z_V}Wmig*9iSoZ0aPO*?Bgs7%d!5()k(|WB^f`s) zNkg~`9(&!UettlDcvt8C8Sjnt8QnKpvp@gay1f5y>vF~fFsquz{{g@0*3d)FOn!<& z(P8xVL8&uwP1{TsZYO8ldymxIH)GUXFa4a;J)b^Qx~wKij*;_!)47VI{`HY=oD%^g zE){bHU;OjmKS1P@7!|Qf%xP22FZN=2TAOXDp*|c#Dx>i$8XA@|$qT+&-wNG@$s;?DI^V7xAg{?;^f_?-vZLry_oi zs#UaXL)GJVZXZ2JvXdMwA!WCWiVD4jy3w1f;u^-C7%kpLTs@kYnQQQw`p{$CVFmbb znc-$Lq%!U@k2Jbn=4L;LF@E<`T$@R4d+&_AuD}nISv?m{Wno48_zN@+Kmv4sv?4Ym zXq|{ymfRN(A=@T<3ET4S?z_u(w7P(gFtUSjqPaUSuBNMSEli-XTPJsCVy>dKRsy3qK455w@($ z&~77M$Pm7N+Ujc$xPzufEVw7*eJDuIL~V6!?(M&RJ0#!|86||V6;D@^67*O9)0~O) z%RwS3*#)`F(fW{BSO6Dv!(9q~J#xX{q4vkjGyxDcBX(>^*C%5pv>TK9ZDSJV{h>m9 zSESa+4A`iGv{^t70gyKrZQVP0JHe1;5TxY0;+)5nMB%5fKuyFcN%h{)@~wTtkcSDe z6WKx9W#ZUJVRB?D8>p7i`80lPYAxxHS>c)1j5BnBe~C#lO4^kNQcU7(DmUT(fxzEg z^NPL@i3EHDy~9FsfZ}$4Hy334g3x0t0Jx(f7a-WoHvglKH)TRXE`y*fXpYv~G&nZX zrLS4(G)GLV?n;Vwg_ZM13T&-WZo!Dxh|laH-ua;{G!Y^?YA2t{2M>^t1B52DHt{TQ zq%!%i3ca9sJgTZ&F$JE7h)prmtF=};f>-bK0FP0)3ZFFKoh29t=#TnH-u$B{WVW+;N)QWQBjH;IuTN>;*FRtq zQeP{Wc@dquPEcX!WC$A zUBQ~pjknM0)~VoEhtzinuilG10106b?q6yP*#r8WMKN&vDXnqBh#Pmw7p)WiZ}tYd zhI&lAZ}d)QA-ze z?3Hr9z=Z$|g9~V>OiUsg(bDW>%%>)7YkSl;siHd!$?+F#-p+(veYnC!Qztk~CQQzJ zl(g?X#jr43^HU?ENpSnlp+WT_v$E0;{CZk4K*bi?epwM891Ean%U4Jcou77;o~CcTn!6%x7-T!diRxj%%~YW zF^5@*WWLp?=Gl)Z50s71;8a0+AvUzVya-#~=6Cibr_IZ&g?1y4O^~Ct1(Dkr+Fso- 
zK1!Z#VL!7|>vuhaltWs!uXO)W@W&za(D~p{CLPdFVi5}39T;PTQ*yQ07<`aIF7vYc zJa1BcO%$pO<Y$E~3EX&ZGztxbJ3r(&Qg}wK!3_iyjpLSxV}XCtI-}5|8>Xm(ULq zx29+Kv2J?Fuz~j3G(ey3Gl%+vu+%?}gp3%Zjji4=yge_w=(}2tR@b#ygA*fKr3%QP zcLjHss#RuqYHQOOX(_)Q+8U+VU9MQ{Af?EhND2OwhVt;onN)y8X0K@=ZSAeq)R-Rv zYsDx^{3u2^Hz5kAL@S4B!m*JrO%yQ2M61-ot%QYCv-^FL+V@B|24sK2M!N%2e3)6{ z+s#)*f2z%4rqTjcI}8s}3Jur7>`8u+e^1UTQr2TK{}h#wPZK?-UGyY4&sQwXvF$TVO7$fax} z@_BVK|BxU0n^%k*xQzQB*xE+>f2sdlHY-$-M0rex&;~j3D7*)wwjy6wUe-+9;ZoHB z`UU_xZD9;Hx|DGYax5)Bw8_9N`1-vU+jr`CBLRiX3|vcZn;iK3ri4YrYh*m5CanvoeGb?f?wI(x2@cJT4~=S z`}p(w3Vi7Onq_Y2$}8n}|DN$x19fq+S|LF^CrpHAO|Sh7+JJN7NUaABXAMwF)|E4_ zu7}u7_g-#Vj5{t;ac_a#R-2>uR_VUxE#6~0au+*W>7(tqp85_BJX{yvUWc9?YGp1| z(I>DVr_!B71JO?J6|ESuK`H>`5Q9w)&NP2S!koB4vz&t`3b%uwP-1$i9$Kv4MNzKO zKP04@^@8$nbaCNX5U>Yx(O!awQkeRjH@Ogu)+{+!S$`GAIP*uT} z9_)(}iFAeZ7wVQqzWQq+Cu1sTj~ryl-{*LXqvE`bH8WG!+H6Ha+~^$7EW9}A?Z{oB zgQ3hMp=XD6Ikp6H+L!F$*j@T_6vDc=_8Nyxw4%4bdgN0vH z7z>`VW|&kIoiIzz7=U(s$hRe)G5X_(rCz0xxvq-U3px+6BR}JQikqzXiu4pB`APum z*ZT4eVSO+)og3Pa__N$RD5eVQE*4joeqPpy8J=?<iPV(Z6yJ}vb%xjypC1?8~z0E&bhr3;W&1z`<;WgKja5! zBj=p|AJ*P7Dvq^l6UN=$2^QRfLvVKw1h)VQ?g1Ki4*>!R?ht|m35_=p+$Fd)4#C~& zDb9JGd1q#QKfn6XtB_S)`<89@-q+PtFrcl36C+nEAhWq%8ZIa9{eeXtNLC6{hbfLH z#3+6snuRU`?Fwd|ML@ekAr0c_DvvDhZ0{>ZUg$YQldXYRK@~}FWhG;=4wHhEHF}gu zwLr7y3`m49qNK9NgaCM_1_P61Mn5_3(nh0@D=DRS+%VrDvbU zPh1B7Old6FQRi07*u5-~LVl-QYrAj{-q{GNdIEn%i0q~t3h9tBAZ z`(mTDS%qM9QQkqP+*<5kFEC!~k@%rfU&pV= za}_?`p3Rc8F-y09M!;v9U7vDEP@-P98^(U0P&4t+bm_o*+p<7*-BSt%dyD+2*C?+? 
zW&A&_+)Ih)E>?1`ZO>qsXKW%XGLQ%ma%V6`0J*i^$i05SBT@g3S)$5_@OV3r^3V4# zJ*c@IQKB7>!MTAC#>keK+XkCp!o;CK&cPFi7zUe-4KEIq8BC;-mNh^rP+Ae# zNV~@ij^y;?liVuF>?Q}bDBn*%Hg^qA(`5dGTjS^YP~j~@fk;$p=86%iAkv=IdKj{C+ltpRzPuei zLc4Mlq_ZXG{<58EGisf5%l^%?Y_(rV}Y2toXDw*5kC?C7+Yqj$lanBd)rHRA~ zRUKxnZ1=zLXw^Uip=7 zX|nYe7Qe$KU6FxC#jQw+xHv`0n(A4-PGh+UwE`rxc!FQJmUsz%Sah+M6;;6|J$5xCXSsH)RQGtX`MIe&Y+<9 z4QDpPo#71vedbCq3&J5!$BVhSCyekLVfak4EtCm%j&!UaV(bdcwj3#|&YKe+cXCv%!jZj!e# zi6T;hc*tUBoGy_ldj~#HfmKw)4n%8~W-bKr`4$wXQ0AEgHlENbB<>^O?E0M>*t@P_ zn%Ck*iQ>m)luS6*bVWtvZf%feUsR!O+{3pd=^>#2yTasHY0R6L5;bP2I$TO>I$U_6 zQ%VOhXNFrX(%R^iHc%^DMH9^xLu)>YO!nIEx`)7rPl(j31eWLtidHF2<-5=8#(+y{ z5Q0zwHGrQy#*{|6F?qoeOTN-Pac~E6s zlEf|E7F#OQxJzs-F?-&&PlNUke^l@=4Q9Hm7<7FbhZNTw{PC7>8nuh5KQ0eOQk!8v zPi~O9w?q=>4@#WN#tybN3Da~P`!uE-`4A?#HZgIeR0t=%xvcw^#{Ij78voIhB+-O4 zh8!{y-Akzu;hvq`b~7j{zQTZpnHXNlqb-w&h)|=2|?qkc@EAO$exhz zjj>=^zwQqlE{dJ*pB@N78rgDL(n*!wk$H+uI(u-rgJu2{CryI4Z$)nQvQOJS5(j1j z)FE%DH&e*3IEmu-0v;@M@yc}b`lNG>-mfM(42tB{?sV+iw~?ne9{+}lN_HNeNM~G9 zAQ6sJB0-OPvaTxe)fX4wb61F+ZTwHjdWGzxEx296zhMdg^d#?ny1^tesP4)u^l%*$ zx^Q++5;b{AjT|G2LD9snr!ls6qJ->#DiCT+AfXrFvlJ zo*tz+_NUPE5|4h-pZ*WI+dwMK_|x}8@myDKCWT~_Q?f|375Te}FVI+s8NcWBTRGUK zXegUF=T43RPPI|w70?MTF*`_5CQrd}6R@s#u{~iwl-2`@-8!t&!w&;a#Nl`=L#6e< z7X%fn>>A`FMCz4=T5hfpvsubDVUX#4`4t2c=Vv#dQ2fHFpa;}yKICj=3*1m(s2l{- zI=TiwT0?xU3eGvec6K0cvj}q^h{1&a3$+&`0gI>(f6!(s@Re$+igqfs7sI50^eS%d zu~3K58w|K#i8NuWib?VrF{_04r<~h(6DZbZivWz%oWqITM2i*roAvFG!i@1xaI1Z& zqJ<V1>L)pm%1 z2sA4J(X6LQT5}|dQn@Vdho~{M zAt2O~1{&+Iweg7rKjvNm^Jq^^kBg6HoVGAZ^xLGQRFYbLy~LLz>E>2Y)qtiQ(0M(f z6dzq9_G;2i2uSqOO%x%O5W!y8bD!Es?jq*&gbgB z%>mZPOk~h4>GHZ$GZG?X-n;(Fs_P1xB%;a|p{^Pt={oQtbVCXfQ@~a{J}U;s5p@;) z1}P5(Md(*st2boF2xKRD0~-ozXNHxS+d=GDd=T0{AA+l2gY|i4r{e<8$cqk7hD~kU z*|17I-9%?{P6=!5dO5-~9JMCIx*Hze=w`q@G#whz$0CHqA)FXPD1JZ^5Tkh80|Qvl zPDG-4>{Fw0E_zz8Dr+21EPhXP?BNR%Iu42%PF09SPX7qOUj-!$i8XdB?abmq*<;{| zupSca-d7jeMNYrYg z?AlnM;)-ATr?w>SS`LQ(RlATOE5L%b#1!NvDyEEm=xnv-aJ5GFMV?#&P#BHjrksMu 
zdS<(}pvev!>jPLPj%@qZiw?W64AJe}(EIrRAs_RTy04fi6G;exOPr!1OZ2QaQo z;!mH8hKepEPSKNX3SS#Rs2@jW+4Y5gIr54EK)3QEnGDm52Gg70Sm2^{9PkVE7$Or1 zD<|n=snq*o8I|Jm&wt~!%1@qlp-(gH`4GYT@@I=FyqA)z!Lh6l%8sgscHe}`75A4O0&t1I*=5+ zubO5(*tS*H1JxzMBS@{NU#d$)cO#+1(fAe&aZIqu&+rp ztHEbbupoQPhX#Cv;ZTcOtp5g@#huR_(B9J zZhWlRLWH_&X&DGk>kM-Esgg=oV7uUw$1D=5;ZMgIcH^2aqIakQH2l|C597iawNbQj zeo|K!O0o9hMN;o473$aRvA}L^Ht%c^-Rr@Tj*rv*GqO=jBlREeXQxaVT7ccGn0^d%V&e*^x{XWev&OL=fi{nTmW88$b7*dZ`1{>}2 z7Kjov#jdTT7X_oYDIPBOd0U7}cagdu$MNIuw@ArA4|~4*_)5nGv5H*+hOQeoZa--F zk&G4Mf7ezWPE^pt2cG)y)>{N}yq`tM&tm(X;J+83ouXf`LyaSMBffS0mh%Txx(Qr~ zLTwHTDR`v}2@<$d4bBC4d;sJi>&{T*xh9WNxI0ecUlt4X?-6F0)xU)UdMIweS7T%C zeYIIzjqL~p{G-Xer0p;n{CTFJlT1h8<9M6Dk6S!3`x_`gub9a94oAEZCD%V?O`!xG zU8yZSJheqxNqybu${z`8lcUL4Mw;+pAj#qA{c35xNE>Y!s8@sV9rU~i19b`OHyHo? zWPUb4d;Ofe|GZnHRd*E|IrHnM7GJp7$zzv}{dm{ALIgI3^}`pM_*v8^*)PX6i{G&> zIoW1Sc?MwJCY>Mfo#<|9#>8}oMYD)OQ~s0DD@Lt1usq5%k+m4bb_zqU5=MosHz(|B z7jP(aT+_4$na9}!OoDzN>3(kr$~Z4ldoP~jPMy^l=b&Am$RXYTBJFzZ`!|M6(?xFb zDnF;u;>g)%be3BOsdDwLCO=Y^UB6Iw%%JRU29p5R1eaAzu zr8wnqd_!4PX`J*{J##ImGkmCG*4nl(vIgm{iwqi-EC~yZs|boxcL@!3SWC0@3j~>P z%3#@sQ&tjTAz7aHTarDQVGrNzk={_fj>lt*OGLn%APFAYyBUNWv&go$aiYt%wJlxM zF6Ku3qhGQL2&t(#En3am{J@@j1oY_R8o3e)b#`O1^;AMaLdkyJL%vU45tudej$psb z?@E159y>VlUtT6R6XM~)nVXx(!s(-7VTJ$s;FZPXvD+kYgA=!6y^ij_bkTI_r z=nwZqHb4Gb1f{kIf2f~veyVTMU2O#s>XZsD_Gk%_=QJ)feFBl`RnptSE~vL3&lqeU zO;GFpO6<)`1|^Bqn`8uz74JRd2IZ};TAMA?*5n;&)*s_$Tn=4loG$ugJUKWGmm8oK zO5VazpSQ*3=LTYT5RzJ!)Utpzy(Kz=b!dJJu?%Ysn7@$wpxj*B!4+r<58E0x_ZF^H zMXek~amd4tTl7-wpg+#@neeFrtCG%zqEB94fSgVGz@lDmt}9kIG?Ed@tAR;$?^HIf zpTzO<(R+JFGI@(FWid7%@(d==3$Im*3pV16d?N!1srwKpr90Ms;c~!Zkg;ZLTyXdc zWh7cKcDvg?d$f#aHe;V|oy(a8(8+v3zxyX(jq{UVI zdOVvy>_e%&QE_zAwx@p21Dy)Q$qVYr`Yaow@<9e=XQZrEqm-QwRScKGnx=He6<5^l zzf^W)lQ~=QknQ8QxB~E*q4xu`c;Wa`4)5QhNqdjapBv6sQZA&iB))Xnl8I|=nmEh$ z9NqBbss}B;zf*TsA2J?4CgFF=xprE!^>rWT7c<%(3Mo0>lv`Zy`I)DUxwKG2h#w*f zAd(}+QGnPGOXEIeW3whes|c0ATOsA@&ihFp zzmoFj5ye9OIs^@K(n4eL&91?LX*{bgl6V}^?}3JWJmMdai!TWP2p)~&1y$JYT*#8t 
zH;;Ydmn`HrX%Cql3pFEXlc%JV8nm@OX}7hr^zg7hyDsqUASG|0t{(#jj5$!6pU(N^ zJWu~B*&_FfR^S>(%;N}#%7SUov9 z_OfskH+%e-zUExL-$FWSVeRbnqQ?RG!5$e7-x4N-bZZ+LnK-bcZfeK^o z!tKnXnvW}ttL|#zROg$E7seSo)I+bGJJkezeIdi`0n}eIN#EZRXSkR~jh+k2gc6}K z+3&OlCVPyvJ-ISigv!9*nvlTXcuMXz@@YPJod?06kbfV6f12a-!io%hq_O3H#D*sw zqvK^HQZ6A2pWj`b`9wx#UPI8m|AZ#chMUhOPV(!6dT=f-!yu8&u@w`v8N_LYMPhOr zfEXD7W2ir(q2FSTMxD|zlJm!nFyhwn(PN7%o7l0?D4rM>lR7TYz)l;@DM-;4$$dAF z24DH`mp2ktge#7EDa_;&(a=35FwApbq2x%d1S5~!WNc#Cz2+z z*7*gx$6$hZu|jItFZB`+CIva^4O+McbGRcykR)TCX36D2YWPQYs3+|^Ll){8JQhd|AUw`Z;pB|&bWY78bHYZ&toV&w z-(PE7ROujusciK@WRQ8!8*|+(dex}1#8Lh2#voNc33<*kY1i6)gv1pE)Gk_&s6-Mh z&4WTD(%38ll0wFPQ77vJ=|e+*y)*T*ULBr~p2)O(-U2H|87ty_-aKa;DrrQ-OlnK@D6sEoy~PlXsu(i^^DZgmK?G!(L7nZYFw(GQE-82*Nh zCiE>YFPKxQIyen=tl5{)jC8E?xA-gkVUKZJ!38J)rQRE8j=Fz;)mPrbZh`*drc_FM zdd9;j11$rrkUKb%P$uFPrR&R25*m$JO*>PM-KHK1jj3On$J|xP z)yJ%mq_pQ$5Ywg6ZhaDgslk!rcch;|>OrU{w-s@{i-fAi4T`Ii#JqWLI87EiFcm~! z3dQQ%tB+G$@T4SAi1YhV!?(8$sc=fS=RnLl+fK$sj{^TVPIp`Hd&jewqj$#<%O8I( zcKvY3hQXL64z-srG^0sXAJ`(nZX-W=ui{-|-&O{wSPEqb3^f%5YbT2l2!R0D(Oma5 z@U9B&s#=dG=e$%r9B?AyJeZ3uY4zPr%bEwYcUDh`&4+zTHe!jK4@=e)Ucwwwu%Fa@ z$~#4+*IWKl*L^u*VB%h*}T(oL<^f9?>cGG2lCRq7@x> z{jfbC7s7drYl~xLinDNC>oOFwLh}?&v+u7gJS0C7c>%5G@DI95ydrtFsKr3cPxmco zGuB!|<@sW>b~_CCdBmq34%cyh7Ent9GoM^V200%L25dWcy|nk;w0Kf~GT*>GfqQr5 zO@MU(P`P7}aJ*Ss>cB*F(37g}Ixf(Rh!t-1<-0(kcB(FZv9?7q@x-kPxadI1Kx8z` z5THQ=E^$pjkv|EAaXOsBs2@!M5_yk_LhLXGpszoshI<|JTz+*%c#HS2>ezH z-%@=`4|`XM`3-1)BgLOxsFw6s4b1?^%)cN#2+8?Zt-1~W zGak<$kLgU$;D`UY{z@|bW99&Gmgfsb_a6ZP5jlJ!;4u?Tl0^V|=&yzZC#!e^3cvpO zFK+tzxI_SGEB*QD|D@cDip>7GdDZcMO9P0NcFP10`=7Uae$Db>ZuA?M=)kPnPF`kU z_W}D4)U(`noRI=lLB;=PEdKpH?dN~RdKQhf9QNrSi|{Ol_!p5^B=99#ecaD2e=N_> zdwD3a|NMyO`T1qTVEo_jvEdf|&uc%6`qlhdl>g2n{`1Y?!}F?9dOlK-janY;lXA#|X$*4U3dFt~A?A`w!HsH(eaFzdk)bry~r-PK9zwi{o^zXmJ zRG0d_&yMPpQk0+H=FR^P16yuTeSdw#Ai+h$JHb0yot%=ALanL9#_&I~mCd(0C5iwe zWl%i+iHEFS0qGW1wF3nQM+;+By?Otm%s*17|0~5^`tRUsG`+6ADT6zKn(A3<%YT*# z%3?Dp$G*Wz&th9YrabxhHqRU{^dXcM_9S$$v%x(&v@>S^ 
zSU0@2`S{A{y0(4ea7QUcJDL?XYTwfFLBg!2W4);_Ef{9C?xwp$gwl`oyh7NYXB5Td z_4f0O`su6S-e7$ZT6VUaiGMrS{;nn1`6~7uJFovX)yF09<0RY@{k8|yLJ%2W=~(D} z10^>IXT^mw&0{6Y7aTBku6ciYOd4?!EEWO|+!7@jHIGj2RWlCo<+WbG$`TJq47|4kQ>TCBgooV%8xolFf0uRJu99+_Uok2OWxK9)luq>vhyBVA z*O_tc=DhPdlCbqL05MwK=#&_N2w%$ z6G&yD_gzmpHQzPAtN)t-yGlA&!lAKG^ktv~d{T9{uhXb?ZT4(mpGF6D^@jUtZ9<0& zL7#bpm4II7H>{zqj(LKUTwvS%pBupM(vRX~EL$RpO2j=+_CC?-X&H2Tvd`TifpW%4 z*|^@HUnm@Sf%JvjG*OpXBkqw*#W&M*(Z{;8HDa#CKNrrr>e9T?whhh)WreSF)N15= z1K4lxn_-8`Sm42`2V9{-qQcd|yxYSZ`p>KYi&g32Og>s;L8kz$_ST292zJBfk6-7i zoie>YUN8F~L-_zO*pGThOce%I(v?XLmQt_~q^S>C5^gAQBpd}E)t#I^NzB1Z_rGu@ z$}iE++EYj=g&YaY5xpH3V?dsVxdC@y!BvG(i4zSj`Ea`j*1M8k5B$^~ykEHACaUgm z$Gro!QO-PajW#;;IsZ)TKspg$QM>1Zddzz}FGe(Y(%!_yoUf=rg53UR+dON6=?Vh? zi-_HTi~qwMnt9+YF#xcuIUUN^`s(Hjw>f;VN}9|cHvJxNCe>6To}rUbY53U^060TO z&ENYNog3FBmD_;E>+i#h4@--U4%{z%fYCtcXMgh}{8#0oRTnr0V2K;z^P48y95aWb z4Y1>b_2oJ##}A8wsTW| zH4>fr<|?WUFcF@> zAL9*?VkH--oJ$mW9bgl9t9Pdv+#BV9-<^ykM&t+ z*nNWC6ec0JMrFwMNFeXNb~ad+|D;u{LVUVqIq@KG1yd$`DXuZU#*NhKc+o#WZ)!jP zd5d1S%=BK2|HrBpRg3YgbN^QcW)dz##!z&Py1mJ!3NS9_HYeWw`6HAY9qY+F=u*JB z+1FyLHqC1J;yquD{|*iBYPl@4Tm0SS-#Ck4hZ_Js9vB<7rT+ihWMJ?_h;9RCM`md` zX$9R4^M?z+2*=yp9$#t~js%Yh%whi(3O2Kb4rcuW9kwwWMEt{Xky=#3||5cBNk zInDGq6Y0x#c#0$81d2$t!%OuiI(goY? 
z4(ZOq9%mdVxKtbzkfY@W&D855?sfumRN?9z2WS&XhkAt|nV^mEQuQj3o@@A^54u>3 zqBq^8@ARzBX_L=+U2UDQea3sk=jeeWv5M(z4UCAJcFI^McImFL$79p4Hlv2WWqRec zbLkxxuJfeAaJfn6s>|lTNfGQ;#DjR}Hp)78xRF33;?0**E3LjgZT|!wmnHwY3cxr3 z42=QYqFDJ$d8r|=`{Q;}R^BYJr*ln7Oo)%n-Clvxw4z1YfWA=J3M!zt-J=3>9UNvK-HgF+a#hy7}NFNTGuD~WbRSO zSi=%l_tNa^omhbUeaSF2{oLhK!u;3PDBO++ zQo`^wGT%0U{%kl4HL1g)5`hlZWTWC9!K!LOq9o>ZQzj)9>D4i>umQ%iI@@6q=*Uo$ zqsdBfT>APIr=~WJ2L3k*FVWsa8b#?o-sGU5qYDxIa90fNS)l4NV3V5=bVvnv+I(>x z><@1>w{t3kYupIP-fCrl#p*ibB#KJqp&M`S0A7wKYC;#hYGS|cr# zCR|uQCTZ(+;^U3xY8bQP@s{r|q=y~Vs{)4q!+i(5Okj=HeWUwExtmBW{)RuAdfX+r zAEt@B2m3`ne2a;e)DJ9>96J<(u{xv&(v<)4^q;-`e!Gl+6=}7fpHrmJjD%|T7LQ`1~GYdm9bYo4nGGC zm*4-tyij0WwQ)R$H-FXtjfcvEl!0*vB10f-^V5C)65wyMrOv z+%G+BIIhe(!uY=e-9BmWpCcCSI^V^AT>NSkZp0gS9|XBE{>h*b?#Xwh&%!-}r2Xf8 zpM#<4irF{>3Ea^O=n5jB<6qB~A50S!6&J@Y{HdSjzsrtIwK3Av0FqzvR{_4vap%Po zr`Z4yP;tqFTe~6-^ZBvlAZ(e6zY+AzL+Nyp0?BbB4k z<(?HC^wWGhtM&15(}r=WC5o&$(%yaj@qWFTX2=bc)4F?VU0@9q3Ccd}>Hmq{fXNRc zIH4^nf!<|{Wi&dAqiAHrOIY_>c0Ke6f83_0Unl%6h}9iG07LpBPZ&(w;*PBQ1ppiT zNjM(#U^8si0H2Ola4b=qo;G%A0;kzXpM2F+sb%^2B%1-YV!BiN8k<lzn_4nLDl7&enCK__y?nJIkn~~xf1TI6#o%LTMN!Lj%~QWNXsManJ?MtjI z`gdA-|4U7+zb)qDJ~JFhhU!Tn_s4@ksRz2lOhH%m<)_ECB&g1#tWCKYX449c>iCaC zXw#w~9X}lxB6!DKlS{eTpVY3cb=+>{HE8??@yg=?rA@^iIP!zQ0Bx+pz-b^S(e1i1 zXyO-Q?++v7MyS-12NHosvAEa41rSP(#Zb;Svto@kEdpUxb<19kGKRt||MsGf zxc6qfG0GV=HcTt!mjWg$Fsx#V`rg^FzO^`bYwe51H%Oa$kuD!LU<+cHkg~+eBxZY4 zvqk2BL;q^z^(s8N&g^xaR;@hFBjBQ7RC4n{{{*EV;K1`SPJnfY)U_OZmcisaiEs_{ zoLgr1I5uVN`0Bqsyy><7={*HKQ)Cxi>X09 znJ|HAuZDd3gzkyySSL_oNfUJaot}c=ie+yXu0;Wx=iMd6@1l~y=IGGXCUZ#W7!Q_p zIC8Ef**5WMI0AQ=_MbQe6g)$7lT1#p%PVFlip(Wby=oO%{f+4^sd@a4+z(!p#5-JDx8Ei+wPyzgc)g#zx$4eC!Ch#!WHBuERiCuA4o_*k(Oi(95-0q zJk`-16a}*Dt={(3As+;^`2FJXjI@!t?`6_BUC>X z|G8Wraui<2-MeOW6W3q#4*42^{dBaGw4!UfuCp7!lIlBd4`0dhjYFy!e)=zhHtHva zoShf|@`*$vr#+MlQE`os*(ddvm();ax7)dP(UI`W9-{w>;et-2Ilwc#zg3M9_yzBX zw{ANxwV2Rx(X%g0*B}DgXMH%r@}r$NLf?|tkdh{f)~@Pz%Zzi`pJs-mB^Y0aS2hwLJ{K z_sjZi;YU6jt4lS`S7`N4BI{>1O3y9d-?f<| 
zW%u_mGZx&AwON>T!mlRe#J~|eEV>zTZ{{SD<546KlXhuW%<^Qs70cVplSYe+mov-Z za#Sdvuz(c3|F}Adzp7W25RvIkXPO=7NOw9ZpQL%P#ACbSUSwl4FiuI}o$$#&;pA^r%vZyp8^&h!Z;6os zcSEl%MrEjk{mD;RjaYA0-eP>>WQ1wkyjsAe!trpRoSUhL%40X!Z2G1m(HROmv*@(T z5TYe{XhZ@QqwnJbQ|Ii^pNPdZSCVB``c8-@ZJbMKYfiQcsZ>F{#4|IzNf{n!x@v`vdWtdUlfc zxj|>;qExC^dFl7htybe0Wh$Y-t<*T2(q3+q$%;|V6fAr{CQG1I6YdS3n(ve&V8|{a-rHb&SRaGVV zQa3{F-Wg=tMZ>P@TJeQHr*bVuM`eZsB=0!bpvOmM8EuSk<>f|Y)4n=q0x~Cj^}f53 z{xm8Ji}~&?S71V7hB5tFo{8s~6Q;bvV?e|m2${m_Zv9#V?O=+Z`=|tqw*&xh&9NtV zKYh+gRCbsdq;dE7d;7~HmKd-t2uL}w$eAgL4 z>sy7H(SvXL#fULc9|}K9qKsrEDzYl)@oA_j_swG(Ofmfhu4Du#O zq*y7?-xrjf1yd`Ap17=jxLFz^rNpIrN&wYD`LMdlEIaLDsV|NLOGI(=wT-gC<&bY1 zth0drlL}^o6^ICZE zH**Ju=L{9{ymxs0hFZOIt>P>Aqh>!@FuzlRhtTohVcD1TQ+-gz=Xia8@4~*hQ~QNSQvSmk#I|1b&Jsk$ zzq*-UaJ^-nvS15`>e_3leM87rOblzPB>X$$CN-B43&Fy* z_#VfiQJ~i~Ag#Z)yA?c%L(FU0iNQi7;tq9c{tkyRVF(UnrZ^f|C@8Xr#%DH?%}3Za zz3a3ii{u3q5fP?Rm9Dsj#24QLsZ)U5565;qIytvc#40vW8JgO=g6eV`|!G&h4cJ|LF=0HP?G>m}(RwWgU z2%YHDjpn_Wr=B*&iLys)+zf-79;ok7^7nL)ZmIdj@Kh?A&$J=;ijI^6%)&pE_X8D7FX-t-P-N4Q(sIL{kHcKNB`;D-_%| zk~DQbO!OyuY@?B^@4BVx@iQ9Z4d`x4J{@Rg2w=V&T*8CzrJeDU51#b6+~ydhEk#P7 znuK~fKj+UYixHGd+&rFRx-;drk*uJkN)vU_irlKwDbzpPurIWxSHU|d47Q$()$jLH ziTp|$5vZ)kLW?V)mV2k4hUw+N6}1xSP)EW8D?#YxdK-a704m0N7)IDcOHWiDmfL~1 zK6Q>zv41m#>(e*$iB9x(iYi$$rOUI`tt3c5*jG5eK3nMJs5x@+D}xo}{-Q{P%T3JA zV`0$i;UVJ!h3lg;oJdxV8*bdug6{uV&mN{Y4NrH$FEnbO>4e3`?D}&~3!Yr?5_weBe8E3e9RH=ZVSq&0I^t#+VRdplV^Ir+vL)(`=3gzmqL67U{4B0hOK z!6RHzb#{RMLOsNov7B_#uV222@~h2dc>)VA(K_un7seM;A5b**t$7G-h8uoZTT{{I zU-$hGNiuSWMH1=~q40P|C%~smvj_*+n6UqCV^%ynlQaf;?^4Rie$u_ci7%!(5hAC+ zbv-K~;McH71($pkBgFeC3OHjG$qKBpD+ICk!V$A{H@}nXNVW3)OjEsFyNZY%@Q8E6 zF@;>{EUum7jd^IhZAT#|Z|eZyY>ER+jIK)m>--n~f>N|$1B%*t<`xggPvvq+0(X~(NgKX5 zLuJ{;Qh)i#=Zc&se;<|o^~rBJ+@K(b1FGSP0TT0$Xqu~9jsI%()zBW{{}$DzQ&VY7 z`j}XI&YKIHyhkfgVE*=FMn=Mj-DJ@Ltd&s*Bj|*hn1IhcImP(lDOum)-0eiA1j|@O zmt+-XWS`&xtCB>#R6|i&%+65IKGJX54;u6o_;GfIfN2nhYrEAE@iyX&#mOl~Gb`I- zwTA9aJvXM!Zj|0E`qYJ24ho=%?V_wq63Fs8N{?^IP1LYhZ-jp{4V3=qS#T5Iv-2Z3 
zYe0}oKjtzjDPLztgQu<`>T5)bFs)+7Tajp)>Ynqn(O1J|+OQB|*Q0_~OsNg~v}vh+ z6dY4<;@Ks|hU~S9;nTkNCg}&NT4=}XXXoJ`k9JL+sIp&0_7!c)=P+3V zzMt!^ugaowYtJ2Dr?JU`?6poa<7HB!mUpJMcA6~AKU2v{TfXT@n`OG;EoZ%vM+@8cTp#e(KMK3Hi1dpa3zy+=|H|MK(+38hobI^#d>C98o&Wq%QS0euSE7Vy4$yU-B zQuy@=MDd1)-J%Zs5_gW@A@NgRSWE&;^I1#TM9+|8*S{m%9*IECWj$LW4AngS>*BDW zOp)4R(l4>KUaBU2k>!3Osn##^W;F9OXS@|jv5K9?S(mCLS^;zUXP!stCw7ttQc}?T z&+o8Zmz9>NDu$>H7n&m$ryZ;~l!OWIVZ-4CFtH-{GUO8VCm0_;Y#;w8nCuEK6E_<& zE)oY_EvJ-ly(Yj$pqvwOj)^e#q0=v(&}U;Ojlf|{lKrOnp<1%+1+O?Fv&^XVbir-e zbbdnFkS;$5)oEe^WgwL(ytcB+Z##P)_Z2J9HrwX%0tR-4h~>MT?_lA2Q5nU%7@fCY zj0>`;Oppo1fP&I76ef|EC%wZ1AC*ED;=yC01T ztvw#h&-5ySO!q+aGEg^v#S_E06u#p9R}r)Tvaq-C#ulr2BEy4=(AD-oyw)S46J!hn z+){zfKssC6E z!lJ~@?dqt|>k%X77;nfClZm>cxBE`ZGlH6-qn)bK{Axg-h_tNqcFhntnY(HX_w_~9 z`!GRnR~>FO*rk618O1l(JNx~mTTxv`aaa5LHK#z@bMip}7gIua)3P&2G?j#(Lm@7k zZqKkrNW1q?bD1Wo2laD}xG;74aYb^~@V)wvvx;!Pzub~qL+}&)ggnJO%K;LI$;$Ju zGCu)krT45Wlp8HBS}tzo(Zbd#h8x6Y`TH{yT9Rsbq+E`BU&UEIQErEk>5D1#&r>>T zEwPgZu{(?phWY~UqjZt_QwzUs%{4B z=!ZGk@_tkbQ0)u-h78oXKCBCvU?oYl<=%HL4br3;`Gf`mR4}&tp@V-++$@#6=&ssz z4lu)~cQmVRz!n9}Je`QhDdKtAM@h@#4v?8;@^_}}`FBanmTwDlgmajh6KI)_sdh&b zo&A35%T$PJLX=)i4aB4AV%Av?FwqF(X*myxOx^RG5$?ImI!*er>8%G~fPW5Iv?z z)bJ_bn;hHjo%r;VvQm37vE%-1cPA!YUf@GUy(bk$Q}7xlX`xlc%nhUy-InCdP1g`= zL$g<3Y8ty6(?gZLW9&?5&S9Shbg>z`$9t5;(is0(RLoi}eNaoS0!6Koanp?XMl)5p zz2Jt}r!dU5wKKA2N_~{3*?<9HgV4P0|MH*c zZe>&Y7spLd;u0t}Wwi6qo-@%zS7|S&j#)d=j;x&v<5bssFcmz|%FWjyZV)KiOMY<>TO zkBv^Z{|4vA=Z}%SxFeAujmS%iSoHO}CnvQGoqD$Ir)FZ<43@cu)y$0?2<6wiJ17UKG9V@M}@Flz0 zfbav|=1JuJUu)($?JQ3F;hMh-eSIlg9roa=d!2Yb+#wu@1(5KT@7?kF3X&UP+q#%0 z!TyxLL|Vp~alRs(?8i~Ozk6Y;!pUO$NA5^1i0-~!=jOarUwY=kFFHeqH1l3hE*-`D zMrVcdB-nrI_$NRIVzakOEAkMmA=Uphg!iB>Jv;t!(1`yPt8^nn4!HNFE@FqB=bhJ@P%#1HR(*}N0P!_N(AC)|M|qWaMFC$bn9OGvmZ z(9q60VemDo0M12-YT>rd*?Y<2SF~*J;HRxtYx)`akLf6Ii5;~EC(!jpJs|~`A!sd> z@SHu@yZM>?n%mR7(von7DVYgQia1@30jRC}2a&jyoNK$bVYXgNyv>D@Z?4NC)y2PC zOmPPqE>^NBG+ZbyQxKn~RdA|&`CrYw^2@{hX#k@Qrz7kl;G|ZiWPTvcXtTx 
z4#f&B?ykjK+}$05Kk4_nFM0oiH$R^2obx%^d-lw%S!;HMe;g4*zQM#>UeAj}$jK4? zZ#p63di(pUK*dWRiSQbWe3O!Rybr4qZfeAe6@~WiA1lfV?dfGbZCY|ha;pl@9YYOP z8UF>>q!+-Wbkqv!qjlJRhjyWrRq9zpg`T6)UH@GQUo@AVQb-liH0!Rb6gQGKe(9)yH~ z;QiGou5)2w3q>2@DQEyAd=QN(WL|q=i(+Er+-43Rg&Tg;*L!&@G&o=IexwO3>WueK zT+m4+x8k7kdi*O0uG78eudxsX)jXck6qRm0l8Y)A1ah^+S9z2BDGc@ZAlKk$f_+BiftV?>DdU97(_U)x@&?Q`bPaH^BXVk9SqrOgU@K>eVuU*zVFhRm5{hC9pq zGnRHYw(@3Bm(vBf7VL+Q>uuzr*WKxGMt6#D<#L3uaC9cVAdDE{i5^)N|pH&e{gt5z`wi7I00LT^VnYi6R04 zvw#?B&<(S2eo-=0=!Etg7mXCbhaVj_6FQ}F2@@m3-DM`D$|D0vr%b+G%}00W$>2!Y zd;?yudxCMqjSj{fNh+mCMPxBg%!jH2%LkwTi3x+&NB#EuJFmAIii|FA-%}cIZ-j*m z&pRs2$9(;h^ImQ_g&EG}Hxg9~v;07oPdap4A++i{I%s8QJ)Lvbi5==W#6n(<9+B|& zB*7&f^JcuB>u(s1r7h}9Iwugz>04v9I<7{s6r(&ej!qfA!W3u7+1th0Cq#uqni2&_Mh1|}W z9vu!(lRkCUZP&yz)!Rz{fR4zmF-caRW)Sv!ZH2Pmfj7;_j6S;D>63L9Mhu^%+x1A5H+P0nh@$Ur0g1%OxUosRJuz4v`H)N@Aj07=8 zuY#G6@@^=9le_&ury(GkRZ6F6=yVoFITE8VI52=KiLi>5!X98&LSoBl|9t1U@z#M@ z66ca=-*%t@J$X5IJZ#({V&1L$ZAhDjr?0-w+`C$|-}h2&JT$fz7?~wjE0kutqejym z*S<>69OAm$ue0G9KX_F2)|hI2L(LhdxR|g8O9lplU{{*k5&uTjZGnEQ`;0F;Cvj*D z>I0EJ2>BSK*(%|DD-Oyie13ST(O5)&Wm``?0Qw^_giWt~$B>gb6I|$XYNaUje z+G!Nm*IqgxVYw^~qyo_GnBg`bqVyK~O5j;7hvP!eW})4347Y|?Byb1wnQPupoWg`3 zqJRqzS7dN`>rB*Al5j)Xnm-!qz%9oxkvncSAIhnN6@z5?W|Gf_zp&5V;r%zOdu6J? 
zz6Q}g?ovCe9d62Fq0dEzMf0Jt;_a`YRxIz4fYQ$t4)SVdCGKnRQAhb##aHUSBbVtamfT=rym+Z{)5iGrYI5B?WZBX zjH6hRuMLB8aOxVpu1&KM&zg${@?0#Wq$WgblIcKyoQ&s@hxv!;hXs+?q#u&Qt;th8 zRJ;M?{FF7bLGnikHM0F({%XfqE-KHnsy^b!vh8cLKNM&ButH^l29OL3IPN|k7vXouX2Pu|c>R~^xlQ8wrc=EZF% zz1wu$_L$U-DRt22blz{z7a&!8ou)vJ{&2Pyk@=-ksN70W$;dLbshVjI{Lr35`hV^U z;{s8SxHT{O2cc>83N6jg=`!SP-o*DdZ~G)e;*Z?cXMVzr(Kl_ZSjsDRykhgpYola3 zjGi|(2pOEJV30O{?gN`xiBYh9NCzFNhMbi|6kyc(9f`dE=JJ4HQhCDad!aIgh=T%1 zb8i&s1z6gC+lL38iFXw`Kug#^yZp5{@(MJ`YQ;qYEEL=Gsf}z(T=rQjd4SlV|1l|$ z1NzgGw{DiFoJ`Jp={!~fWZ8UochPXf8wsc|X}T5km1}{YFmI4!iC2|y=+#{{ELBMb#!y7WFUO43YtJRg2;^$8nAzDhCQP#yvKb16hNa1`m7k{ftKngPsl+?t%-ehu%EN1^!9sYV*f$2^xX6y^A+d0;Ru2^kP=_(NKhdGt4Bo%pb-FY}ad+^mXalnbB z!UkdIA*849qpy&o9Q~b<-1g!*m;?~gz_kE-bVuB0ma_`U_2VK!MxC;gG~(NTG;Ar& z4=31OUpD)6^j`Q62#Os2Q}^DK9wDZ{@p!=;XUY{TQs z9vT`4#T{@cPcLEs{0>9*`;42PJbe2(gDgW;B7%!3DXB`s;iF}sr|Kxyo?38J{=0PN zCrkT!#rgb$C$1+@vGyAFna~cg$^*9q-w0V$=pOu{oEjq0I2MmHi2X%_SDq4Z%e4}F z$o|V%wD#sFsnGQOG{sH$41e!YX|}*`=^G`V(3@-Xve!ETXuqxWo8y~_z}v!T0mYzL zNug)hY(gQHrp1wCv6*F(rS|7WX2S~VD@4ALF zH}clg3Ec{nT6I5w>Fpv&R7*Z;Ni}S{0i9IMIePE5yyuyCp_ObE+}pID*2xed!7W*h z@S$zW%Eelk70Fn!=Ud{bA}5|}A~yK9OKu+}qy)+R;6uhE{?GP+s>XQ-Qz(iUih`(9 z7_WIrjMaSkU~(EZx@Ju`nGahEDhB8%hbu|%$PhaU`!$}S=U=31j*CVJy$L@Y;YeGh=?4-9-~8ahbFV+}|FMBF?|#nPaa&CdMJfEO zmH;{0<2`rIjDq9Ty?53G-y%2txZ!-w5|ue<-`1loOWZp|DNmQSWZ^VE57k0dBY|2B z-0K`xG*5$rA2Jn5!k+%joJqVc-2*~^s=cG$7vg%@O{iz{eCtOe_4cd~(1v`jSm&jy z9QI-?er%kMuF~`$q++5K<;7KEDn{b-+9U-2Xp%UADYljv6!oEhQ>WG?kHK4$51~3su*m(Mm4L3a~&9 zd~_&Mc`<9M5f7K+cUl0j_~{mmnFL?1(k3Xfq&)7OLFXJ)mI`uLh}c5SKjx)zY0sp* z0s|8_f0r|laErqD(#23pBW5qvQKUCLkfB_FMl3paB(d3JeG<*F3NMf!x9 zWV`Vks?>M@X~kCgoSEuj9ZG#OkcoF~N$w6jtd z)sjTuB;d#7@;!v%Anzvixf2rnLmXfxg`71}+$E0K?xw{46sWRJLE;mL8L~~&^{RvU zq$miw&;PNVx8$c@5otSP+WXw9;eJLa>rBk9_JphAj4JO~^T;R2gLGg4@f{*dx4e2> z?L|*5I`VjJw=|rdp0rrT1G}Odo*e>GNgoRqz;Nd52HZsa&w+{6Kw6W%P?97wGOhci zO8I{%WfD*b;$sOiKDh@s6TYPp+U2hy$XX=}BMSvL$-^&hmM@YMIIX@v00!vZqWL>& 
zjpggA6X#7;2X$_l>}~Ots*3!@ZO5(DIE@94#sh?jhB7Bo?ZY=TR72Nbx=2B~F{A#M z57$B~@KWP6XG8Hqw=*Hws%Q^?oGghxY|6GjVcUuZ%8=8miU$Qy4-z!6$#XM5)W|2p z7$HR(P)oT=ha-)&h-x{$>$BqT1QgU7q-8}_2M}PuOPyQbOAD;hBKMZM`}0s z;`ig<_4ikMkGTgjfE%RY&RoRdkn$U!4b3V2vFU zD-Tp*%6Q!OxISP);0L>3bl9yYD0@z=KN1Cx3FOL$X40OTC9QjKzs(UrI!93mxVx2w zG~J`I{&4B@%7NAj$BDT*?BY=7s*g~`8o_0Fwu5PS{6bvV6fK`c@*9=%MPS#$$*r5&qv{E8B@Fa`Z)F5|Wfg7Ets**F@Qr zj_=#i<4ySgCpdx~zIz$nP{dZ{E8Pp3n*o+6;mL2<$mQqU^&FvutzcIN>0y-wu9=T~ zX^wR-KoCY_SEtzVr`aeW@y^(f^o*2*Eg3~8JS;95e0J6>cfJoH8KK9ivB&3Gn5teytXGONId(7%ljp?t~Q?YwS6Y(NV*bmDjkgt zP@1_>3$#7z(1-Q;fHEht#x=w583A8*;7QLzg71fLok^<`manwMwX)4~^9?pPTuw%8 zFQFG&(-RtFv_4lzCMaw2=4czI>5>>~tw;B=B8PBqPU#H^Nq1DBd!yAxQ+HefqywNl z*{2_}!f9Hn1H_U~Gp+D{P8Vr9!#Lk2E1&nj1&E{hSe2(SCK~VT8ZKtk(^{Ru4pK8g zs~9;t%tu}YcvHqEMw68bv?gcoA2o!(-d>E(+w&k;`7(qYkOM6o z^$n5g{P4|?b^2qDaVquq0!_z^_z~<3G0QcR7;KXf8$}pHBus-ueF93E8(pkKg?u5{ zn>^J;S$%i_)#%-4p;)bN+?q$9p_*Hs;e z@z35IPFO^)Yr%8rylU^ILB&_)=R&!te#%ggA*fSx*N+`p;b?rl0;sAy06lFw+d`uo zHMiy7U@UP*YUXe-e=;@EI#*7QBq?XV(|KZD*;~-k_2AvrWy(IMClNcCKmA3hTTw=K z@XiOJYk3_1CM>%DK|28vF{VF5drFfjrv@wLXMH0{p2~y>D}!0guJFyLFJD4^?R0hj z?IhAa#)+K!u>_6j2Y4<^e=5)aWGEkK?H`;?J;XlrP~lHNDZpH2l0TVIYBHBEC+r-n zeUmSw805Jms=wj!qGhfFtE;^cSJTwDE-ZfxXd5B%hN-=v+ZQlJ2D&<@23h~o@je$B zB`ISyg**Om5^^-aVZY{$3Jr|9T~?$u$5*3Y-=~Gv{nj38gOKuh^fc<4xz|5poXxjv z=mR^_TB(#cU;4O|3;A=Ya|#Tf-f(GYh0DwJ=}vVu`RtK^#68gQW8{+ImlU$PK6I=? 
zx_V(`8*S%`8)u(sXs@XzlfEexY{g^H8f-;>fsuexiV-W(V!9VI6F|qLhR-L6mCe2g zrz?+Lm2H5S0{1IjVP%#zQ_?|K4sfQlX$|wa7&H!6C$sm4xN9f*$JJmjr%rG7q&4v8vc8)Pd z@!DH)q_|L8)hjP>^@<7@msYEpe2l2wz|<&g+VzjU)g=1XE=zYkLY)2SEivc*s;KGS zypARe)&+_tV0uC4R-(Sh#L6Mv}!N; z95VCTg@%u)h*6xTZTb9V3FpSDB4OM-q(Np(Vits-pK1)OqSxGz2~P)*n&Wyxznfck zdxT9cB&+WVk5g?X};`FuC`|Zo23RE{{+pYIP;PXF9AQsUhwzFVFBUKufhK+ zU>?ZcEU{=dyK2ehds)n^E2G* z^(7Vn(Th*kmd?ryJe8WL5nJcBGS-7^+Mw~>Y929Q>Fy)POuTIFHjvcg(vAY#@`u9` ziJI*KhUtzN@utp7%S~^dKAPcuajT}!4lvYi0bx#|GyKXiK)!dEmui)Tc;N0IDFYM@ zgt7_Ph>c}8i>7`gG@0#W4j@dWnXNit#$vc2WFw1!$7aG&eZY z@Y=#7gPoKnq%Sjc?1`XYpzj2qJvQ6$mQH-P(|Io~uL_MYeG9&!T|FStkW@;n{U5jqTyvwfcL@YSH3WYlq$!f8azM_3!AH-p2IU z?&yS39F4u-r5Uxajua}Fn$f%_;I|Hg5c?>p$pn^o8-q3)Fnq3Jb=m?WnL0nb(u7eQCwP!mSc{^rcu=DrEy+ zo%aYdph6m3X*}aH!_Os^<1(pdgA})S_BJFS#MdN3NNetD{WPHC&Pq)HDc^lKd@uvB zX8Mh-@O3Cuqy{N)B%X5rK3KXrpOjJ)@IZHl^Iro|)jX$@-=!tr2g9>#NOXkzobRl@7lb0&|7N zio!_HnNaq3%oM3}(k9n!>tGbfxgnVI2~AUKEd{qt2fHzlwIm z>a3ijY_(J>Jn6>8?? zrm+410p(I{G;nRBq_maQ?BN9ps$TQEUZK(^Fel?m3!I+KVqQ@)4AB1i(~%wwhWk2i zN#z%`Mv1EPoEV0&klEC4>;7>z*&C~6>E?27=vnF<;ALi|2 zfrCi<<MSa-g$Y-o&9ABl?nZwa?Y@`Toq`Z}4V>gt zp$KHZsl=F|YZPji{Fe!STaP;P?=T|v*hKNz-xU9e$BF27ciH3Rs^I=v>a11al-2-x zYK2dQ!A1v6o?_LfV=#?t$=Ubu>vBxd{1>M$R+INMY>d*sO}9#=Rh37^QB6}c>@gjM z>}FgAG2ba-2w5tF3ydx4+HHaCVnM8`>Tl{LysHhwc5K4nVoRkud={2OpIixezjgHSRBfCA})gL^*z$x<5n8EwQ}I+|mV=yB*lgG7Bn zL%W;p_#>%_q^Te!-BGFzDbWIRwLO(nMU56`=Wt8LgOOlXrbpJ{47)<{$Fd5E53(D% zN2&J}_BT;XiS}FYj~oKL))2KBrx6B4p+~S~qT`@y1JMYn;;8xU=N5`jLMPzOq#iHY zU)1t~;sx?bEi%Y=z!@H2(fS1>3Wou#*PR@M=br6twx!1U=4O@Wu7GYwHkH-dBr%Rzl3!m#weEREaH3% zLPPyweo?(pR%Sz>(^d*(avDE(p?T}EBglSRhgA(+E2*UPX|IGrcF4Qs3ljm#m#ObO zv5pfT<|K;G51@RI|5HK;!guo+8?qTk=KF1xr>>wYh+gb~FGBNdEZX1pdV*4aivyEC zDqh@%&i$O&dJxZd`3Yd+(3RA3nSFRbd@dihdGDf3(T6^oTPVz83_>nHDhlbNK%Hn2)@lfViVV^c}cj<7a)eUkp$qtl$>R62yz(`%JfQ4-Y(8;C!zJ1ux+?>jUX*L_zf9-`AW4d{K3{WPFEpWz>%#XePs6SKDGOUERUm9n z&Dqxvw+n$R743IA!EZN8b~1kCMwR!xN%lA{bE86FYaXxj6?JVFgn(uwJVJG42z?aIfdL 
zy#-aR@zg*XB;fnmBH{3S>=i=MSI(Mq8#~7$qHalt>F^z!b@JfCH_gS zmPy(-)}17?pvNEI>7u%V0a&?B7mE>oOr$H;aSCbS;K=zM)%zd3@$&on2gSeKJ*_~+ zbX#gB&EV-2RES+tLz?GiLHvW*KdL|;irQU`_kThLHXfx>lty~qaxJ-txr+tcnkD6q z+WGQzYLz}didet5d|lQZSZO2`M8*L9`9b?65#Az01?0GRI!_-hTP+q?b#oQLXRz`< z{u*~@U8OWKjrsab4PQ7WJGugGE1sH}Mt;Z#KEjt9-T4_+=Su@fx&j%G(+D2hbC}!4 z^J!O5|INPzjdBTl>WIG8WwOP;L`xoq6HCUD}hS?ZuM+CqPD|lTJr8{n5gm6mJO@L%&w+1Kh1hIx%-a$xEr?c_X>&^48#(Cp_ z4Z2^vtuC*=-lr!K6AYgRLfa)jm4d1I^fHmkLa_9kNkjNoJR{-#xhj2ZSj)`RD|2)Z zq6Rz)eZ0(`ehs6Z@ww+u^fiaetALWJ`n1*k<&-5ICAi_obHXq+aP4gu@lr+Yj$*aT zp0xf;;>hXz2w6OSw;c1UC!ZmG-jf?3i%My*I}WS07EO$l*3N+87>fL#DJK>8~ zE55`T3T9w#czvT$Ej1@D;VGrTeKO)KK0gu_WA8%l79sbR*n#Vs{Iafpk8)d35n|the$6JUkzd))G2+4Z@v9|MnPNRGGbb$bZ;9m% zc)ZyqgfpmgGD{zeh|dGbquEf{JqY~lLVCQ+G}Y;eyXAY zt7PR$tvMDhA#YjoZm6yMU_aW71{^3`+-<-)v1a;Re(i7>bFA{>ANZtHX7O890`MPN@$$eLMSSSVJ72r5n`y1~0#vGf&mks= zzF*pLgyv2_&$UkH7Qd-k8SKJjP8PQUfr5K(UIYE~-_kK@(rs~GXfcx)e~MTjc#VBw z*v?|#RC15iI~VOO+*oKz=8dysa|xtzWd4ld$kGQaDsXqk>L)m?;JOg48KK?S4(#AC zMQWZC#Pu#NqhRJhAs&$TZ?5O^B@sfFsV{?yLjSuVK`qzfzGiQFr-iO_!00HQv#)4d z&@FCfG*k?zn;aE&H+y;dHhXTrOA{ad=Zi5t^Rgndt4v7^VZ`WUuQLRzmKyIC+|8zMw%JVjo|6?FaJ;ld zuQC2N&K3QrSbKi&* z)>{qXWXNq+kBtv(IPI|-Ut)8fP6O1n8%3)Wbf--E+^q$ivD0O;_m57|@c!zy6__%j zLW4MPtykyL5}s z@8`j9Ob7`~heK)~$V6vFgdi69&HEhbuxq#{VW=3JHM6A8HUHy&_F;wJ-i6T4bZJIb z2)k3M4vch4$Da=t@2u;jArmSV8~&+=g$fCS;2tqe-us0%<$!qTBDpfG$svTA=jDQN z>sTK5_n>Pm4DT)KKxSPLNH1+!UGxvs3MZ#p%Q=$y-*(Z{e-vmjLts|8F1jZVGRz)xx8ETb_7ZY><99I& z6oxcya9FwtoboZ0mEo@vYkO|BMl9QKEN1KouLe4%Llu_fM0X_>3S)WxwvMIfW-Z`- z9Z>;F9=tBH~_tr6wzImAvXV%npgr&_8IMl`iI zP@wc;9pdn7qKoHtBIH)j7z1WlzsX}QrM5nZSKz;?61{B(N_wyRU~4{$55?<6%dbIy z28Na&Kb(ET$W5W>&Cs)a>P5-Rq#+|6w4-I!L-fhTnbP#(ghLD2Pde|EZ&T<->+r@$ z5O{gr zBi#P&>-fm8v|3Mwm(wqB`n>Z-Pg0~sS^39&$pKfK-Rk1Xg8fct>w)fVlzRHN{n}}hKKVS1M>aLD=W(3Al zUY#i}6R>za@?=fRL&O)n>YPyYBEG6-bHcFxx^2BsBiKEY?cpWA1^0kY`OEt6+pD0j z78f2X@%wD4_gGs%?a2c#MYk=k>MpVN#g&LVxF2OWPGmG9N@-*q3tHn0$s^?1_T{fC zEg)Zu6Kg)x@X;_9@@}p&z^S@Gja|XnB>w5EWUWqR 
zrw)GikyTG=3f9PcOILxOs`ue(5Sq0vo9hc^nML1;?7vB6z+P+$sT*kB4t__kr6wg& z$kK=rzA|=TTYrnq&+a34u5^Dl13s^lcEluY)!3jS;7vxbZ22!yJ-xSej#oRn{)1Ek z`r{+AT7IVk77jN71cTLPhLZLza=QHPHqnCP%PSF=!LI@w*YDpEqGj zWoWnb04|chdgj~BSRb$D<#^EpQ&dwoW<~%?J<~`wgYuuAwjyfe;PmG-dHpVMy}!7H zrYBR3S!Z1|mlD3)^$iOgX_>;G3q`mnUgs#+o~iK%qqY{M4+dy?^?L zMA}5J6BR~7YvK|{%in(pNm0hfM5ovz&-0lhl1(w-1@F)>b0HMScVTb3+^%DxZEfxt zs}&!w80@Y`un;x`s_v~qT9gUF_Sc{Y?)q=e5w%_`{#^lw@~GLZ6hRWP$HS!_2&V)1DDtcl&4+aIbn*%}4dZQ(X6{0P zHNEGx%Q2A>YkW=@Yu|oDGf{%wHH}ZhcB5OPBfk6mo}AUoZFE4FBG`B3bHdt7+MAH-rrpV zX}~d3;Ej;d;wo;j^~q%0#2XzzC(RKPI$-?s1+n43`vb7PzlwBU3A8)kb&esB3=CrP z>X(Vm0>Y?%VCp%=8cc}KVV+ZEjTJd}`DQo2^mD&Kz~_p7F`6Wx^HC#YfoJEuyf?K> zbB!RoRlNs7p3G*|Mq?^h8D0zHGqv*=S+>0uiq=^loSw|oQLc~Tvnw}PB^hVz(^p|h z_HQm6lnU!14mC_a0uK+5Nz3{nj5v5_!&k{bmJ}Skl&GU2ss0jCgsGoiXyB{M-bYtk zT8o{)X^nAOBa0CBKRZezErK$d>Rpeh)!)m0<7LxC4<;~>4^4gpaCNBsCo_e%|M;jU zc7Ej;_0qN7SA?B~$RCz&#l4v-%`xfNWzYFk&O&f^9M{k*S$I~drP<^`5h!Z+QT@u3 zieHE+`|rm3SCZMwI>xZ+0he3E!Ay;cbP-Wg)hBGSaF#5loQRc_0FGz8^TEam@#O(ZvGH}fRPX$8=Jgz*dPlEYj+(o6a}(^7hJ^=*`NSP18DZncX0&wJ@%uzZB*==uDA ze%>9Z{}T%2KsyPb{Hyg7BJNXx`rq*$+W*8f22in0nj!4pA<)iH@1eWTKR1J3#Vf)4 zCx`jxi+;p^vN-6~P&$c!!h^rV<=FmlGJmfcL}L8&fWJ3UjsE}3t>M>S-<*77O_SU` RuY3djNQ*0oRfE0;{y(OM#}xno literal 0 HcmV?d00001 diff --git a/HybridRAG/docker_compose/intel/hpu/gaudi/README.md b/HybridRAG/docker_compose/intel/hpu/gaudi/README.md new file mode 100644 index 0000000000..3f9fc045ef --- /dev/null +++ b/HybridRAG/docker_compose/intel/hpu/gaudi/README.md @@ -0,0 +1,161 @@ +# Example HybridRAG deployments on an Intel® Gaudi® Platform + +This example covers the single-node on-premises deployment of the HybridRAG example using OPEA components. There are various ways to enable HybridRAG, but this example will focus on four options available for deploying the HybridRAG pipeline to Intel® Gaudi® AI Accelerators. 
+
+**Note** This example requires access to a properly installed Intel® Gaudi® platform with a functional Docker service configured to use the habanalabs-container-runtime. Please consult the [Intel® Gaudi® software Installation Guide](https://docs.habana.ai/en/v1.20.0/Installation_Guide/Driver_Installation.html) for more information.
+
+## HybridRAG Quick Start Deployment
+
+This section describes how to quickly deploy and test the HybridRAG service manually on an Intel® Gaudi® platform. The basic steps are:
+
+### Access the Code
+
+Clone the GenAIExamples repository and access the HybridRAG Intel® Gaudi® platform Docker Compose files and supporting scripts:
+
+```
+git clone https://github.com/opea-project/GenAIExamples.git
+cd GenAIExamples/HybridRAG/docker_compose/intel/hpu/gaudi/
+```
+
+Check out a released version, such as v1.4:
+
+```
+git checkout v1.4
+```
+
+### Generate a HuggingFace Access Token
+
+Some HuggingFace resources, such as some models, are only accessible if you have an access token. If you do not already have a HuggingFace access token, you can create one by first creating an account by following the steps provided at [HuggingFace](https://huggingface.co/) and then generating a [user access token](https://huggingface.co/docs/transformers.js/en/guides/private#step-1-generating-a-user-access-token).
+
+### Configure the Deployment Environment
+
+To set up environment variables for deploying HybridRAG services, source the _set_env.sh_ script in this directory:
+
+```
+source ./set_env.sh
+```
+
+### Deploy the Services Using Docker Compose
+
+To deploy the HybridRAG services, execute the `docker compose up` command with the appropriate arguments.
For a default deployment, execute: + +```bash +docker compose up -d +``` + +The HybridRAG docker images should automatically be downloaded from the `OPEA registry` and deployed on the Intel® Gaudi® Platform: + +``` +[+] Running 9/9 + ✔ Container redis-vector-db Healthy 6.4s + ✔ Container vllm-service Started 0.4s + ✔ Container tei-embedding-server Started 0.9s + ✔ Container neo4j-apoc Healthy 11.4s + ✔ Container tei-reranking-server Started 0.8s + ✔ Container retriever-redis-server Started 1.0s + ✔ Container dataprep-redis-server Started 6.5s + ✔ Container text2cypher-gaudi-container Started 12.2s + ✔ Container hybridrag-xeon-backend-server Started 12.4s +``` + +To rebuild the docker image for the hybridrag-xeon-backend-server container: + +``` +cd GenAIExamples/HybridRAG +docker build --no-cache -t opea/hybridrag:latest -f Dockerfile . +``` + +### Check the Deployment Status + +After running docker compose, check if all the containers launched via docker compose have started: + +``` +docker ps -a +``` + +For the default deployment, the following 10 containers should have started: + +``` +CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES +a9286abd0015 opea/hybridrag:latest "python hybridrag.py" 15 hours ago Up 15 hours 0.0.0.0:8888->8888/tcp, :::8888->8888/tcp hybridrag-xeon-backend-server +8477b154dc72 opea/text2cypher-gaudi:latest "/bin/sh -c 'bash ru…" 15 hours ago Up 15 hours 0.0.0.0:11801->9097/tcp, [::]:11801->9097/tcp text2cypher-gaudi-container +688e01a431fa opea/dataprep:latest "sh -c 'python $( [ …" 15 hours ago Up 15 hours 0.0.0.0:6007->5000/tcp, [::]:6007->5000/tcp dataprep-redis-server +54f574fe54bb opea/retriever:latest "python opea_retriev…" 15 hours ago Up 15 hours 0.0.0.0:7000->7000/tcp, :::7000->7000/tcp retriever-redis-server +5028eb66617c ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 "text-embeddings-rou…" 15 hours ago Up 15 hours 0.0.0.0:8808->80/tcp, [::]:8808->80/tcp tei-reranking-server +a9dbf8a13365 opea/vllm:latest "python3 -m 
vllm.ent…" 15 hours ago Up 15 hours (healthy) 0.0.0.0:9009->80/tcp, [::]:9009->80/tcp vllm-service
+43f44830f47b neo4j:latest "tini -g -- /startup…" 15 hours ago Up 15 hours (healthy) 0.0.0.0:7474->7474/tcp, :::7474->7474/tcp, 7473/tcp, 0.0.0.0:7687->7687/tcp, :::7687->7687/tcp neo4j-apoc
+867feabb6f11 redis/redis-stack:7.2.0-v9 "/entrypoint.sh" 15 hours ago Up 15 hours (healthy) 0.0.0.0:6379->6379/tcp, :::6379->6379/tcp, 0.0.0.0:8001->8001/tcp, :::8001->8001/tcp redis-vector-db
+23cd7f16453b ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 "text-embeddings-rou…" 15 hours ago Up 15 hours 0.0.0.0:6006->80/tcp, [::]:6006->80/tcp tei-embedding-server
+```
+
+### Test the Pipeline
+
+Once the HybridRAG services are running, run data ingestion. The following command ingests unstructured data:
+
+```bash
+cd GenAIExamples/HybridRAG/tests
+curl -X POST -H "Content-Type: multipart/form-data" \
+  -F "files=@./Diabetes.txt" \
+  -F "files=@./Acne_Vulgaris.txt" \
+  -F "chunk_size=300" \
+  -F "chunk_overlap=20" \
+  http://${host_ip}:6007/v1/dataprep/ingest
+```
+
+The data files (Diabetes.txt and Acne_Vulgaris.txt) are samples downloaded from Wikipedia, and they are here to facilitate the pipeline tests. Users are encouraged to download their own datasets, and the command above should be updated with the proper file names.
+
+As for the structured data, the application is pre-seeded with structured data and schema by default. To create a knowledge graph with custom data and schema, set the cypher_insert environment variable prior to application deployment.
+ +```bash +export cypher_insert=' + LOAD CSV WITH HEADERS FROM "https://docs.google.com/spreadsheets/d/e/2PACX-1vQCEUxVlMZwwI2sn2T1aulBrRzJYVpsM9no8AEsYOOklCDTljoUIBHItGnqmAez62wwLpbvKMr7YoHI/pub?gid=0&single=true&output=csv" AS rows + MERGE (d:disease {name:rows.Disease}) + MERGE (dt:diet {name:rows.Diet}) + MERGE (d)-[:HOME_REMEDY]->(dt) + + MERGE (m:medication {name:rows.Medication}) + MERGE (d)-[:TREATMENT]->(m) + + MERGE (s:symptoms {name:rows.Symptom}) + MERGE (d)-[:MANIFESTATION]->(s) + + MERGE (p:precaution {name:rows.Precaution}) + MERGE (d)-[:PREVENTION]->(p) +' +``` + +If the graph database is already populated, you can skip the knowledge graph generation by setting the refresh_db environment variable: + +```bash +export refresh_db='False' +``` + +Now test the pipeline using the following command: + +```bash +curl -s -X POST -d '{"messages": "what are the symptoms for Diabetes?"}' \ + -H 'Content-Type: application/json' \ + "${host_ip}:8888/v1/hybridrag" +``` + +To collect per request latency for the pipeline, run the following: + +```bash +curl -o /dev/null -s -w "Total Time: %{time_total}s\n" \ + -X POST \ + -d '{"messages": "what are the symptoms for Diabetes?"}' \ + -H 'Content-Type: application/json' \ + "${host_ip}:8888/v1/hybridrag" +``` + +**Note** The value of _host_ip_ was set using the _set_env.sh_ script and can be found in the _.env_ file. + +### Cleanup the Deployment + +To stop the containers associated with the deployment, execute the following command: + +``` +docker compose -f compose.yaml down +``` + +All the HybridRAG containers will be stopped and then removed on completion of the "down" command. 
diff --git a/HybridRAG/docker_compose/intel/hpu/gaudi/compose.yaml b/HybridRAG/docker_compose/intel/hpu/gaudi/compose.yaml new file mode 100644 index 0000000000..2ae35cf5ed --- /dev/null +++ b/HybridRAG/docker_compose/intel/hpu/gaudi/compose.yaml @@ -0,0 +1,240 @@ +# Copyright (C) 2025 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +services: + text2cypher-gaudi: + image: ${REGISTRY:-opea}/text2cypher-gaudi:${TAG:-latest} + container_name: text2cypher-gaudi-container + ports: + - "11801:9097" + depends_on: + neo4j-apoc: + condition: service_healthy + ipc: host + environment: + no_proxy: ${no_proxy} + http_proxy: ${http_proxy} + https_proxy: ${https_proxy} + INDEX_NAME: ${INDEX_NAME} + HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} + LOGFLAG: ${LOGFLAG:-False} + HABANA_VISIBLE_DEVICES: all + OMPI_MCA_btl_vader_single_copy_mechanism: none + TOKENIZERS_PARALLELISM: False + NEO4J_URI: ${NEO4J_URI} + NEO4J_URL: ${NEO4J_URI} + NEO4J_USERNAME: ${NEO4J_USERNAME} + NEO4J_PASSWORD: ${NEO4J_PASSWORD} + host_ip: ${host_ip} + runtime: habana + cap_add: + - SYS_NICE + restart: unless-stopped + neo4j-apoc: + image: neo4j:latest + container_name: neo4j-apoc + ports: + - "${NEO4J_PORT1:-7474}:7474" + - "${NEO4J_PORT2:-7687}:7687" + volumes: + - ./data/neo4j/logs:/logs + - ./data/neo4j/config:/config + - ./data/neo4j/data:/data + - ./data/neo4j/plugins:/plugins + ipc: host + environment: + - no_proxy=${no_proxy} + - http_proxy=${http_proxy} + - https_proxy=${https_proxy} + - NEO4J_AUTH=${NEO4J_USERNAME}/${NEO4J_PASSWORD} + - NEO4J_PLUGINS=["apoc"] + - NEO4J_apoc_export_file_enabled=true + - NEO4J_apoc_import_file_enabled=true + - NEO4J_apoc_import_file_use__neo4j__config=true + - NEO4J_dbms_security_procedures_unrestricted=apoc.\* + - NEO4J_server_bolt_advertised__address=localhost:${NEO4J_PORT2} + restart: always + healthcheck: + test: wget http://localhost:7474 || exit 1 + interval: 5s + timeout: 10s + retries: 20 + start_period: 3s + 
redis-vector-db: + image: redis/redis-stack:7.2.0-v9 + container_name: redis-vector-db + ports: + - "6379:6379" + - "8001:8001" + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 5s + timeout: 3s + retries: 10 + dataprep-redis-service: + image: ${REGISTRY:-opea}/dataprep:${TAG:-latest} + container_name: dataprep-redis-server + depends_on: + redis-vector-db: + condition: service_healthy + tei-embedding-service: + condition: service_started + ports: + - "6007:5000" + environment: + no_proxy: ${no_proxy} + http_proxy: ${http_proxy} + https_proxy: ${https_proxy} + REDIS_URL: redis://redis-vector-db:6379 + REDIS_HOST: redis-vector-db + INDEX_NAME: ${INDEX_NAME} + TEI_ENDPOINT: http://tei-embedding-service:80 + HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + tei-embedding-service: + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + container_name: tei-embedding-server + ports: + - "6006:80" + volumes: + - "${MODEL_CACHE:-./data}:/data" + shm_size: 1g + environment: + no_proxy: ${no_proxy} + http_proxy: ${http_proxy} + https_proxy: ${https_proxy} + command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate + retriever: + image: ${REGISTRY:-opea}/retriever:${TAG:-latest} + container_name: retriever-redis-server + depends_on: + - redis-vector-db + ports: + - "7000:7000" + ipc: host + environment: + no_proxy: ${no_proxy} + http_proxy: ${http_proxy} + https_proxy: ${https_proxy} + REDIS_URL: redis://redis-vector-db:6379 + REDIS_HOST: redis-vector-db + INDEX_NAME: ${INDEX_NAME} + TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 + HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + LOGFLAG: ${LOGFLAG} + RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" + restart: unless-stopped + tei-reranking-service: + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 + container_name: tei-reranking-server + ports: + - "8808:80" + volumes: + - "${MODEL_CACHE:-./data}:/data" + shm_size: 1g + environment: + no_proxy: ${no_proxy} + 
http_proxy: ${http_proxy} + https_proxy: ${https_proxy} + HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_HUB_DISABLE_PROGRESS_BARS: 1 + HF_HUB_ENABLE_HF_TRANSFER: 0 + command: --model-id ${RERANK_MODEL_ID} --auto-truncate + vllm-service: + image: ${REGISTRY:-opea}/vllm:${TAG:-latest} + container_name: vllm-service + ports: + - "9009:80" + volumes: + - "${MODEL_CACHE:-./data}:/root/.cache/huggingface/hub" + shm_size: 128g + environment: + no_proxy: ${no_proxy} + http_proxy: ${http_proxy} + https_proxy: ${https_proxy} + HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + LLM_MODEL_ID: ${LLM_MODEL_ID} + VLLM_TORCH_PROFILER_DIR: "/mnt" + VLLM_CPU_KVCACHE_SPACE: 40 + healthcheck: + test: ["CMD-SHELL", "curl -f http://$host_ip:9009/health || exit 1"] + interval: 10s + timeout: 10s + retries: 100 + command: --model $LLM_MODEL_ID --host 0.0.0.0 --port 80 + hybridrag-xeon-backend-server: + image: ${REGISTRY:-opea}/hybridrag:${TAG:-latest} + container_name: hybridrag-xeon-backend-server + depends_on: + - redis-vector-db + - retriever + - tei-reranking-service + - vllm-service + - neo4j-apoc + - text2cypher-gaudi + - tei-embedding-service + #- struct2graph + ports: + - "8888:8888" + environment: + - no_proxy=${no_proxy} + - https_proxy=${https_proxy} + - http_proxy=${http_proxy} + - MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP} + - EMBEDDING_SERVER_HOST_IP=${EMBEDDING_SERVER_HOST_IP} + - EMBEDDING_SERVER_PORT=${EMBEDDING_SERVER_PORT} + - RETRIEVER_SERVER_HOST_IP=${RETRIEVER_SERVER_HOST_IP} + - RETRIEVER_SERVER_PORT=${RETRIEVER_SERVER_PORT} + - RERANK_SERVER_HOST_IP=${RERANK_SERVER_HOST_IP} + - RERANK_SERVER_PORT=${RERANK_SERVER_PORT} + - LLM_SERVER_HOST_IP=${LLM_SERVER_HOST_IP} + - LLM_SERVER_PORT=${LLM_SERVER_PORT} + - TEXT2CYPHER_SERVER_HOST_IP=${TEXT2CYPHER_SERVER_HOST_IP} + - TEXT2CYPHER_SERVER_PORT=${TEXT2CYPHER_SERVER_PORT:-9097} + - LLM_MODEL=${LLM_MODEL_ID} + - LOGFLAG=${LOGFLAG} + - NEO4J_URL=${NEO4J_URL} + - NEO4J_server_directories_import=import + - 
NEO4J_PLUGINS=["apoc"] + - NEO4J_dbms_security_allow__csv__import__from__file__urls=true + - NEO4J_dbms_security_procedures_unrestricted=apoc.* + ipc: host + restart: always + hybridrag-xeon-ui-server: + image: ${REGISTRY:-opea}/hybridrag-ui:${TAG:-latest} + container_name: hybridrag-xeon-ui-server + depends_on: + - hybridrag-xeon-backend-server + ports: + - "5173:5173" + environment: + - no_proxy=${no_proxy} + - https_proxy=${https_proxy} + - http_proxy=${http_proxy} + ipc: host + restart: always + hybridrag-xeon-nginx-server: + image: ${REGISTRY:-opea}/nginx:${TAG:-latest} + container_name: hybridrag-xeon-nginx-server + depends_on: + - hybridrag-xeon-backend-server + - hybridrag-xeon-ui-server + ports: + - "${NGINX_PORT:-80}:80" + environment: + - no_proxy=${no_proxy} + - https_proxy=${https_proxy} + - http_proxy=${http_proxy} + - FRONTEND_SERVICE_IP=hybridrag-xeon-ui-server + - FRONTEND_SERVICE_PORT=5173 + - BACKEND_SERVICE_NAME=hybridrag + - BACKEND_SERVICE_IP=hybridrag-xeon-backend-server + - BACKEND_SERVICE_PORT=8888 + - DATAPREP_SERVICE_IP=dataprep-redis-service + - DATAPREP_SERVICE_PORT=5000 + ipc: host + restart: always + +networks: + default: + driver: bridge diff --git a/HybridRAG/docker_compose/intel/hpu/gaudi/set_env.sh b/HybridRAG/docker_compose/intel/hpu/gaudi/set_env.sh new file mode 100644 index 0000000000..a828fb565d --- /dev/null +++ b/HybridRAG/docker_compose/intel/hpu/gaudi/set_env.sh @@ -0,0 +1,56 @@ +#!/usr/bin/env bash + +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +export host_ip=$(hostname -I | awk '{print $1}') +export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN} + +export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" +export RERANK_MODEL_ID="BAAI/bge-reranker-base" +export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" +export INDEX_NAME="rag-redis" +# Set it as a non-null string, such as true, if you want to enable logging facility, +# otherwise, 
keep it as "" to disable it. +export LOGFLAG="" +# Set OpenTelemetry Tracing Endpoint +export JAEGER_IP=$(ip route get 8.8.8.8 | grep -oP 'src \K[^ ]+') +export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=grpc://$JAEGER_IP:4317 +export TELEMETRY_ENDPOINT=http://$JAEGER_IP:4318/v1/traces +# Set no proxy +export no_proxy="$no_proxy,hybridrag-gaudi-ui-server,hybridrag-gaudi-backend-server,dataprep-redis-service,tei-embedding-service,retriever,tei-reranking-service,tgi-service,vllm-service,jaeger,prometheus,grafana,node-exporter,localhost,127.0.0.1,$JAEGER_IP,${host_ip}" + + +export MEGA_SERVICE_HOST_IP=${host_ip} +export EMBEDDING_SERVER_HOST_IP=${host_ip} +export RETRIEVER_SERVER_HOST_IP=${host_ip} +export RERANK_SERVER_HOST_IP=${host_ip} +export LLM_SERVER_HOST_IP=${host_ip} +export TEXT2CYPHER_SERVER_HOST_IP=${host_ip} +export REDIS_SERVER_HOST_IP=${host_ip} + +export MEGA_SERVICE_PORT=8888 +export EMBEDDING_SERVER_PORT=6006 +export RETRIEVER_SERVER_PORT=7000 +export RERANK_SERVER_PORT=8808 +export LLM_SERVER_PORT=9009 +export TEXT2CYPHER_SERVER_PORT=11801 +export REDIS_SERVER_PORT=6379 + +export LLM_ENDPOINT_PORT=8010 +export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}" +export HYBRIDRAG_REDIS_VECTOR_PORT=6379 +export HYBRIDRAG_REDIS_VECTOR_INSIGHT_PORT=8001 +export HYBRIDRAG_FRONTEND_SERVICE_PORT=5173 +export HYBRIDRAG_BACKEND_SERVICE_ENDPOINT=http://${host_ip}:8888/v1/hybridrag +export NGINX_PORT=80 +export FAQGen_COMPONENT_NAME="OpeaFaqGenvLLM" + +export NEO4J_PORT1=7474 +export NEO4J_PORT2=7687 +export NEO4J_URI="bolt://${host_ip}:${NEO4J_PORT2}" +export NEO4J_URL="bolt://${host_ip}:${NEO4J_PORT2}" +export NEO4J_USERNAME="neo4j" +export NEO4J_PASSWORD="neo4jtest" +export LOGFLAG=True diff --git a/HybridRAG/docker_image_build/build.yaml b/HybridRAG/docker_image_build/build.yaml new file mode 100644 index 0000000000..7c23807736 --- /dev/null +++ b/HybridRAG/docker_image_build/build.yaml @@ -0,0 +1,51 @@ +# Copyright (C) 2024 Intel Corporation +# 
SPDX-License-Identifier: Apache-2.0 + +services: + hybridrag: + build: + args: + IMAGE_REPO: ${REGISTRY} + BASE_TAG: ${TAG} + http_proxy: ${http_proxy} + https_proxy: ${https_proxy} + no_proxy: ${no_proxy} + context: ../ + dockerfile: ./Dockerfile + image: ${REGISTRY:-opea}/hybridrag:${TAG:-latest} + text2cypher: + build: + context: GenAIComps + dockerfile: comps/text2cypher/src/Dockerfile.intel_hpu + extends: hybridrag + image: ${REGISTRY:-opea}/text2cypher-gaudi:${TAG:-latest} + vllm: + build: + context: vllm + dockerfile: docker/Dockerfile.cpu + extends: hybridrag + image: ${REGISTRY:-opea}/vllm:${TAG:-latest} + dataprep: + build: + context: GenAIComps + dockerfile: comps/dataprep/src/Dockerfile + extends: hybridrag + image: ${REGISTRY:-opea}/dataprep:${TAG:-latest} + retriever: + build: + context: GenAIComps + dockerfile: comps/retrievers/src/Dockerfile + extends: hybridrag + image: ${REGISTRY:-opea}/retriever:${TAG:-latest} + nginx: + build: + context: GenAIComps + dockerfile: comps/third_parties/nginx/src/Dockerfile + extends: hybridrag + image: ${REGISTRY:-opea}/nginx:${TAG:-latest} + hybridrag-ui: + build: + context: ../ui + dockerfile: ./docker/Dockerfile + extends: hybridrag + image: ${REGISTRY:-opea}/hybridrag-ui:${TAG:-latest} diff --git a/HybridRAG/hybridrag.py b/HybridRAG/hybridrag.py new file mode 100644 index 0000000000..a7d48cd809 --- /dev/null +++ b/HybridRAG/hybridrag.py @@ -0,0 +1,464 @@ +# Copyright (C) 2025 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +import argparse +import ast +import asyncio +import json +import os +import re +import time + +import requests +from comps import MegaServiceEndpoint, MicroService, ServiceOrchestrator, ServiceRoleType, ServiceType +from comps.cores.mega.utils import handle_message +from comps.cores.proto.api_protocol import ( + ChatCompletionRequest, + ChatCompletionResponse, + ChatCompletionResponseChoice, + ChatMessage, + UsageInfo, +) +from comps.cores.proto.docarray import LLMParams, 
RerankerParms, RetrieverParms +from fastapi import Request +from fastapi.responses import StreamingResponse +from langchain_core.prompts import PromptTemplate + + +class ChatTemplate: + @staticmethod + def generate_rag_prompt(question, documents): + context_str = "\n".join(documents) + if context_str and len(re.findall("[\u4e00-\u9fff]", context_str)) / len(context_str) >= 0.3: + # chinese context + template = """ +### 你将扮演一个乐于助人、尊重他人并诚实的助手,你的目标是帮助用户解答问题。有效地利用来自本地知识库的搜索结果。确保你的回答中只包含相关信息。如果你不确定问题的答案,请避免分享不准确的信息。 +### 搜索结果:{context} +### 问题:{question} +### 回答: +""" + else: + template = """ +### You are a helpful, respectful and honest assistant to help the user with questions. \ +Please refer to the search results obtained from the local knowledge base. \ +But be careful to not incorporate the information that you think is not relevant to the question. \ +If you don't know the answer to a question, please don't share false information. \n +### Search results: {context} \n +### Question: {question} \n +### Answer: +""" + return template.format(context=context_str, question=question) + + @staticmethod + def generate_fuse_prompt(question, unstruct_documents, struct_str): + unstruct_str = "\n".join(unstruct_documents) + context_str = f"Structured: {struct_str} Unstructured: {unstruct_str}" + if context_str and len(re.findall("[\u4e00-\u9fff]", context_str)) / len(context_str) >= 0.3: + # chinese context + template = """ +您是一位知識豐富的助手,經過訓練來整合結構化和非結構化的信息,以統一的方式回答問題。在答案中不要區分結構化和非結構化的信息。 +回答問題: {question}。指示: 僅使用提供的結構化和非結構化檢索結果來回答問題。 +{context} +""" + else: + template = """ +You are a knowledgeable assistant trained to integrate information for both structured and unstructured retrieval results to answer questions in a unified manner. Do not differentiate structured and unstructured information in the answer. Answer the question: {question}. Instructions: Use only the provided structured and unstructured results to answer the question. {context}. 
+""" + return template.format(context=context_str, question=question) + + +MEGA_SERVICE_PORT = int(os.getenv("MEGA_SERVICE_PORT", 8888)) +EMBEDDING_SERVER_HOST_IP = os.getenv("EMBEDDING_SERVER_HOST_IP", "0.0.0.0") +EMBEDDING_SERVER_PORT = int(os.getenv("EMBEDDING_SERVER_PORT", 80)) +RETRIEVER_SERVER_HOST_IP = os.getenv("RETRIEVER_SERVER_HOST_IP", "0.0.0.0") +RETRIEVER_SERVER_PORT = int(os.getenv("RETRIEVER_SERVER_PORT", 7000)) +RERANK_SERVER_HOST_IP = os.getenv("RERANK_SERVER_HOST_IP", "0.0.0.0") +RERANK_SERVER_PORT = int(os.getenv("RERANK_SERVER_PORT", 80)) +LLM_SERVER_HOST_IP = os.getenv("LLM_SERVER_HOST_IP", "0.0.0.0") +LLM_SERVER_PORT = int(os.getenv("LLM_SERVER_PORT", 80)) +TEXT2CYPHER_SERVER_HOST_IP = os.getenv("TEXT2CYPHER_SERVER_HOST_IP", "0.0.0.0") +TEXT2CYPHER_SERVER_PORT = int(os.getenv("TEXT2CYPHER_SERVER_PORT", 11801)) +REDIS_SERVER_HOST_IP = os.getenv("REDIS_SERVER_HOST_IP", "0.0.0.0") +REDIS_SERVER_PORT = int(os.getenv("REDIS_SERVER_PORT", 6379)) +refresh_db = os.getenv("refresh_db", "True") +cypher_insert = os.getenv("cypher_insert", None) + +LLM_MODEL = os.getenv("LLM_MODEL", "meta-llama/Meta-Llama-3-8B-Instruct") + + +def align_inputs(self, inputs, cur_node, runtime_graph, llm_parameters_dict, **kwargs): + if self.services[cur_node].service_type == ServiceType.EMBEDDING: + inputs["inputs"] = inputs["text"] + del inputs["text"] + elif self.services[cur_node].service_type == ServiceType.RETRIEVER: + # prepare the retriever params + retriever_parameters = kwargs.get("retriever_parameters", None) + if retriever_parameters: + inputs.update(retriever_parameters.dict()) + elif self.services[cur_node].service_type == ServiceType.LLM: + # convert TGI/vLLM to unified OpenAI /v1/chat/completions format + next_inputs = {} + next_inputs["model"] = LLM_MODEL + next_inputs["messages"] = [{"role": "user", "content": inputs["inputs"]}] + next_inputs["max_tokens"] = llm_parameters_dict["max_tokens"] + next_inputs["top_p"] = llm_parameters_dict["top_p"] + 
next_inputs["stream"] = inputs["stream"] + next_inputs["frequency_penalty"] = inputs["frequency_penalty"] + next_inputs["temperature"] = inputs["temperature"] + inputs = next_inputs + return inputs + + +def align_outputs(self, data, cur_node, inputs, runtime_graph, llm_parameters_dict, **kwargs): + next_data = {} + if self.services[cur_node].service_type == ServiceType.EMBEDDING: + assert isinstance(data, list) + next_data = {"text": inputs["inputs"], "embedding": data[0]} + elif self.services[cur_node].service_type == ServiceType.RETRIEVER: + + docs = [doc["text"] for doc in data["retrieved_docs"]] + + with_rerank = runtime_graph.downstream(cur_node)[0].startswith("rerank") + if with_rerank and docs: + # forward to rerank + # prepare inputs for rerank + next_data["query"] = data["initial_query"] + next_data["texts"] = [doc["text"] for doc in data["retrieved_docs"]] + else: + # forward to llm + if not docs and with_rerank: + # delete the rerank from retriever -> rerank -> llm + for ds in reversed(runtime_graph.downstream(cur_node)): + for nds in runtime_graph.downstream(ds): + runtime_graph.add_edge(cur_node, nds) + runtime_graph.delete_node_if_exists(ds) + + # handle template + # if user provides template, then format the prompt with it + # otherwise, use the default template + prompt = data["initial_query"] + chat_template = llm_parameters_dict["chat_template"] + if chat_template: + prompt_template = PromptTemplate.from_template(chat_template) + input_variables = prompt_template.input_variables + if sorted(input_variables) == ["context", "question"]: + prompt = prompt_template.format(question=data["initial_query"], context="\n".join(docs)) + elif input_variables == ["question"]: + prompt = prompt_template.format(question=data["initial_query"]) + else: + print(f"{prompt_template} not used, we only support 2 input variables ['question', 'context']") + prompt = ChatTemplate.generate_rag_prompt(data["initial_query"], docs) + else: + prompt = 
ChatTemplate.generate_rag_prompt(data["initial_query"], docs) + + next_data["inputs"] = prompt + + elif self.services[cur_node].service_type == ServiceType.RERANK: + # rerank the inputs with the scores + reranker_parameters = kwargs.get("reranker_parameters", None) + prompt = inputs["query"] + hybridrag = kwargs.get("hybridrag", None) + # retrieve structured from cache + timeout = 120 # seconds + interval = 1 # polling interval in seconds + elapsed = 0 + + retrieved = None + structured_result = "" + while elapsed < timeout: + retrieved = hybridrag.cache + if retrieved is not None: + break + time.sleep(interval) + elapsed += interval + if retrieved: + structured_result = retrieved + + # reset the cache + hybridrag.cache = None + + top_n = reranker_parameters.top_n if reranker_parameters else 1 + docs = inputs["texts"] + reranked_docs = [] + for best_response in data[:top_n]: + reranked_docs.append(docs[best_response["index"]]) + + unstruct_str = "\n".join(reranked_docs) + fused = f"Structured: {structured_result} Unstructured: {unstruct_str}" + + # handle template + # if user provides template, then format the prompt with it + # otherwise, use the default template + chat_template = llm_parameters_dict["chat_template"] + if chat_template: + prompt_template = PromptTemplate.from_template(chat_template) + input_variables = prompt_template.input_variables + if sorted(input_variables) == ["context", "question"]: + prompt = prompt_template.format(question=prompt, context=fused) + elif input_variables == ["question"]: + prompt = prompt_template.format(question=prompt) + else: + print(f"{prompt_template} not used, we only support 2 input variables ['question', 'context']") + prompt = ChatTemplate.generate_fuse_prompt(prompt, reranked_docs, structured_result) + else: + prompt = ChatTemplate.generate_fuse_prompt(prompt, reranked_docs, structured_result) + + next_data["inputs"] = prompt + + elif self.services[cur_node].service_type == ServiceType.LLM and not 
llm_parameters_dict["stream"]: + if "faqgen" in self.services[cur_node].endpoint: + next_data = data + else: + next_data["text"] = data["choices"][0]["message"]["content"] + else: + next_data = data + + return next_data + + +def align_generator(self, gen, **kwargs): + # OpenAI response format + # b'data:{"id":"","object":"text_completion","created":1725530204,"model":"meta-llama/Meta-Llama-3-8B-Instruct","system_fingerprint":"2.0.1-native","choices":[{"index":0,"delta":{"role":"assistant","content":"?"},"logprobs":null,"finish_reason":null}]}\n\n' + for line in gen: + line = line.decode("utf-8") + start = line.find("{") + end = line.rfind("}") + 1 + + json_str = line[start:end] + try: + # sometimes yield empty chunk, do a fallback here + json_data = json.loads(json_str) + if "ops" in json_data and "op" in json_data["ops"][0]: + if "value" in json_data["ops"][0] and isinstance(json_data["ops"][0]["value"], str): + yield f"data: {repr(json_data['ops'][0]['value'].encode('utf-8'))}\n\n" + else: + pass + elif ( + json_data["choices"][0]["finish_reason"] != "eos_token" + and "content" in json_data["choices"][0]["delta"] + ): + yield f"data: {repr(json_data['choices'][0]['delta']['content'].encode('utf-8'))}\n\n" + except Exception as e: + yield f"data: {repr(json_str.encode('utf-8'))}\n\n" + yield "data: [DONE]\n\n" + + +class HybridRAGService: + def __init__(self, host="0.0.0.0", port=8000): + self.host = host + self.port = port + self.cache = None + ServiceOrchestrator.align_inputs = align_inputs + ServiceOrchestrator.align_outputs = align_outputs + ServiceOrchestrator.align_generator = align_generator + self.megaservice = ServiceOrchestrator() + self.endpoint = str(MegaServiceEndpoint.HYBRID_RAG) + + async def exec_text2cypher(self, prompt): + url = f"http://{TEXT2CYPHER_SERVER_HOST_IP}:{TEXT2CYPHER_SERVER_PORT}/v1/text2cypher" + headers = {"Content-Type": "application/json"} + if refresh_db == "False": + data = {"input_text": prompt, "seeding": {"refresh_db": 
"False"}} + elif cypher_insert is not None: + data = {"input_text": prompt, "seeding": {"cypher_insert": "'${cypher_insert}'", "refresh_db": "True"}} + else: + data = {"input_text": prompt} + response = requests.post(url, json=data) + data = response.json() + data_str = str(data) + start_marker = "['" + end_marker = "']" + + # Find the start and end indices + start_index = data_str.find(start_marker) + len(start_marker) # Move past the start marker + end_index = data_str.find(end_marker, start_index) # Find the end marker + + # Extract the substring + substring = data_str[start_index:end_index] + + # Clean up the substring + structured = ",".join(item.strip().strip("'") for item in substring.split(",")) + + # save to cache + self.cache = structured + return structured + + def add_remote_service(self): + + embedding = MicroService( + name="embedding", + host=EMBEDDING_SERVER_HOST_IP, + port=EMBEDDING_SERVER_PORT, + endpoint="/embed", + use_remote_service=True, + service_type=ServiceType.EMBEDDING, + ) + + text2cypher = MicroService( + name="text2cypher", + host=TEXT2CYPHER_SERVER_HOST_IP, + port=TEXT2CYPHER_SERVER_PORT, + endpoint="/text2cypher", + use_remote_service=True, + service_type=ServiceType.TEXT2CYPHER, + ) + + retriever = MicroService( + name="retriever", + host=RETRIEVER_SERVER_HOST_IP, + port=RETRIEVER_SERVER_PORT, + endpoint="/v1/retrieval", + use_remote_service=True, + service_type=ServiceType.RETRIEVER, + ) + + rerank = MicroService( + name="rerank", + host=RERANK_SERVER_HOST_IP, + port=RERANK_SERVER_PORT, + endpoint="/rerank", + use_remote_service=True, + service_type=ServiceType.RERANK, + ) + + llm = MicroService( + name="llm", + host=LLM_SERVER_HOST_IP, + port=LLM_SERVER_PORT, + endpoint="/v1/chat/completions", + use_remote_service=True, + service_type=ServiceType.LLM, + ) + + # Add the microservices to the megaservice orchestrator and define the flow + self.megaservice.add(embedding).add(retriever).add(rerank).add(llm) + 
self.megaservice.flow_to(embedding, retriever) + self.megaservice.flow_to(retriever, rerank) + self.megaservice.flow_to(rerank, llm) + + async def read_streaming_response(self, response: StreamingResponse): + """Reads the streaming response from a StreamingResponse object. + + Parameters: + - self: Reference to the current instance of the class. + - response: The StreamingResponse object to read from. + + Returns: + - str: The complete response body as a decoded string. + """ + body = b"" # Initialize an empty byte string to accumulate the response chunks + async for chunk in response.body_iterator: + body += chunk # Append each chunk to the body + return body.decode("utf-8") # Decode the accumulated byte string to a regular string + + async def process_prompt(self, prompt, llm_parameters, retriever_parameters, reranker_parameters): + # Create tasks for concurrent execution + exec_task = asyncio.create_task(self.exec_text2cypher(prompt)) + schedule_task = asyncio.create_task( + self.megaservice.schedule( + initial_inputs={"text": prompt}, + llm_parameters=llm_parameters, + retriever_parameters=retriever_parameters, + reranker_parameters=reranker_parameters, + hybridrag=self, + ) + ) + + # Wait for both tasks to complete + structured_result = await exec_task + result_dict, runtime_graph = await schedule_task + + return result_dict, runtime_graph + + async def handle_request(self, request: Request): + """Handles the incoming request, processes it through the appropriate microservices, + and returns the response. + + Parameters: + - self: Reference to the current instance of the class. + - request: The incoming request object. + + Returns: + - ChatCompletionResponse: The response from the LLM microservice. 
+ """ + # Parse the incoming request data + data = await request.json() + + # Get the stream option from the request data, default to True if not provided + stream_opt = data.get("stream", True) + + # Validate and parse the chat request data + chat_request = ChatCompletionRequest.model_validate(data) # parse_obj(data) + + # Handle the chat messages to generate the prompt + prompt = handle_message(chat_request.messages) + + # Define the LLM parameters + llm_parameters = LLMParams( + max_tokens=chat_request.max_tokens if chat_request.max_tokens else 2048, + top_k=chat_request.top_k if chat_request.top_k else 10, + top_p=chat_request.top_p if chat_request.top_p else 0.95, + temperature=chat_request.temperature if chat_request.temperature else 0.01, + frequency_penalty=chat_request.frequency_penalty if chat_request.frequency_penalty else 0.0, + presence_penalty=chat_request.presence_penalty if chat_request.presence_penalty else 0.0, + repetition_penalty=chat_request.repetition_penalty if chat_request.repetition_penalty else 1.03, + stream=stream_opt, + chat_template=chat_request.chat_template if chat_request.chat_template else None, + model=chat_request.model if chat_request.model else None, + ) + + # Define the retriever parameters + retriever_parameters = RetrieverParms( + search_type=chat_request.search_type if chat_request.search_type else "similarity", + k=chat_request.k if chat_request.k else 4, + distance_threshold=chat_request.distance_threshold if chat_request.distance_threshold else None, + fetch_k=chat_request.fetch_k if chat_request.fetch_k else 20, + lambda_mult=chat_request.lambda_mult if chat_request.lambda_mult else 0.5, + score_threshold=chat_request.score_threshold if chat_request.score_threshold else 0.2, + ) + + # Define the reranker parameters + reranker_parameters = RerankerParms( + top_n=chat_request.top_n if chat_request.top_n else 1, + ) + + result_dict, runtime_graph = await self.process_prompt( + prompt, llm_parameters, retriever_parameters, 
reranker_parameters + ) + for node, response in result_dict.items(): + if isinstance(response, StreamingResponse): + return response + last_node = runtime_graph.all_leaves()[-1] + response = result_dict[last_node]["text"] + choices = [] + usage = UsageInfo() + choices.append( + ChatCompletionResponseChoice( + index=0, + message=ChatMessage(role="assistant", content=response), + finish_reason="stop", + ) + ) + return ChatCompletionResponse(model="hybridrag", choices=choices, usage=usage) + + def start(self): + + self.service = MicroService( + self.__class__.__name__, + service_role=ServiceRoleType.MEGASERVICE, + host=self.host, + port=self.port, + endpoint=self.endpoint, + input_datatype=ChatCompletionRequest, + output_datatype=ChatCompletionResponse, + ) + + self.service.add_route(self.endpoint, self.handle_request, methods=["POST"]) + self.service.start() + + +if __name__ == "__main__": + hybridrag = HybridRAGService(port=MEGA_SERVICE_PORT) + hybridrag.add_remote_service() + + hybridrag.start() diff --git a/HybridRAG/tests/data/Acne_Vulgaris.txt b/HybridRAG/tests/data/Acne_Vulgaris.txt new file mode 100644 index 0000000000..7c24616a3d --- /dev/null +++ b/HybridRAG/tests/data/Acne_Vulgaris.txt @@ -0,0 +1 @@ +Acne (/ˈækni/ ACK-nee), also known as acne vulgaris, is a long-term skin condition that occurs when dead skin cells and oil from the skin clog hair follicles. Typical features of the condition include blackheads or whiteheads, pimples, oily skin, and possible scarring. It primarily affects skin with a relatively high number of oil glands, including the face, upper part of the chest, and back. The resulting appearance can lead to lack of confidence, anxiety, reduced self-esteem, and, in extreme cases, depression or thoughts of suicide.Susceptibility to acne is primarily genetic in 80% of cases. The roles of diet and cigarette smoking in the condition are unclear, and neither cleanliness nor exposure to sunlight are associated with acne. 
In both sexes, hormones called androgens appear to be part of the underlying mechanism, by causing increased production of sebum. Another common factor is the excessive growth of the bacterium Cutibacterium acnes, which is present on the skin.Treatments for acne are available, including lifestyle changes, medications, and medical procedures. Eating fewer simple carbohydrates such as sugar may minimize the condition. Treatments applied directly to the affected skin, such as azelaic acid, benzoyl peroxide, and salicylic acid, are commonly used. Antibiotics and retinoids are available in formulations that are applied to the skin and taken by mouth for the treatment of acne. However, resistance to antibiotics may develop as a result of antibiotic therapy. Several types of birth control pills help prevent acne in women. Medical professionals typically reserve isotretinoin pills for severe acne, due to greater potential side effects. Early and aggressive treatment of acne is advocated by some in the medical community to decrease the overall long-term impact on individuals.In 2015, acne affected approximately 633 million people globally, making it the eighth-most common disease worldwide. Acne commonly occurs in adolescence and affects an estimated 80–90% of teenagers in the Western world. Some rural societies report lower rates of acne than industrialized ones. Children and adults may also be affected before and after puberty. Although acne becomes less common in adulthood, it persists in nearly half of affected people into their twenties and thirties, and a smaller group continues to have difficulties in their forties. Classification The severity of acne vulgaris (Gr. ἀκμή, "point" + L. vulgaris, "common") can be classified as mild, moderate, or severe to determine an appropriate treatment regimen. There is no universally accepted scale for grading acne severity. 
The presence of clogged skin follicles (known as comedones) limited to the face with occasional inflammatory lesions defines mild acne. Moderate severity acne is said to occur when a higher number of inflammatory papules and pustules occur on the face, compared to mild cases of acne, and appear on the trunk of the body. Severe acne is said to occur when nodules (the painful 'bumps' lying under the skin) are the characteristic facial lesions, and involvement of the trunk is extensive.The lesions are usually polymorphic, meaning they can take many forms, including open or closed comedones (commonly known as blackheads and whiteheads), papules, pustules, and even nodules or cysts so that these lesions often leave behind sequelae, or abnormal conditions resulting from a previous disease, such as scarring or hyperpigmentation.Large nodules were previously called cysts. The term nodulocystic has been used in the medical literature to describe severe cases of inflammatory acne. True cysts are rare in those with acne, and the term severe nodular acne is now the preferred terminology.Acne inversa (L. invertō, "upside-down") and acne rosacea (rosa, "rose-colored" + -āceus, "forming") are not forms of acne and are alternate names that respectively refer to the skin conditions hidradenitis suppurativa (HS) and rosacea. Although HS shares certain overlapping features with acne vulgaris, such as a tendency to clog skin follicles with skin cell debris, the condition otherwise lacks the hallmark features of acne and is therefore considered a distinct skin disorder. Signs and symptoms Typical features of acne include increased secretion of oily sebum by the skin, microcomedones, comedones, papules, nodules (large papules), pustules, and often result in scarring. The appearance of acne varies with skin color. It may result in psychological and social problems. 
Scars Acne scars are caused by inflammation within the dermis and are estimated to affect 95% of people with acne vulgaris. Abnormal healing and dermal inflammation create the scar. Scarring is most likely to take place with severe acne but may occur with any form of acne vulgaris. Acne scars are classified based on whether the abnormal healing response following dermal inflammation leads to excess collagen deposition or loss at the site of the acne lesion.Atrophic acne scars have lost collagen from the healing response and are the most common type of acne scar (accounting for approximately 75% of all acne scars). Ice-pick scars, boxcar scars, and rolling scars are subtypes of atrophic acne scars. Boxcar scars are round or ovoid indented scars with sharp borders and vary in size from 1.5–4 mm across. Ice-pick scars are narrow (less than 2 mm across), deep scars that extend into the dermis. Rolling scars are broader than ice-pick and boxcar scars (4–5 mm across) and have a wave-like pattern of depth in the skin.Hypertrophic scars are uncommon and are characterized by increased collagen content after the abnormal healing response. They are described as firm and raised from the skin. Hypertrophic scars remain within the original margins of the wound, whereas keloid scars can form scar tissue outside of these borders. Keloid scars from acne occur more often in men and people with darker skin, and usually occur on the trunk of the body. Pigmentation After an inflamed nodular acne lesion resolves, it is common for the skin to darken in that area, which is known as postinflammatory hyperpigmentation (PIH). The inflammation stimulates specialized pigment-producing skin cells (known as melanocytes) to produce more melanin pigment, which leads to the skin's darkened appearance. PIH occurs more frequently in people with darker skin color. Pigmented scar is a common term used for PIH, but is misleading as it suggests the color change is permanent. 
Often, PIH can be prevented by avoiding any aggravation of the nodule and can fade with time. However, untreated PIH can last for months, years, or even be permanent if deeper layers of skin are affected. Even minimal skin exposure to the sun's ultraviolet rays can sustain hyperpigmentation. Daily use of SPF 15 or higher sunscreen can minimize such a risk. Whitening agents like azelaic acid, arbutin or else may be used to improve hyperpigmentation. Causes Risk factors for the development of acne, other than genetics, have not been conclusively identified. Possible secondary contributors include hormones, infections, diet, and stress. Studies investigating the impact of smoking on the incidence and severity of acne have been inconclusive. Cleanliness (hygiene) and sunlight are not associated with acne. Genes Acne appears to be highly heritable; genetics explain 81% of the variation in the population. Studies performed in affected twins and first-degree relatives further demonstrate the strongly inherited nature of acne. Acne susceptibility is likely due to the influence of multiple genes, as the disease does not follow a classic (Mendelian) inheritance pattern. These gene candidates include certain variations in tumor necrosis factor-alpha (TNF-alpha), IL-1 alpha, and CYP1A1 genes, among others. The 308 G/A single nucleotide polymorphism variation in the gene for TNF is associated with an increased risk for acne. Acne can be a feature of rare genetic disorders such as Apert's syndrome. Severe acne may be associated with XYY syndrome. Hormones Hormonal activity, such as occurs during menstrual cycles and puberty, may contribute to the formation of acne. During puberty, an increase in sex hormones called androgens causes the skin follicle glands to grow larger and make more oily sebum. The androgen hormones testosterone, dihydrotestosterone (DHT), and dehydroepiandrosterone (DHEA) are all linked to acne. 
High levels of growth hormone (GH) and insulin-like growth factor 1 (IGF-1) are also associated with worsened acne. Both androgens and IGF-1 seem to be essential for acne to occur, as acne does not develop in individuals with complete androgen insensitivity syndrome (CAIS) or Laron syndrome (insensitivity to GH, resulting in very low IGF-1 levels).Medical conditions that commonly cause a high-androgen state, such as polycystic ovary syndrome, congenital adrenal hyperplasia, and androgen-secreting tumors, can cause acne in affected individuals. Conversely, people who lack androgenic hormones or are insensitive to the effects of androgens rarely have acne. Pregnancy can increase androgen levels, and consequently, oily sebum synthesis. Acne can be a side effect of testosterone replacement therapy or anabolic steroid use. Over-the-counter bodybuilding and dietary supplements often contain illegally added anabolic steroids. Infections The anaerobic bacterial species Cutibacterium acnes (formerly Propionibacterium acnes) contributes to the development of acne, but its exact role is not well understood. There are specific sub-strains of C. acnes associated with normal skin and others with moderate or severe inflammatory acne. It is unclear whether these undesirable strains evolve on-site or are acquired, or possibly both depending on the person. These strains have the capability of changing, perpetuating, or adapting to the abnormal cycle of inflammation, oil production, and inadequate sloughing of dead skin cells from acne pores. Infection with the parasitic mite Demodex is associated with the development of acne. It is unclear whether eradication of the mite improves acne. Diet High-glycemic-load diets have been found to have different degrees of effect on acne severity. Multiple randomized controlled trials and nonrandomized studies have found a lower-glycemic-load diet to be effective in reducing acne. 
There is weak observational evidence suggesting that dairy milk consumption is positively associated with a higher frequency and severity of acne. Milk contains whey protein and hormones such as bovine IGF-1 and precursors of dihydrotestosterone. Studies suggest these components promote the effects of insulin and IGF-1 and thereby increase the production of androgen hormones, sebum, and promote the formation of comedones. Available evidence does not support a link between eating chocolate or salt and acne severity. Few studies have examined the relationship between obesity and acne. Vitamin B12 may trigger skin outbreaks similar to acne (acneiform eruptions), or worsen existing acne when taken in doses exceeding the recommended daily intake. Stress There are few high-quality studies to demonstrate that stress causes or worsens acne. Despite being controversial, some research indicates that increased acne severity is associated with high stress levels in certain contexts, such as hormonal changes seen in premenstrual syndrome. Other Some individuals experience severe intensification of their acne when they are exposed to hot humid climates; this is due to bacteria and fungus thriving in warm, moist environments. This climate-induced acne exacerbation has been termed tropical acne.Mechanical obstruction of skin follicles with helmets or chinstraps can worsen pre-existing acne. However, acne caused by mechanical obstruction is technically not acne vulgaris, but another acneiform eruption known as acne mechanica.Several medications can also worsen pre-existing acne; this condition is the acne medicamentosa form of acne. Examples of such medications include lithium, hydantoin, isoniazid, glucocorticoids, iodides, bromides, and testosterone. When acne medicamentosa is specifically caused by anabolic–androgenic steroids it can simply be referred to as steroid acne. 
Genetically susceptible individuals can get acne breakouts as a result of polymorphous light eruption; a condition triggered by sunlight and artificial UV light exposure. This form of acne is called Acne aestivalis and is specifically caused by intense UVA light exposure. Affected individuals usually experience seasonal acne breakouts on their upper arms, shoulder girdle, back, and chest. The breakouts typically occur one-to-three days after exposure to intense UVA radiation. Unlike other forms of acne, the condition spares the face; this could possibly be a result of the pathogenesis of polymorphous light eruption, in which areas of the skin that are newly exposed to intense ultraviolet radiation are affected. Since faces are typically left uncovered at all stages of life, there is little-to-no likelihood for an eruption to appear there. Studies show that both polymorphous light eruption outbreaks and the acne aestivalis breakout response can be prevented by topical antioxidants combined with the application of a broad spectrum sunscreen. Pathophysiology Acne vulgaris is a chronic skin disease of the pilosebaceous unit and develops due to blockages in the skin's hair follicles.Traditionally seen as a disease of adolescence, acne vulgaris is also observed in adults, including post-menopausal women. Acne vulgaris manifested in adult females is called adult female acne (AFA), defined as a chronic inflammatory disease of the pilosebaceous unit. 
Particularly in AFA, during the menopausal transition, a relative increase in androgen levels occurs as estrogen levels begin to decline, so that this hormonal shift can manifest as acne; while most women with AFA exhibit few acne lesions and have normal androgen levels, baseline investigations, including an androgen testing panel, can help rule out associated comorbidities such as polycystic ovarian syndrome, congenital adrenal hyperplasia, or tumors.The blockages in the skin's hair follicles that cause acne vulgaris manifestations occur as a result of the following four abnormal processes: increased oily sebum production (influenced by androgens), excessive deposition of the protein keratin leading to comedo formation, colonization of the follicle by Cutibacterium acnes (C. acnes) bacteria, and the local release of pro-inflammatory chemicals in the skin.The earliest pathologic change is the formation of a plug (a microcomedone), which is driven primarily by excessive growth, reproduction, and accumulation of skin cells in the hair follicle. In healthy skin, the skin cells that have died come up to the surface and exit the pore of the hair follicle. In people with acne, the increased production of oily sebum causes the dead skin cells to stick together. The accumulation of dead skin cell debris and oily sebum blocks the pore of the hair follicle, thus forming the microcomedone. The C. acnes biofilm within the hair follicle worsens this process. If the microcomedone is superficial within the hair follicle, the skin pigment melanin is exposed to air, resulting in its oxidation and dark appearance (known as a blackhead or open comedo). In contrast, if the microcomedone occurs deep within the hair follicle, this causes the formation of a whitehead (known as a closed comedo).The main hormonal driver of oily sebum production in the skin is dihydrotestosterone. Another androgenic hormone responsible for increased sebaceous gland activity is DHEA-S. 
The adrenal glands secrete higher amounts of DHEA-S during adrenarche (a stage of puberty), and this leads to an increase in sebum production. In a sebum-rich skin environment, the naturally occurring and largely commensal skin bacterium C. acnes readily grows and can cause inflammation within and around the follicle due to activation of the innate immune system. C. acnes triggers skin inflammation in acne by increasing the production of several pro-inflammatory chemical signals (such as IL-1α, IL-8, TNF-α, and LTB4); IL-1α is essential to comedo formation.C. acnes' ability to bind and activate a class of immune system receptors known as toll-like receptors (TLRs), especially TLR2 and TLR4, is a core mechanism of acne-related skin inflammation. Activation of TLR2 and TLR4 by C. acnes leads to increased secretion of IL-1α, IL-8, and TNF-α. The release of these inflammatory signals attracts various immune cells to the hair follicle, including neutrophils, macrophages, and Th1 cells. IL-1α stimulates increased skin cell activity and reproduction, which, in turn, fuels comedo development. Furthermore, sebaceous gland cells produce more antimicrobial peptides, such as HBD1 and HBD2, in response to the binding of TLR2 and TLR4.C. acnes also provokes skin inflammation by altering the fatty composition of oily sebum. Oxidation of the lipid squalene by C. acnes is of particular importance. Squalene oxidation activates NF-κB (a protein complex) and consequently increases IL-1α levels. Additionally, squalene oxidation increases 5-lipoxygenase enzyme activity, which catalyzes the conversion of arachidonic acid to leukotriene B4 (LTB4). LTB4 promotes skin inflammation by acting on the peroxisome proliferator-activated receptor alpha (PPARα) protein. PPARα increases the activity of activator protein 1 (AP-1) and NF-κB, thereby leading to the recruitment of inflammatory T cells. C. 
acnes' ability to convert sebum triglycerides to pro-inflammatory free fatty acids via secretion of the enzyme lipase further explains its inflammatory properties. These free fatty acids spur increased production of cathelicidin, HBD1, and HBD2, thus leading to further inflammation. This inflammatory cascade typically leads to the formation of inflammatory acne lesions, including papules, infected pustules, or nodules. If the inflammatory reaction is severe, the follicle can break into the deeper layers of the dermis and subcutaneous tissue and cause the formation of deep nodules. The involvement of AP-1 in the aforementioned inflammatory cascade activates matrix metalloproteinases, which contribute to local tissue destruction and scar formation. Along with the bacterium C. acnes, the bacterial species Staphylococcus epidermidis (S. epidermidis) also takes part in the pathophysiology of acne vulgaris. The proliferation of S. epidermidis with C. acnes causes the formation of biofilms, which block the hair follicles and pores, creating an anaerobic environment under the skin. This enables increased growth of both C. acnes and S. epidermidis under the skin. The proliferation of C. acnes causes the formation of biofilms and a biofilm matrix, making it even harder to treat the acne. Diagnosis Acne vulgaris is diagnosed based on a medical professional's clinical judgment. The evaluation of a person with suspected acne should include taking a detailed medical history about a family history of acne, a review of medications taken, signs or symptoms of excessive production of androgen hormones, cortisol, and growth hormone. Comedones (blackheads and whiteheads) must be present to diagnose acne. In their absence, an appearance similar to that of acne would suggest a different skin disorder. Microcomedones (the precursor to blackheads and whiteheads) are not visible to the naked eye when inspecting the skin and require a microscope to be seen.
Many features may indicate that a person's acne vulgaris is sensitive to hormonal influences. Historical and physical clues that may suggest hormone-sensitive acne include onset between ages 20 and 30; worsening the week before a woman's period; acne lesions predominantly over the jawline and chin; and inflammatory/nodular acne lesions.Several scales exist to grade the severity of acne vulgaris, but disagreement persists about the ideal one for diagnostic use. Cook's acne grading scale uses photographs to grade severity from 0 to 8, with higher numbers representing more severe acne. This scale was the first to use a standardized photographic protocol to assess acne severity; since its creation in 1979, the scale has undergone several revisions. The Leeds acne grading technique counts acne lesions on the face, back, and chest and categorizes them as inflammatory or non-inflammatory. Leeds scores range from 0 (least severe) to 10 (most severe) though modified scales have a maximum score of 12. The Pillsbury acne grading scale classifies the severity of the acne from grade 1 (least severe) to grade 4 (most severe). Differential diagnosis Many skin conditions can mimic acne vulgaris, and these are collectively known as acneiform eruptions. Such conditions include angiofibromas, epidermal cysts, flat warts, folliculitis, keratosis pilaris, milia, perioral dermatitis, and rosacea, among others. Age is one factor that may help distinguish between these disorders. Skin disorders such as perioral dermatitis and keratosis pilaris can appear similar to acne but tend to occur more frequently in childhood. Rosacea tends to occur more frequently in older adults. Facial redness triggered by heat or the consumption of alcohol or spicy food is also more suggestive of rosacea. The presence of comedones helps health professionals differentiate acne from skin disorders that are similar in appearance. 
Chloracne and occupational acne, due to exposure to certain chemicals and industrial compounds, may look very similar to acne vulgaris. Management Many different treatments exist for acne. These include alpha hydroxy acid, anti-androgen medications, antibiotics, antiseborrheic medications, azelaic acid, benzoyl peroxide, hormonal treatments, keratolytic soaps, nicotinamide, retinoids, and salicylic acid. Acne treatments work in at least four different ways, including the following: reducing inflammation, hormonal manipulation, killing C. acnes, and normalizing skin cell shedding and sebum production in the pore to prevent blockage. Typical treatments include topical therapies such as antibiotics, benzoyl peroxide, and retinoids, and systemic therapies, including antibiotics, hormonal agents, and oral retinoids. Recommended therapies for first-line use in acne vulgaris treatment include topical retinoids, benzoyl peroxide, and topical or oral antibiotics. Procedures such as light therapy and laser therapy are not first-line treatments and typically have only an add-on role due to their high cost and limited evidence. Blue light therapy is of unclear benefit. Medications for acne target the early stages of comedo formation and are generally ineffective for visible skin lesions; acne generally improves between eight and twelve weeks after starting therapy. People often view acne as a short-term condition, some expecting it to disappear after puberty. This misconception can lead to depending on self-management or problems with long-term adherence to treatment. Communicating the long-term nature of the condition and better access to reliable information about acne can help people know what to expect from treatments. Skin care In general, it is recommended that people with acne do not wash affected skin more than twice daily. The application of a fragrance-free moisturizer to sensitive and acne-prone skin may reduce irritation.
Skin irritation from acne medications typically peaks at two weeks after onset of use and tends to improve with continued use. Dermatologists recommend using cosmetic products that specifically say non-comedogenic, oil-free, and will not clog pores.Acne vulgaris patients, even those with oily skin, should moisturize in order to support the skin's moisture barrier since skin barrier dysfunction may contribute to acne. Moisturizers, especially ceramide-containing moisturizers, as an adjunct therapy are particularly helpful for the dry skin and irritation that commonly results from topical acne treatment. Studies show that ceramide-containing moisturizers are important for optimal skin care; they enhance acne therapy adherence and complement existing acne therapies. In a study where acne patients used 1.2% clindamycin phosphate / 2.5% benzoyl peroxide gel in the morning and applied a micronized 0.05% tretinoin gel in the evening the overwhelming majority of patients experienced no cutaneous adverse events throughout the study. It was concluded that using ceramide cleanser and ceramide moisturizing cream caused the favorable tolerability, did not interfere with the treatment efficacy, and improved adherence to the regimen. The importance of preserving the acidic mantle and its barrier functions is widely accepted in the scientific community. Thus, maintaining a pH in the range 4.5 – 5.5 is essential in order to keep the skin surface in its optimal, healthy conditions. Diet Causal relationship is rarely observed with diet/nutrition and dermatologic conditions. Rather, associations – some of them compelling – have been found between diet and outcomes including disease severity and the number of conditions experienced by a patient. Evidence is emerging in support of medical nutrition therapy as a way of reducing the severity and incidence of dermatologic diseases, including acne. Researchers observed a link between high glycemic index diets and acne. 
Dermatologists also recommend a diet low in simple sugars as a method of improving acne. As of 2014, the available evidence is insufficient to use milk restriction for this purpose. Medications Benzoyl peroxide Benzoyl peroxide (BPO) is a first-line treatment for mild and moderate acne due to its effectiveness and mild side-effects (mainly skin irritation). In the skin follicle, benzoyl peroxide kills C. acnes by oxidizing its proteins through the formation of oxygen free radicals and benzoic acid. These free radicals likely interfere with the bacterium's metabolism and ability to make proteins. Additionally, benzoyl peroxide is mildly effective at breaking down comedones and inhibiting inflammation. Combination products use benzoyl peroxide with a topical antibiotic or retinoid, such as benzoyl peroxide/clindamycin and benzoyl peroxide/adapalene, respectively. Topical benzoyl peroxide is effective at treating acne.Side effects include increased skin photosensitivity, dryness, redness, and occasional peeling. Sunscreen use is often advised during treatment, to prevent sunburn. Lower concentrations of benzoyl peroxide are just as effective as higher concentrations in treating acne but are associated with fewer side effects. Unlike antibiotics, benzoyl peroxide does not appear to generate bacterial antibiotic resistance. Retinoids Retinoids are medications that reduce inflammation, normalize the follicle cell life cycle, and reduce sebum production. They are structurally related to vitamin A. Studies show dermatologists and primary care doctors underprescribe them for acne. The retinoids appear to influence the cell life cycle in the follicle lining. This helps prevent the accumulation of skin cells within the hair follicle that can create a blockage. They are a first-line acne treatment, especially for people with dark-colored skin. 
Retinoids are known to lead to faster improvement of postinflammatory hyperpigmentation.Topical retinoids include adapalene, retinol, retinaldehyde, isotretinoin, tazarotene, trifarotene, and tretinoin. They often cause an initial flare-up of acne and facial flushing and can cause significant skin irritation. Generally speaking, retinoids increase the skin's sensitivity to sunlight and are therefore recommended for use at night. Tretinoin is the least expensive of the topical retinoids and is the most irritating to the skin, whereas adapalene is the least irritating but costs significantly more. Most formulations of tretinoin are incompatible for use with benzoyl peroxide. Tazarotene is the most effective and expensive topical retinoid but is usually not as well tolerated. In 2019 a tazarotene lotion formulation, marketed to be a less irritating option, was approved by the FDA. Retinol is a form of vitamin A that has similar but milder effects and is present in many over-the-counter moisturizers and other topical products.Isotretinoin is an oral retinoid that is very effective for severe nodular acne, and moderate acne that is stubborn to other treatments. One to two months of use is typically adequate to see improvement. Acne often resolves completely or is much milder after a 4–6 month course of oral isotretinoin. After a single round of treatment, about 80% of people report an improvement, with more than 50% reporting complete remission. About 20% of people require a second course, but 80% of those report improvement, resulting in a cumulative 96% efficacy rate.There are concerns that isotretinoin is linked to adverse effects, like depression, suicidality, and anemia. There is no clear evidence to support some of these claims. Isotretinoin has been found in some studies to be superior to antibiotics or placebo in reducing acne lesions. However, a 2018 review comparing inflammatory lesions after treatment with antibiotics or isotretinoin found no difference. 
The frequency of adverse events was about twice as high with isotretinoin use, although these were mostly dryness-related events. No increased risk of suicide or depression was conclusively found. Medical authorities strictly regulate isotretinoin use in women of childbearing age due to its known harmful effects in pregnancy. For such a woman to be considered a candidate for isotretinoin, she must have a confirmed negative pregnancy test and use an effective form of birth control. In 2008, the United States started the iPLEDGE program to prevent isotretinoin use during pregnancy. iPLEDGE requires the woman to have two negative pregnancy tests and to use two types of birth control for at least one month before isotretinoin therapy begins and one month afterward. The effectiveness of the iPLEDGE program is controversial due to continued instances of contraception nonadherence. Antibiotics People may apply antibiotics to the skin or take them orally to treat acne. They work by killing C. acnes and reducing inflammation. Although multiple guidelines call for healthcare providers to reduce the rates of prescribed oral antibiotics, many providers do not follow this guidance. Oral antibiotics remain the most commonly prescribed systemic therapy for acne. Widespread broad-spectrum antibiotic overuse for acne has led to higher rates of antibiotic-resistant C. acnes strains worldwide, especially to the commonly used tetracycline (e.g., doxycycline) and macrolide antibiotics (e.g., topical erythromycin). Therefore, dermatologists prefer antibiotics as part of combination therapy and not for use alone. Commonly used antibiotics, either applied to the skin or taken orally, include clindamycin, erythromycin, metronidazole, sulfacetamide, and tetracyclines (e.g., doxycycline or minocycline). Doxycycline 40 milligrams daily (low-dose) appears to have similar efficacy to 100 milligrams daily and has fewer gastrointestinal side effects.
However, low-dose doxycycline is not FDA-approved for the treatment of acne. Antibiotics applied to the skin are typically used for mild to moderately severe acne. Oral antibiotics are generally more effective than topical antibiotics and produce faster resolution of inflammatory acne lesions than topical applications. The Global Alliance to Improve Outcomes in Acne recommends that topical and oral antibiotics are not used together.Oral antibiotics are recommended for no longer than three months as antibiotic courses exceeding this duration are associated with the development of antibiotic resistance and show no clear benefit over shorter durations. If long-term oral antibiotics beyond three months are used, then it is recommended that benzoyl peroxide or a retinoid be used at the same time to limit the risk of C. acnes developing antibiotic resistance.The antibiotic dapsone is effective against inflammatory acne when applied to the skin. It is generally not a first-line choice due to its higher cost and a lack of clear superiority over other antibiotics. Topical dapsone is sometimes a preferred therapy in women or for people with sensitive or darker-toned skin. It is not recommended for use with benzoyl peroxide due to the risk of causing yellow-orange skin discoloration with this combination. Minocycline is an effective acne treatment, but it is not a first-line antibiotic due to a lack of evidence that it is better than other treatments, and concerns about its safety compared to other tetracyclines.Sarecycline is the most recent oral antibiotic developed specifically for the treatment of acne, and is FDA-approved for the treatment of moderate to severe inflammatory acne in patients nine years of age and older. It is a narrow-spectrum tetracycline antibiotic that exhibits the necessary antibacterial activity against pathogens related to acne vulgaris and a low propensity for inducing antibiotic resistance. 
In clinical trials, sarecycline demonstrated clinical efficacy in reducing inflammatory acne lesions as early as three weeks and reduced truncal (back and chest) acne. Hormonal agents In women, the use of combined birth control pills can improve acne. These medications contain an estrogen and a progestin. They work by decreasing the production of androgen hormones by the ovaries and by decreasing the free and hence biologically active fractions of androgens, resulting in lowered skin production of sebum and consequently reduce acne severity. First-generation progestins such as norethindrone and norgestrel have androgenic properties and may worsen acne. Although oral estrogens decrease IGF-1 levels in some situations, which could theoretically improve acne symptoms, combined birth control pills do not appear to affect IGF-1 levels in fertile women. Cyproterone acetate-containing birth control pills seem to decrease total and free IGF-1 levels. Combinations containing third- or fourth-generation progestins, including desogestrel, dienogest, drospirenone, or norgestimate, as well as birth control pills containing cyproterone acetate or chlormadinone acetate, are preferred for women with acne due to their stronger antiandrogenic effects. Studies have shown a 40 to 70% reduction in acne lesions with combined birth control pills. A 2014 review found that oral antibiotics appear to be somewhat more effective than birth control pills at reducing the number of inflammatory acne lesions at three months. However, the two therapies are approximately equal in efficacy at six months for decreasing the number of inflammatory, non-inflammatory, and total acne lesions. The authors of the analysis suggested that birth control pills may be a preferred first-line acne treatment, over oral antibiotics, in certain women due to similar efficacy at six months and a lack of associated antibiotic resistance. 
In contrast to combined birth control pills, progestogen-only birth control forms that contain androgenic progestins have been associated with worsened acne.Antiandrogens such as cyproterone acetate and spironolactone can successfully treat acne, especially in women with signs of excessive androgen production, such as increased hairiness or skin production of sebum, or scalp hair loss. Spironolactone is an effective treatment for acne in adult women. Unlike combined birth control pills, it is not approved by the United States Food and Drug Administration for this purpose. Spironolactone is an aldosterone antagonist and is a useful acne treatment due to its ability to additionally block the androgen receptor at higher doses. Alone or in combination with a birth control pill, spironolactone has shown a 33 to 85% reduction in acne lesions in women. The effectiveness of spironolactone for acne appears to be dose-dependent. High-dose cyproterone acetate alone reportedly decreases acne symptoms in women by 75 to 90% within three months. It is usually combined with an estrogen to avoid menstrual irregularities and estrogen deficiency. The medication appears to be effective in the treatment of acne in males, with one study finding that a high dosage reduced inflammatory acne lesions by 73%. However, spironolactone and cyproterone acetate's side effects in males, such as gynecomastia, sexual dysfunction, and decreased bone mineral density, generally make their use for male acne impractical.Pregnant and lactating women should not receive antiandrogens for their acne due to a possibility of birth disorders such as hypospadias and feminization of male babies. Women who are sexually active and who can or may become pregnant should use an effective method of contraception to prevent pregnancy while taking an antiandrogen. Antiandrogens are often combined with birth control pills for this reason, which can result in additive efficacy. 
The FDA added a black-box warning to spironolactone about possible tumor risks based on preclinical research with very high doses (>100-fold clinical doses) and cautioned that unnecessary use of the medication should be avoided. However, several large epidemiological studies subsequently found no greater risk of tumors in association with spironolactone in humans. Conversely, strong associations of cyproterone acetate with certain brain tumors have been discovered and its use has been restricted. The brain tumor risk with cyproterone acetate is due to its strong progestogenic actions and is not related to antiandrogenic activity nor shared by other antiandrogens.Flutamide, a pure antagonist of the androgen receptor, is effective in treating acne in women. It appears to reduce acne symptoms by 80 to 90% even at low doses, with several studies showing complete acne clearance. In one study, flutamide decreased acne scores by 80% within three months, whereas spironolactone decreased symptoms by only 40% in the same period. In a large long-term study, 97% of women reported satisfaction with the control of their acne with flutamide. Although effective, flutamide has a risk of serious liver toxicity, and cases of death in women taking even low doses of the medication to treat androgen-dependent skin and hair conditions have occurred. As such, the use of flutamide for acne has become increasingly limited, and it has been argued that continued use of flutamide for such purposes is unethical. 
Bicalutamide, a pure androgen receptor antagonist with the same mechanism as flutamide and with comparable or superior antiandrogenic efficacy but with a far lower risk of liver toxicity, is an alternative option to flutamide in the treatment of androgen-dependent skin and hair conditions in women.Clascoterone is a topical antiandrogen that has demonstrated effectiveness in the treatment of acne in both males and females and was approved for clinical use for this indication in August 2020. It has shown no systemic absorption or associated antiandrogenic side effects. In a small direct head-to-head comparison, clascoterone showed greater effectiveness than topical isotretinoin. 5α-Reductase inhibitors such as finasteride and dutasteride may be useful for the treatment of acne in both males and females but have not been adequately evaluated for this purpose. Moreover, 5α-reductase inhibitors have a strong potential for producing birth defects in male babies and this limits their use in women. However, 5α-reductase inhibitors are frequently used to treat excessive facial/body hair in women and can be combined with birth control pills to prevent pregnancy. There is no evidence as of 2010 to support the use of cimetidine or ketoconazole in the treatment of acne.Hormonal treatments for acne such as combined birth control pills and antiandrogens may be considered first-line therapy for acne under many circumstances, including desired contraception, known or suspected hyperandrogenism, acne during adulthood, acne that flares premenstrually, and when symptoms of significant sebum production (seborrhea) are co-present. Hormone therapy is effective for acne both in women with hyperandrogenism and in women with normal androgen levels. Azelaic acid Azelaic acid is effective for mild to moderate acne when applied topically at a 15–20% concentration. 
Treatment twice daily for six months is necessary, and is as effective as topical benzoyl peroxide 5%, isotretinoin 0.05%, and erythromycin 2%. Azelaic acid is an effective acne treatment due to its ability to reduce skin cell accumulation in the follicle and its antibacterial and anti-inflammatory properties. It has a slight skin-lightening effect due to its ability to inhibit melanin synthesis. Therefore, it is useful in treating individuals with acne who are also affected by post-inflammatory hyperpigmentation. Azelaic acid may cause skin irritation. It is less effective and more expensive than retinoids. Azelaic acid also led to worse treatment response when compared to benzoyl peroxide. When compared to tretinoin, azelaic acid makes little or no treatment response. Salicylic acid Salicylic acid is a topically applied beta-hydroxy acid that stops bacteria from reproducing and has keratolytic properties. It is less effective than retinoid therapy. Salicylic acid opens obstructed skin pores and promotes the shedding of epithelial skin cells. Dry skin is the most commonly seen side effect with topical application, though darkening of the skin can occur in individuals with darker skin types. Other medications Topical and oral preparations of nicotinamide (the amide form of vitamin B3) are alternative medical treatments. Nicotinamide reportedly improves acne due to its anti-inflammatory properties, its ability to suppress sebum production, and its wound healing properties. Topical and oral preparations of zinc are suggested treatments for acne; evidence to support their use for this purpose is limited. Zinc's capacities to reduce inflammation and sebum production as well as inhibit C. acnes growth are its proposed mechanisms for improving acne. 
Antihistamines may improve symptoms among those already taking isotretinoin due to their anti-inflammatory properties and their ability to suppress sebum production.Hydroquinone lightens the skin when applied topically by inhibiting tyrosinase, the enzyme responsible for converting the amino acid tyrosine to the skin pigment melanin, and is used to treat acne-associated post-inflammatory hyperpigmentation. By interfering with the production of melanin in the epidermis, hydroquinone leads to less hyperpigmentation as darkened skin cells are naturally shed over time. Improvement in skin hyperpigmentation is typically seen within six months when used twice daily. Hydroquinone is ineffective for hyperpigmentation affecting deeper layers of skin such as the dermis. The use of a sunscreen with SPF 15 or higher in the morning with reapplication every two hours is recommended when using hydroquinone. Its application only to affected areas lowers the risk of lightening the color of normal skin but can lead to a temporary ring of lightened skin around the hyperpigmented area. Hydroquinone is generally well-tolerated; side effects are typically mild (e.g., skin irritation) and occur with the use of a higher than the recommended 4% concentration. Most preparations contain the preservative sodium metabisulfite, which has been linked to rare cases of allergic reactions, including anaphylaxis and severe asthma exacerbations in susceptible people. In extremely rare cases, the frequent and improper application of high-dose hydroquinone has been associated with a systemic condition known as exogenous ochronosis (skin discoloration and connective tissue damage from the accumulation of homogentisic acid). Combination therapy Combination therapy—using medications of different classes together, each with a different mechanism of action—has been demonstrated to be a more effective approach to acne treatment than monotherapy. 
The use of topical benzoyl peroxide and antibiotics together is more effective than antibiotics alone. Similarly, using a topical retinoid with an antibiotic clears acne lesions faster than the use of antibiotics alone. Frequently used combinations include the following: antibiotic and benzoyl peroxide, antibiotic and topical retinoid, or topical retinoid and benzoyl peroxide. Dermatologists generally prefer combining benzoyl peroxide with a retinoid over the combination of a topical antibiotic with a retinoid. Both regimens are effective, but benzoyl peroxide does not lead to antibiotic resistance. Pregnancy Although sebaceous gland activity in the skin increases during the late stages of pregnancy, pregnancy has not been reliably associated with worsened acne severity. In general, topically applied medications are considered the first-line approach to acne treatment during pregnancy, as they have little systemic absorption and are therefore unlikely to harm a developing fetus. Highly recommended therapies include topically applied benzoyl peroxide (pregnancy category C) and azelaic acid (category B). Salicylic acid carries a category C safety rating due to higher systemic absorption (9–25%), and an association between the use of anti-inflammatory medications in the third trimester and adverse effects to the developing fetus including too little amniotic fluid in the uterus and early closure of the babies' ductus arteriosus blood vessel. Prolonged use of salicylic acid over significant areas of the skin or under occlusive (sealed) dressings is not recommended as these methods increase systemic absorption and the potential for fetal harm. Tretinoin (category C) and adapalene (category C) are very poorly absorbed, but certain studies have suggested teratogenic effects in the first trimester. The data examining the association between maternal topical retinoid exposure in the first trimester of pregnancy and adverse pregnancy outcomes is limited. 
A systematic review of observational studies concluded that such exposure does not appear to increase the risk of major birth defects, miscarriages, stillbirths, premature births, or low birth weight. Similarly, in studies examining the effects of topical retinoids during pregnancy, fetal harm has not been seen in the second and third trimesters. Nevertheless, since rare harms from topical retinoids are not ruled out, they are not recommended for use during pregnancy due to persistent safety concerns. Retinoids contraindicated for use during pregnancy include the topical retinoid tazarotene, and oral retinoids isotretinoin and acitretin (all category X). Spironolactone is relatively contraindicated for use during pregnancy due to its antiandrogen effects. Finasteride is not recommended as it is highly teratogenic.Topical antibiotics deemed safe during pregnancy include clindamycin, erythromycin, and metronidazole (all category B), due to negligible systemic absorption. Nadifloxacin and dapsone (category C) are other topical antibiotics that may be used to treat acne in pregnant women but have received less study. No adverse fetal events have been reported from the topical use of dapsone. If retinoids are used there is a high risk of abnormalities occurring in the developing fetus; women of childbearing age are therefore required to use effective birth control if retinoids are used to treat acne. Oral antibiotics deemed safe for pregnancy (all category B) include azithromycin, cephalosporins, and penicillins. Tetracyclines (category D) are contraindicated during pregnancy as they are known to deposit in developing fetal teeth, resulting in yellow discoloration and thinned tooth enamel. Their use during pregnancy has been associated with the development of acute fatty liver of pregnancy and is further avoided for this reason. Procedures Limited evidence supports comedo extraction, but it is an option for comedones that do not improve with standard treatment. 
Another procedure for immediate relief is the injection of a corticosteroid into an inflamed acne comedo. Electrocautery and electrofulguration are effective alternative treatments for comedones.Light therapy is a treatment method that involves delivering certain specific wavelengths of light to an area of skin affected by acne. Both regular and laser light have been used. The evidence for light therapy as a treatment for acne is weak and inconclusive. Various light therapies appear to provide a short-term benefit, but data for long-term outcomes, and outcomes in those with severe acne, are sparse; it may have a role for individuals whose acne has been resistant to topical medications. A 2016 meta-analysis was unable to conclude whether light therapies were more beneficial than placebo or no treatment, nor the duration of benefit.When regular light is used immediately following the application of a sensitizing substance to the skin such as aminolevulinic acid or methyl aminolevulinate, the treatment is referred to as photodynamic therapy (PDT). PDT has the most supporting evidence of all light therapy modalities. PDT treats acne by using various forms of light (e.g., blue light or red light) that preferentially target the pilosebaceous unit. Once the light activates the sensitizing substance, this generates free radicals and reactive oxygen species in the skin, which purposefully damage the sebaceous glands and kill C. acnes bacteria. Many different types of nonablative lasers (i.e., lasers that do not vaporize the top layer of the skin but rather induce a physiologic response in the skin from the light) have been used to treat acne, including those that use infrared wavelengths of light. Ablative lasers (such as CO2 and fractional types) have also been used to treat active acne and its scars. When ablative lasers are used, the treatment is often referred to as laser resurfacing because, as mentioned previously, the entire upper layers of the skin are vaporized. 
Ablative lasers are associated with higher rates of adverse effects compared with non-ablative lasers, with examples being post-inflammatory hyperpigmentation, persistent facial redness, and persistent pain. Physiologically, certain wavelengths of light, used with or without accompanying topical chemicals, are thought to kill bacteria and decrease the size and activity of the glands that produce sebum. Disadvantages of light therapy can include its cost, the need for multiple visits, the time required to complete the procedure(s), and pain associated with some of the treatment modalities. Typical side effects include skin peeling, temporary reddening of the skin, swelling, and post-inflammatory hyperpigmentation.Dermabrasion is an effective therapeutic procedure for reducing the appearance of superficial atrophic scars of the boxcar and rolling varieties. Ice-pick scars do not respond well to treatment with dermabrasion due to their depth. The procedure is painful and has many potential side effects such as skin sensitivity to sunlight, redness, and decreased pigmentation of the skin. Dermabrasion has fallen out of favor with the introduction of laser resurfacing. Unlike dermabrasion, there is no evidence that microdermabrasion is an effective treatment for acne.Dermal or subcutaneous fillers are substances injected into the skin to improve the appearance of acne scars. Fillers are used to increase natural collagen production in the skin and to increase skin volume and decrease the depth of acne scars. 
Examples of fillers used for this purpose include hyaluronic acid; poly(methyl methacrylate) microspheres with collagen; human and bovine collagen derivatives, and fat harvested from the person's own body (autologous fat transfer).Microneedling is a procedure in which an instrument with multiple rows of tiny needles is rolled over the skin to elicit a wound healing response and stimulate collagen production to reduce the appearance of atrophic acne scars in people with darker skin color. Notable adverse effects of microneedling include post-inflammatory hyperpigmentation and tram track scarring (described as discrete slightly raised scars in a linear distribution similar to a tram track). The latter is thought to be primarily attributable to improper technique by the practitioner, including the use of excessive pressure or inappropriately large needles.Subcision is useful for the treatment of superficial atrophic acne scars and involves the use of a small needle to loosen the fibrotic adhesions that result in the depressed appearance of the scar.Chemical peels can be used to reduce the appearance of acne scars. Mild peels include those using glycolic acid, lactic acid, salicylic acid, Jessner's solution, or a lower concentration (20%) of trichloroacetic acid. These peels only affect the epidermal layer of the skin and can be useful in the treatment of superficial acne scars as well as skin pigmentation changes from inflammatory acne. Higher concentrations of trichloroacetic acid (30–40%) are considered to be medium-strength peels and affect the skin as deep as the papillary dermis. Formulations of trichloroacetic acid concentrated to 50% or more are considered to be deep chemical peels. Medium-strength and deep-strength chemical peels are more effective for deeper atrophic scars but are more likely to cause side effects such as skin pigmentation changes, infection, and small white superficial cysts known as milia. 
Alternative medicine Researchers are investigating complementary therapies as treatment for people with acne. Low-quality evidence suggests topical application of tea tree oil or bee venom may reduce the total number of skin lesions in those with acne. Tea tree oil appears to be approximately as effective as benzoyl peroxide or salicylic acid but is associated with allergic contact dermatitis. Proposed mechanisms for tea tree oil's anti-acne effects include antibacterial action against C. acnes and anti-inflammatory properties. Numerous other plant-derived therapies have demonstrated positive effects against acne (e.g., basil oil, oligosaccharides from seaweed); however, few well-done studies have examined their use for this purpose. There is a lack of high-quality evidence for the use of acupuncture, herbal medicine, or cupping therapy for acne. Self-care Many over-the-counter treatments in many forms are available, which are often known as cosmeceuticals. Certain types of makeup may be useful to mask acne. In those with oily skin, a water-based product is often preferred. Prognosis Acne usually improves around the age of 20 but may persist into adulthood. Permanent physical scarring may occur. Rare complications from acne or its treatment include the formation of pyogenic granulomas, osteoma cutis, and acne with facial edema. Early and aggressive treatment of acne is advocated by some in the medical community to reduce the chances of these poor outcomes. Mental health impact There is good evidence to support the idea that acne and associated scarring negatively affect a person's psychological state, worsen mood, lower self-esteem, and are associated with a higher risk of anxiety disorders, depression, and suicidal thoughts.Misperceptions about acne's causative and aggravating factors are common, and people with acne often blame themselves, and others often blame those with acne for their condition. Such blame can worsen the affected person's sense of self-esteem. 
Until the 20th century, even among dermatologists, the list of causes was believed to include excessive sexual thoughts and masturbation. Dermatology's association with sexually transmitted infections, especially syphilis, contributed to the stigma.Another psychological complication of acne vulgaris is acne excoriée, which occurs when a person persistently picks and scratches pimples, irrespective of the severity of their acne. This can lead to significant scarring, changes in the affected person's skin pigmentation, and a cyclic worsening of the affected person's anxiety about their appearance. Epidemiology Globally, acne affects approximately 650 million people, or about 9.4% of the population, as of 2010. It affects nearly 90% of people in Western societies during their teenage years, but can occur before adolescence and may persist into adulthood. While acne that first develops between the ages of 21 and 25 is uncommon, it affects 54% of women and 40% of men older than 25 years of age and has a lifetime prevalence of 85%. About 20% of those affected have moderate or severe cases. It is slightly more common in females than males (9.8% versus 9.0%). In those over 40 years old, 1% of males and 5% of females still have problems.Rates appear to be lower in rural societies. While some research has found it affects people of all ethnic groups, acne may not occur in the non-Westernized peoples of Papua New Guinea and Paraguay.Acne affects 40–50 million people in the United States (16%) and approximately 3–5 million in Australia (23%). Severe acne tends to be more common in people of Caucasian or Amerindian descent than in people of African descent. History Historical records indicate Pharaohs had acne, which may be the earliest known reference to the disease. Sulfur's usefulness as a topical remedy for acne dates back to at least the reign of Cleopatra (69–30 BCE). 
The sixth-century Greek physician Aëtius of Amida reportedly coined the term "ionthos" (ἴονθος) or "acnae", which seems to be a reference to facial skin lesions that occur during "the 'acme' of life" (puberty).In the 16th century, the French physician and botanist François Boissier de Sauvages de Lacroix provided one of the earlier descriptions of acne. He used the term "psydracia achne" to describe small, red, and hard tubercles that altered a person's facial appearance during adolescence and were neither itchy nor painful.The recognition and characterization of acne progressed in 1776 when Josef Plenck (an Austrian physician) published a book that proposed the novel concept of classifying skin diseases by their elementary (initial) lesions. In 1808 the English dermatologist Robert Willan refined Plenck's work by providing the first detailed descriptions of several skin disorders using morphologic terminology that remains in use today. Thomas Bateman continued and expanded on Robert Willan's work as his student and provided the first descriptions and illustrations of acne accepted as accurate by modern dermatologists. Erasmus Wilson, in 1842, was the first to make the distinction between acne vulgaris and rosacea. The first professional medical monograph dedicated entirely to acne was written by Lucius Duncan Bulkley and published in New York in 1885.Scientists initially hypothesized that acne represented a disease of the skin's hair follicle, and occurred due to blockage of the pore by sebum. During the 1880s, they observed bacteria by microscopy in skin samples from people with acne. Investigators believed the bacteria caused comedones, sebum production, and ultimately acne. During the mid-twentieth century, dermatologists realized that no single hypothesized factor (sebum, bacteria, or excess keratin) fully accounted for the disease in its entirety. 
This led to the current understanding that acne could be explained by a sequence of related events, beginning with blockage of the skin follicle by excessive dead skin cells, followed by bacterial invasion of the hair follicle pore, changes in sebum production, and inflammation.The approach to acne treatment underwent significant changes during the twentieth century. Retinoids became a medical treatment for acne in 1943. Benzoyl peroxide was first proposed as a treatment in 1958 and remains a staple of acne treatment. The introduction of oral tetracycline antibiotics (such as minocycline) modified acne treatment in the 1950s. These reinforced the idea amongst dermatologists that bacterial growth on the skin plays an important role in causing acne. Subsequently, in the 1970s, tretinoin (original trade name Retin A) was found to be an effective treatment. The development of oral isotretinoin (sold as Accutane and Roaccutane) followed in 1980. After its introduction in the United States, scientists identified isotretinoin as a medication highly likely to cause birth defects if taken during pregnancy. In the United States, more than 2,000 women became pregnant while taking isotretinoin between 1982 and 2003, with most pregnancies ending in abortion or miscarriage. Approximately 160 babies were born with birth defects due to maternal use of isotretinoin during pregnancy.Treatment of acne with topical crushed dry ice, known as cryoslush, was first described in 1907 but is no longer performed commonly. Before 1960, the use of X-rays was also a common treatment. Society and culture The costs and social impact of acne are substantial. In the United States, acne vulgaris is responsible for more than 5 million doctor visits and costs over US$2.5 billion each year in direct costs. Similarly, acne vulgaris is responsible for 3.5 million doctor visits each year in the United Kingdom. 
Sales for the top ten leading acne treatment brands in the US in 2015 amounted to $352 million.Acne vulgaris and its resultant scars are associated with significant social and academic difficulties that can last into adulthood. During the Great Depression, dermatologists discovered that young men with acne had difficulty obtaining jobs. Until the 1930s, many people viewed acne as a trivial problem among middle-class girls because, unlike smallpox and tuberculosis, no one died from it, and a feminine problem, because boys were much less likely to seek medical assistance for it. During World War II, some soldiers in tropical climates developed such severe and widespread tropical acne on their bodies that they were declared medically unfit for duty. Research Efforts to better understand the mechanisms of sebum production are underway. This research aims to develop medications that target and interfere with the hormones that are known to increase sebum production (e.g., IGF-1 and alpha-melanocyte-stimulating hormone). Other sebum-lowering medications such as topical antiandrogens, peroxisome proliferator-activated receptor modulators, and inhibitors of the stearoyl-CoA desaturase-1 enzyme are also a focus of research efforts. Particles that release nitric oxide into the skin to decrease skin inflammation caused by C. acnes and the immune system have shown promise for improving acne in early clinical trials. Another avenue of early-stage research has focused on how to best use laser and light therapy to selectively destroy sebum-producing glands in the skin's hair follicles to reduce sebum production and improve acne appearance.The use of antimicrobial peptides against C. acnes is under investigation as a treatment for acne to overcome antibiotic resistance. In 2007, scientists reported the first genome sequencing of a C. acnes bacteriophage (PA6). 
The authors proposed applying this research toward the development of bacteriophage therapy as an acne treatment to overcome the problems associated with long-term antibiotic use, such as bacterial resistance. Oral and topical probiotics are under evaluation as treatments for acne. Probiotics may have therapeutic effects for those affected by acne due to their ability to decrease skin inflammation and improve skin moisture by increasing the skin's ceramide content. As of 2014, knowledge of the effects of probiotics on acne in humans was limited.Decreased levels of retinoic acid in the skin may contribute to comedo formation. Researchers are investigating methods to increase the skin's production of retinoic acid to address this deficiency. A vaccine against inflammatory acne has shown promising results in mice and humans. Some have voiced concerns about creating a vaccine designed to neutralize a stable community of normal skin bacteria that is known to protect the skin from colonization by more harmful microorganisms. Other animals Acne can occur on cats, dogs, and horses. References Further reading External links Acne Support. Expert, impartial advice on acne by the British Association of Dermatologists (BAD). \ No newline at end of file diff --git a/HybridRAG/tests/data/Diabetes.txt b/HybridRAG/tests/data/Diabetes.txt new file mode 100644 index 0000000000..a4dac012bd --- /dev/null +++ b/HybridRAG/tests/data/Diabetes.txt @@ -0,0 +1 @@ +Diabetes mellitus, often known simply as diabetes, is a group of common endocrine diseases characterized by sustained high blood sugar levels. Diabetes is due to either the pancreas not producing enough insulin, or the cells of the body becoming unresponsive to the hormone's effects. Classic symptoms include thirst, polyuria, weight loss, and blurred vision. If left untreated, the disease can lead to various health complications, including disorders of the cardiovascular system, eye, kidney, and nerves. 
Diabetes accounts for approximately 4.2 million deaths every year, with an estimated 1.5 million caused by either untreated or poorly treated diabetes.The major types of diabetes are type 1 and type 2. The most common treatment for type 1 is insulin replacement therapy (insulin injections), while anti-diabetic medications (such as metformin and semaglutide) and lifestyle modifications can be used to manage type 2. Gestational diabetes, a form that arises during pregnancy in some women, normally resolves shortly after delivery.As of 2021, an estimated 537 million people had diabetes worldwide accounting for 10.5% of the adult population, with type 2 making up about 90% of all cases. The World Health Organization has reported that diabetes was "among the top 10 causes of death in 2021, following a significant percentage increase of 95% since 2000." It is estimated that by 2045, approximately 783 million adults, or 1 in 8, will be living with diabetes, representing a 46% increase from the current figures. The prevalence of the disease continues to increase, most dramatically in low- and middle-income nations. Rates are similar in women and men, with diabetes being the seventh leading cause of death globally. The global expenditure on diabetes-related healthcare is an estimated US$760 billion a year. Signs and symptoms The classic symptoms of untreated diabetes are polyuria, thirst, and weight loss. Several other non-specific signs and symptoms may also occur, including fatigue, blurred vision, sweet smelling urine/semen and genital itchiness due to Candida infection. About half of affected individuals may also be asymptomatic. Type 1 presents abruptly following a pre-clinical phase, while type 2 has a more insidious onset; patients may remain asymptomatic for many years.Diabetic ketoacidosis is a medical emergency that occurs most commonly in type 1, but may also occur in type 2 if it has been longstanding or if the individual has significant β-cell dysfunction. 
Excessive production of ketone bodies leads to signs and symptoms including nausea, vomiting, abdominal pain, the smell of acetone in the breath, deep breathing known as Kussmaul breathing, and in severe cases decreased level of consciousness. Hyperosmolar hyperglycemic state is another emergency characterized by dehydration secondary to severe hyperglycemia, with resultant hypernatremia leading to an altered mental state and possibly coma.Hypoglycemia is a recognized complication of insulin treatment used in diabetes. An acute presentation can include mild symptoms such as sweating, trembling, and palpitations, to more serious effects including impaired cognition, confusion, seizures, coma, and rarely death. Recurrent hypoglycemic episodes may lower the glycemic threshold at which symptoms occur, meaning mild symptoms may not appear before cognitive deterioration begins to occur. Long-term complications The major long-term complications of diabetes relate to damage to blood vessels at both macrovascular and microvascular levels. Diabetes doubles the risk of cardiovascular disease, and about 75% of deaths in people with diabetes are due to coronary artery disease. Other macrovascular morbidities include stroke and peripheral artery disease.Microvascular disease affects the eyes, kidneys, and nerves. Damage to the retina, known as diabetic retinopathy, is the most common cause of blindness in people of working age. The eyes can also be affected in other ways, including development of cataract and glaucoma. It is recommended that people with diabetes visit an optometrist or ophthalmologist once a year.Diabetic nephropathy is a major cause of chronic kidney disease, accounting for over 50% of patients on dialysis in the United States. Diabetic neuropathy, damage to nerves, manifests in various ways, including sensory loss, neuropathic pain, and autonomic dysfunction (such as postural hypotension, diarrhoea, and erectile dysfunction). 
Loss of pain sensation predisposes to trauma that can lead to diabetic foot problems (such as ulceration), the most common cause of non-traumatic lower-limb amputation.Hearing loss is another long-term complication associated with diabetes.Based on extensive data and numerous cases of gallstone disease, it appears that a causal link might exist between type 2 diabetes and gallstones. People with diabetes are at a higher risk of developing gallstones compared to those without diabetes.There is a link between cognitive deficit and diabetes; studies have shown that diabetic individuals are at a greater risk of cognitive decline, and have a greater rate of decline compared to those without the disease. The condition also predisposes to falls in the elderly, especially those treated with insulin. Causes Diabetes is classified by the World Health Organization into six categories: type 1 diabetes, type 2 diabetes, hybrid forms of diabetes (including slowly evolving, immune-mediated diabetes of adults and ketosis-prone type 2 diabetes), hyperglycemia first detected during pregnancy, "other specific types", and "unclassified diabetes". Diabetes is a more variable disease than once thought, and individuals may have a combination of forms. Type 1 Type 1 accounts for 5 to 10% of diabetes cases and is the most common type diagnosed in patients under 20 years; however, the older term "juvenile-onset diabetes" is no longer used as onset in adulthood is not unusual. The disease is characterized by loss of the insulin-producing beta cells of the pancreatic islets, leading to severe insulin deficiency, and can be further classified as immune-mediated or idiopathic (without known cause). The majority of cases are immune-mediated, in which a T cell-mediated autoimmune attack causes loss of beta cells and thus insulin deficiency. Patients often have irregular and unpredictable blood sugar levels due to very low insulin and an impaired counter-response to hypoglycemia. 
Type 1 diabetes is partly inherited, with multiple genes, including certain HLA genotypes, known to influence the risk of diabetes. In genetically susceptible people, the onset of diabetes can be triggered by one or more environmental factors, such as a viral infection or diet. Several viruses have been implicated, but to date there is no stringent evidence to support this hypothesis in humans.Type 1 diabetes can occur at any age, and a significant proportion is diagnosed during adulthood. Latent autoimmune diabetes of adults (LADA) is the diagnostic term applied when type 1 diabetes develops in adults; it has a slower onset than the same condition in children. Given this difference, some use the unofficial term "type 1.5 diabetes" for this condition. Adults with LADA are frequently initially misdiagnosed as having type 2 diabetes, based on age rather than a cause. LADA leaves adults with higher levels of insulin production than type 1 diabetes, but not enough insulin production for healthy blood sugar levels. Type 2 Type 2 diabetes is characterized by insulin resistance, which may be combined with relatively reduced insulin secretion. The defective responsiveness of body tissues to insulin is believed to involve the insulin receptor. However, the specific defects are not known. Diabetes mellitus cases due to a known defect are classified separately. Type 2 diabetes is the most common type of diabetes mellitus accounting for 95% of diabetes. Many people with type 2 diabetes have evidence of prediabetes (impaired fasting glucose and/or impaired glucose tolerance) before meeting the criteria for type 2 diabetes. The progression of prediabetes to overt type 2 diabetes can be slowed or reversed by lifestyle changes or medications that improve insulin sensitivity or reduce the liver's glucose production.Type 2 diabetes is primarily due to lifestyle factors and genetics. 
A number of lifestyle factors are known to be important to the development of type 2 diabetes, including obesity (defined by a body mass index of greater than 30), lack of physical activity, poor diet, stress, and urbanization. Excess body fat is associated with 30% of cases in people of Chinese and Japanese descent, 60–80% of cases in those of European and African descent, and 100% of Pima Indians and Pacific Islanders. Even those who are not obese may have a high waist–hip ratio.Dietary factors such as sugar-sweetened drinks are associated with an increased risk. The type of fats in the diet is also important, with saturated fat and trans fats increasing the risk and polyunsaturated and monounsaturated fat decreasing the risk. Eating white rice excessively may increase the risk of diabetes, especially in Chinese and Japanese people. Lack of physical activity may increase the risk of diabetes in some people.Adverse childhood experiences, including abuse, neglect, and household difficulties, increase the likelihood of type 2 diabetes later in life by 32%, with neglect having the strongest effect.Antipsychotic medication side effects (specifically metabolic abnormalities, dyslipidemia and weight gain) are also potential risk factors. Gestational diabetes Gestational diabetes resembles type 2 diabetes in several respects, involving a combination of relatively inadequate insulin secretion and responsiveness. It occurs in about 2–10% of all pregnancies and may improve or disappear after delivery. It is recommended that all pregnant women get tested starting around 24–28 weeks gestation. It is most often diagnosed in the second or third trimester because of the increase in insulin-antagonist hormone levels that occurs at this time. However, after pregnancy approximately 5–10% of women with gestational diabetes are found to have another form of diabetes, most commonly type 2. 
Gestational diabetes is fully treatable, but requires careful medical supervision throughout the pregnancy. Management may include dietary changes, blood glucose monitoring, and in some cases, insulin may be required.Though it may be transient, untreated gestational diabetes can damage the health of the fetus or mother. Risks to the baby include macrosomia (high birth weight), congenital heart and central nervous system abnormalities, and skeletal muscle malformations. Increased levels of insulin in a fetus's blood may inhibit fetal surfactant production and cause infant respiratory distress syndrome. A high blood bilirubin level may result from red blood cell destruction. In severe cases, perinatal death may occur, most commonly as a result of poor placental perfusion due to vascular impairment. Labor induction may be indicated with decreased placental function. A caesarean section may be performed if there is marked fetal distress or an increased risk of injury associated with macrosomia, such as shoulder dystocia. Other types Maturity onset diabetes of the young (MODY) is a rare autosomal dominant inherited form of diabetes, due to one of several single-gene mutations causing defects in insulin production. It is significantly less common than the three main types, constituting 1–2% of all cases. The name of this disease refers to early hypotheses as to its nature. Being due to a defective gene, this disease varies in age at presentation and in severity according to the specific gene defect; thus, there are at least 13 subtypes of MODY. People with MODY often can control it without using insulin.Some cases of diabetes are caused by the body's tissue receptors not responding to insulin (even when insulin levels are normal, which is what separates it from type 2 diabetes); this form is very uncommon. Genetic mutations (autosomal or mitochondrial) can lead to defects in beta cell function. 
Abnormal insulin action may also have been genetically determined in some cases. Any disease that causes extensive damage to the pancreas may lead to diabetes (for example, chronic pancreatitis and cystic fibrosis). Diseases associated with excessive secretion of insulin-antagonistic hormones can cause diabetes (which is typically resolved once the hormone excess is removed). Many drugs impair insulin secretion and some toxins damage pancreatic beta cells, whereas others increase insulin resistance (especially glucocorticoids which can provoke "steroid diabetes"). The ICD-10 (1992) diagnostic entity, malnutrition-related diabetes mellitus (ICD-10 code E12), was deprecated by the World Health Organization (WHO) when the current taxonomy was introduced in 1999. Yet another form of diabetes that people may develop is double diabetes. This is when a type 1 diabetic becomes insulin resistant, the hallmark of type 2 diabetes, or has a family history for type 2 diabetes. It was first discovered in 1990 or 1991.The following is a list of disorders that may increase the risk of diabetes: Pathophysiology Insulin is the principal hormone that regulates the uptake of glucose from the blood into most cells of the body, especially liver, adipose tissue and muscle, except smooth muscle, in which insulin acts via the IGF-1. Therefore, deficiency of insulin or the insensitivity of its receptors plays a central role in all forms of diabetes mellitus.The body obtains glucose from three main sources: the intestinal absorption of food; the breakdown of glycogen (glycogenolysis), the storage form of glucose found in the liver; and gluconeogenesis, the generation of glucose from non-carbohydrate substrates in the body. Insulin plays a critical role in regulating glucose levels in the body. 
Insulin can inhibit the breakdown of glycogen or the process of gluconeogenesis, it can stimulate the transport of glucose into fat and muscle cells, and it can stimulate the storage of glucose in the form of glycogen.Insulin is released into the blood by beta cells (β-cells), found in the islets of Langerhans in the pancreas, in response to rising levels of blood glucose, typically after eating. Insulin is used by about two-thirds of the body's cells to absorb glucose from the blood for use as fuel, for conversion to other needed molecules, or for storage. Lower glucose levels result in decreased insulin release from the beta cells and in the breakdown of glycogen to glucose. This process is mainly controlled by the hormone glucagon, which acts in the opposite manner to insulin.If the amount of insulin available is insufficient, or if cells respond poorly to the effects of insulin (insulin resistance), or if the insulin itself is defective, then glucose is not absorbed properly by the body cells that require it, and is not stored appropriately in the liver and muscles. The net effect is persistently high levels of blood glucose, poor protein synthesis, and other metabolic derangements, such as metabolic acidosis in cases of complete insulin deficiency.When there is too much glucose in the blood for a long time, the kidneys cannot absorb it all (reach a threshold of reabsorption) and the extra glucose gets passed out of the body through urine (glycosuria). This increases the osmotic pressure of the urine and inhibits reabsorption of water by the kidney, resulting in increased urine production (polyuria) and increased fluid loss. Lost blood volume is replaced osmotically from water in body cells and other body compartments, causing dehydration and increased thirst (polydipsia). In addition, intracellular glucose deficiency stimulates appetite leading to excessive food intake (polyphagia). 
Diagnosis Diabetes mellitus is diagnosed with a test for the glucose content in the blood, and is diagnosed by demonstrating any one of the following:Fasting plasma glucose level ≥ 7.0 mmol/L (126 mg/dL). For this test, blood is taken after a period of fasting, i.e. in the morning before breakfast, after the patient had sufficient time to fast overnight or at least 8 hours before the test.Plasma glucose ≥ 11.1 mmol/L (200 mg/dL) two hours after a 75 gram oral glucose load as in a glucose tolerance test (OGTT)Symptoms of high blood sugar and plasma glucose ≥ 11.1 mmol/L (200 mg/dL) either while fasting or not fastingGlycated hemoglobin (HbA1C) ≥ 48 mmol/mol (≥ 6.5 DCCT %).A positive result, in the absence of unequivocal high blood sugar, should be confirmed by a repeat of any of the above methods on a different day. It is preferable to measure a fasting glucose level because of the ease of measurement and the considerable time commitment of formal glucose tolerance testing, which takes two hours to complete and offers no prognostic advantage over the fasting test. According to the current definition, two fasting glucose measurements at or above 7.0 mmol/L (126 mg/dL) is considered diagnostic for diabetes mellitus.Per the WHO, people with fasting glucose levels from 6.1 to 6.9 mmol/L (110 to 125 mg/dL) are considered to have impaired fasting glucose. People with plasma glucose at or above 7.8 mmol/L (140 mg/dL), but not over 11.1 mmol/L (200 mg/dL), two hours after a 75 gram oral glucose load are considered to have impaired glucose tolerance. Of these two prediabetic states, the latter in particular is a major risk factor for progression to full-blown diabetes mellitus, as well as cardiovascular disease. 
The American Diabetes Association (ADA) since 2003 uses a slightly different range for impaired fasting glucose of 5.6 to 6.9 mmol/L (100 to 125 mg/dL).Glycated hemoglobin is better than fasting glucose for determining risks of cardiovascular disease and death from any cause. Prevention There is no known preventive measure for type 1 diabetes. However, islet autoimmunity and multiple antibodies can be a strong predictor of the onset of type 1 diabetes. Type 2 diabetes—which accounts for 85–90% of all cases worldwide—can often be prevented or delayed by maintaining a normal body weight, engaging in physical activity, and eating a healthy diet. Higher levels of physical activity (more than 90 minutes per day) reduce the risk of diabetes by 28%. Dietary changes known to be effective in helping to prevent diabetes include maintaining a diet rich in whole grains and fiber, and choosing good fats, such as the polyunsaturated fats found in nuts, vegetable oils, and fish. Limiting sugary beverages and eating less red meat and other sources of saturated fat can also help prevent diabetes. Tobacco smoking is also associated with an increased risk of diabetes and its complications, so smoking cessation can be an important preventive measure as well.The relationship between type 2 diabetes and the main modifiable risk factors (excess weight, unhealthy diet, physical inactivity and tobacco use) is similar in all regions of the world. There is growing evidence that the underlying determinants of diabetes are a reflection of the major forces driving social, economic and cultural change: globalization, urbanization, population aging, and the general health policy environment. Comorbidity Diabetes patients' comorbidities have a significant impact on medical expenses and related costs. 
It has been demonstrated that patients with diabetes are more likely to experience respiratory, urinary tract, and skin infections, develop atherosclerosis, hypertension, and chronic kidney disease, putting them at increased risk of infection and complications that require medical attention. Patients with diabetes mellitus are more likely to experience certain infections, such as COVID-19, with prevalence rates ranging from 5.3 to 35.5%. Maintaining adequate glycemic control is the primary goal of diabetes management since it is critical to managing diabetes and preventing or postponing such complications.People with type 1 diabetes have higher rates of autoimmune disorders than the general population. An analysis of a type 1 diabetes registry found that 27% of the 25,000 participants had other autoimmune disorders. Between 2% and 16% of people with type 1 diabetes also have celiac disease. Management Diabetes management concentrates on keeping blood sugar levels close to normal, without causing low blood sugar. This can usually be accomplished with dietary changes, exercise, weight loss, and use of appropriate medications (insulin, oral medications).Learning about the disease and actively participating in the treatment is important, since complications are far less common and less severe in people who have well-managed blood sugar levels. The goal of treatment is an A1C level below 7%. Attention is also paid to other health problems that may accelerate the negative effects of diabetes. These include smoking, high blood pressure, metabolic syndrome obesity, and lack of regular exercise. Specialized footwear is widely used to reduce the risk of diabetic foot ulcers by relieving the pressure on the foot. 
Foot examination for patients living with diabetes should be done annually which includes sensation testing, foot biomechanics, vascular integrity and foot structure.Concerning those with severe mental illness, the efficacy of type 2 diabetes self-management interventions is still poorly explored, with insufficient scientific evidence to show whether these interventions have similar results to those observed in the general population. Lifestyle People with diabetes can benefit from education about the disease and treatment, dietary changes, and exercise, with the goal of keeping both short-term and long-term blood glucose levels within acceptable bounds. In addition, given the associated higher risks of cardiovascular disease, lifestyle modifications are recommended to control blood pressure.Weight loss can prevent progression from prediabetes to diabetes type 2, decrease the risk of cardiovascular disease, or result in a partial remission in people with diabetes. No single dietary pattern is best for all people with diabetes. Healthy dietary patterns, such as the Mediterranean diet, low-carbohydrate diet, or DASH diet, are often recommended, although evidence does not support one over the others. According to the ADA, "reducing overall carbohydrate intake for individuals with diabetes has demonstrated the most evidence for improving glycemia", and for individuals with type 2 diabetes who cannot meet the glycemic targets or where reducing anti-glycemic medications is a priority, low or very-low carbohydrate diets are a viable approach. For overweight people with type 2 diabetes, any diet that achieves weight loss is effective.A 2020 Cochrane systematic review compared several non-nutritive sweeteners to sugar, placebo and a nutritive low-calorie sweetener (tagatose), but the results were unclear for effects on HbA1c, body weight and adverse events. 
The studies included were mainly of very low certainty and did not report on health-related quality of life, diabetes complications, all-cause mortality or socioeconomic effects. Exercise has been demonstrated to improve health outcomes. However, fear of hypoglycemia can negatively affect how youth who have been diagnosed with diabetes view exercise. Managing insulin, carbohydrate intake, and physical activity can become a burden that drives youth away from the benefits and enjoyment of exercise. Several studies have examined what can be done for, and applied to, the youth population diagnosed with type 1 diabetes. One study focused on the impact of exercise education on physical activity. Over the course of a 12-month program, youth and their parents participated in 4 education sessions covering the benefits of exercise, safe procedures, glucose control, and physical activity. In a survey conducted at the beginning, youth and parents reported their fear of hypoglycemia. At the end of the program, most of the youth and parents expressed confidence in how to manage and handle situations involving hypoglycemia. In some instances, youth provided feedback that a continuation of the sessions would be beneficial. Two other studies investigated how exercise affects adolescents with T1D. In one of those studies, the impact of exercise on glucose changes was assessed by minutes per day, intensity, duration, and heart rate. Glucose was also monitored for changes during exercise, post-exercise, and overnight. The other study investigated how different types of exercise affect glucose levels. The exercise types were continuous moderate exercise and interval high-intensity exercise. Both types consisted of 2 sets of 10 minutes of work at different pedaling paces. The continuous group pedaled at 50% intensity and had a 5-minute passive recovery. 
The high-intensity group pedaled at 150% for 15 seconds, intermixed with 30-second passive recoveries. Once the studies finished collecting and analyzing their data, the results were as follows. In the study comparing the different intensities, insulin and carbohydrate intake did not differ significantly before or after exercise. In terms of glucose, there was a greater drop in blood glucose post-exercise in the high-intensity condition (−1.47 mmol/L). During recovery, continuous exercise showed a greater decrease in blood glucose. Overall, continuous exercise proved more favorable for managing blood glucose levels. The other study noted that exercise also had a notable impact on glucose levels: in post-exercise measurements, a low mean glucose level occurred 12 to 16 hours after exercising, although hypoglycemia rates were higher among participants exercising for longer sessions (≥90 minutes). Overall, participants maintained well-managed glucose control by taking in an appropriate amount of carbohydrates without any insulin adjustments. Lastly, the study that educated youth and parents about the importance of exercise and the management of hypoglycemia found that many youths felt confident about continuing to exercise regularly and being able to manage their glucose levels. Exercise is therefore important, and youth and parents can be shown that being physically active is possible at specific intensities, given a proper understanding of how to handle glucose control over time. Diabetes and youth Youth dealing with diabetes face unique challenges. These can include the emotional, psychological, and social implications of managing a chronic condition at such a young age. Both forms of diabetes carry long-term risks of complications like cardiovascular disease, kidney damage, and nerve damage. 
This is why early intervention and effective management are important to improving long-term health. Physical activity plays a vital role in managing diabetes, improving glycemic control, and enhancing the overall quality of life for children and adolescents. Younger children and adolescents with T1D tend to be more physically active compared to older individuals, possibly because of the more demanding schedules and sedentary lifestyles of older adolescents, who are often in high school or university. This age-related decrease in physical activity is a potential challenge to keeping up with the ideal healthy lifestyle. People who have had T1D for a longer amount of time also tend to be less active. As diabetes progresses, people may face more barriers to engaging in physical activity. Examples could include anxiety about experiencing hypoglycemic events during exercise or the physical challenges posed by the long-term complications that diabetes causes. Increased physical activity in youth with T1D is associated with improved health. These outcomes can include better lipid profiles (higher HDL-C and lower triglycerides), healthier body composition (reduced waist circumference and BMI), and improved overall physical health. These benefits are especially important during childhood and adolescence because this is when proper growth and development are occurring. Younger people with type 2 diabetes tend to have lower levels of physical activity and cardiorespiratory fitness (CRF) compared to their peers without diabetes. This contributes to their poorer overall health and increases the risk of cardiovascular and metabolic complications. Despite recommendations for physical activity as part of diabetes management, many youth and young adolescents with type 2 diabetes do not meet the guidelines, hindering their ability to effectively manage blood glucose levels and improve their health. CRF is a key health indicator. 
Higher levels of CRF is associated with better health outcomes. This means that increasing CRF through exercise can provide important benefits for managing type 2 diabetes. There is a need for targeted interventions that promote physical activity and improve CRF in youth with type 2 diabetes to help reduce the risk of long-term complications.When it comes to resistance training, it is found to have no significant effect on insulin sensitivity in children and adolescents, despite it having positive trends. Intervention length, training intensity, and the participants' physical maturation might explain the mixed results. Longer and higher-intensity programs showed more promising results. Future research could focus on more dire metabolic conditions like type II diabetes, investigate the role of physical maturation, and think about including longer intervention periods. While resistance training complements aerobic exercise, its standalone effects on insulin sensitivity remain unclear. Medications Glucose control Most medications used to treat diabetes act by lowering blood sugar levels through different mechanisms. There is broad consensus that when people with diabetes maintain tight glucose control – keeping the glucose levels in their blood within normal ranges – they experience fewer complications, such as kidney problems or eye problems. There is, however, debate as to whether this is appropriate and cost effective for people later in life in whom the risk of hypoglycemia may be more significant.There are a number of different classes of anti-diabetic medications. Type 1 diabetes requires treatment with insulin, ideally using a "basal bolus" regimen that most closely matches normal insulin release: long-acting insulin for the basal rate and short-acting insulin with meals. Type 2 diabetes is generally treated with medication that is taken by mouth (e.g. 
metformin) although some eventually require injectable treatment with insulin or GLP-1 agonists.Metformin is generally recommended as a first-line treatment for type 2 diabetes, as there is good evidence that it decreases mortality. It works by decreasing the liver's production of glucose, and increasing the amount of glucose stored in peripheral tissue. Several other groups of drugs, mainly oral medication, may also decrease blood sugar in type 2 diabetes. These include agents that increase insulin release (sulfonylureas), agents that decrease absorption of sugar from the intestines (acarbose), agents that inhibit the enzyme dipeptidyl peptidase-4 (DPP-4) that inactivates incretins such as GLP-1 and GIP (sitagliptin), agents that make the body more sensitive to insulin (thiazolidinedione) and agents that increase the excretion of glucose in the urine (SGLT2 inhibitors). When insulin is used in type 2 diabetes, a long-acting formulation is usually added initially, while continuing oral medications.Some severe cases of type 2 diabetes may also be treated with insulin, which is increased gradually until glucose targets are reached. Blood pressure lowering Cardiovascular disease is a serious complication associated with diabetes, and many international guidelines recommend blood pressure treatment targets that are lower than 140/90 mmHg for people with diabetes. However, there is only limited evidence regarding what the lower targets should be. 
A 2016 systematic review found potential harm to treating to targets lower than 140 mmHg, and a subsequent systematic review in 2019 found no evidence of additional benefit from blood pressure lowering to between 130 – 140mmHg, although there was an increased risk of adverse events.2015 American Diabetes Association recommendations are that people with diabetes and albuminuria should receive an inhibitor of the renin-angiotensin system to reduce the risks of progression to end-stage renal disease, cardiovascular events, and death. There is some evidence that angiotensin converting enzyme inhibitors (ACEIs) are superior to other inhibitors of the renin-angiotensin system such as angiotensin receptor blockers (ARBs), or aliskiren in preventing cardiovascular disease. Although a more recent review found similar effects of ACEIs and ARBs on major cardiovascular and renal outcomes. There is no evidence that combining ACEIs and ARBs provides additional benefits. Aspirin The use of aspirin to prevent cardiovascular disease in diabetes is controversial. Aspirin is recommended by some in people at high risk of cardiovascular disease; however, routine use of aspirin has not been found to improve outcomes in uncomplicated diabetes. 2015 American Diabetes Association recommendations for aspirin use (based on expert consensus or clinical experience) are that low-dose aspirin use is reasonable in adults with diabetes who are at intermediate risk of cardiovascular disease (10-year cardiovascular disease risk, 5–10%). National guidelines for England and Wales by the National Institute for Health and Care Excellence (NICE) recommend against the use of aspirin in people with type 1 or type 2 diabetes who do not have confirmed cardiovascular disease. Surgery Weight loss surgery in those with obesity and type 2 diabetes is often an effective measure. 
Many are able to maintain normal blood sugar levels with little or no medications following surgery and long-term mortality is decreased. There is, however, a short-term mortality risk of less than 1% from the surgery. The body mass index cutoffs for when surgery is appropriate are not yet clear. It is recommended that this option be considered in those who are unable to get both their weight and blood sugar under control.A pancreas transplant is occasionally considered for people with type 1 diabetes who have severe complications of their disease, including end stage kidney disease requiring kidney transplantation.Diabetic peripheral neuropathy (DPN) affects 30% of all diabetes patients. When DPN is superimposed with nerve compression, DPN may be treatable with multiple nerve decompressions. The theory is that DPN predisposes peripheral nerves to compression at anatomical sites of narrowing, and that the majority of DPN symptoms are actually attributable to nerve compression, a treatable condition, rather than DPN itself. The surgery is associated with lower pain scores, higher two-point discrimination (a measure of sensory improvement), lower rate of ulcerations, fewer falls (in the case of lower extremity decompression), and fewer amputations. Self-management and support In countries using a general practitioner system, such as the United Kingdom, care may take place mainly outside hospitals, with hospital-based specialist care used only in case of complications, difficult blood sugar control, or research projects. In other circumstances, general practitioners and specialists share care in a team approach. Evidence has shown that social prescribing led to slight improvements in blood sugar control for people with type 2 diabetes. 
Home telehealth support can be an effective management technique.The use of technology to deliver educational programs for adults with type 2 diabetes includes computer-based self-management interventions to collect for tailored responses to facilitate self-management. There is no adequate evidence to support effects on cholesterol, blood pressure, behavioral change (such as physical activity levels and dietary), depression, weight and health-related quality of life, nor in other biological, cognitive or emotional outcomes. Epidemiology In 2017, 425 million people had diabetes worldwide, up from an estimated 382 million people in 2013 and from 108 million in 1980. Accounting for the shifting age structure of the global population, the prevalence of diabetes is 8.8% among adults, nearly double the rate of 4.7% in 1980. Type 2 makes up about 90% of the cases. Some data indicate rates are roughly equal in women and men, but male excess in diabetes has been found in many populations with higher type 2 incidence, possibly due to sex-related differences in insulin sensitivity, consequences of obesity and regional body fat deposition, and other contributing factors such as high blood pressure, tobacco smoking, and alcohol intake.The WHO estimates that diabetes resulted in 1.5 million deaths in 2012, making it the 8th leading cause of death. However, another 2.2 million deaths worldwide were attributable to high blood glucose and the increased risks of cardiovascular disease and other associated complications (e.g. kidney failure), which often lead to premature death and are often listed as the underlying cause on death certificates rather than diabetes. 
For example, in 2017, the International Diabetes Federation (IDF) estimated that diabetes resulted in 4.0 million deaths worldwide, using modeling to estimate the total number of deaths that could be directly or indirectly attributed to diabetes.Diabetes occurs throughout the world but is more common (especially type 2) in more developed countries. The greatest increase in rates has, however, been seen in low- and middle-income countries, where more than 80% of diabetic deaths occur. The fastest prevalence increase is expected to occur in Asia and Africa, where most people with diabetes will probably live in 2030. The increase in rates in developing countries follows the trend of urbanization and lifestyle changes, including increasingly sedentary lifestyles, less physically demanding work and the global nutrition transition, marked by increased intake of foods that are high energy-dense but nutrient-poor (often high in sugar and saturated fats, sometimes referred to as the "Western-style" diet). The global number of diabetes cases might increase by 48% between 2017 and 2045.As of 2020, 38% of all US adults had prediabetes. Prediabetes is an early stage of diabetes. History Diabetes was one of the first diseases described, with an Egyptian manuscript from c. 1500 BCE mentioning "too great emptying of the urine." The Ebers papyrus includes a recommendation for a drink to take in such cases. The first described cases are believed to have been type 1 diabetes. Indian physicians around the same time identified the disease and classified it as madhumeha or "honey urine", noting the urine would attract ants.The term "diabetes" or "to pass through" was first used in 230 BCE by the Greek Apollonius of Memphis. The disease was considered rare during the time of the Roman empire, with Galen commenting he had only seen two cases during his career. 
This is possibly due to the diet and lifestyle of the ancients, or because the clinical symptoms were observed during the advanced stage of the disease. Galen named the disease "diarrhea of the urine" (diarrhea urinosa).The earliest surviving work with a detailed reference to diabetes is that of Aretaeus of Cappadocia (2nd or early 3rd century CE). He described the symptoms and the course of the disease, which he attributed to the moisture and coldness, reflecting the beliefs of the "Pneumatic School". He hypothesized a correlation between diabetes and other diseases, and he discussed differential diagnosis from the snakebite, which also provokes excessive thirst. His work remained unknown in the West until 1552, when the first Latin edition was published in Venice.Two types of diabetes were identified as separate conditions for the first time by the Indian physicians Sushruta and Charaka in 400–500 CE with one type being associated with youth and another type with being overweight. Effective treatment was not developed until the early part of the 20th century when Canadians Frederick Banting and Charles Best isolated and purified insulin in 1921 and 1922. This was followed by the development of the long-acting insulin NPH in the 1940s. Etymology The word diabetes ( or ) comes from Latin diabētēs, which in turn comes from Ancient Greek διαβήτης (diabētēs), which literally means "a passer through; a siphon". Ancient Greek physician Aretaeus of Cappadocia (fl. 1st century CE) used that word, with the intended meaning "excessive discharge of urine", as the name for the disease. Ultimately, the word comes from Greek διαβαίνειν (diabainein), meaning "to pass through", which is composed of δια- (dia-), meaning "through" and βαίνειν (bainein), meaning "to go". The word "diabetes" is first recorded in English, in the form diabete, in a medical text written around 1425.The word mellitus ( or ) comes from the classical Latin word mellītus, meaning "mellite" (i.e. 
sweetened with honey; honey-sweet). The Latin word comes from mell-, which comes from mel, meaning "honey"; sweetness; pleasant thing, and the suffix -ītus, whose meaning is the same as that of the English suffix "-ite". It was Thomas Willis who in 1675 added "mellitus" to the word "diabetes" as a designation for the disease, when he noticed the urine of a person with diabetes had a sweet taste (glycosuria). This sweet taste had been noticed in urine by the ancient Greeks, Chinese, Egyptians, and Indians. Society and culture The 1989 "St. Vincent Declaration" was the result of international efforts to improve the care accorded to those with diabetes. Doing so is important not only in terms of quality of life and life expectancy but also economically – expenses due to diabetes have been shown to be a major drain on health – and productivity-related resources for healthcare systems and governments.Several countries established more and less successful national diabetes programmes to improve treatment of the disease. Diabetes stigma Diabetes stigma describes the negative attitudes, judgment, discrimination, or prejudice against people with diabetes. Often, the stigma stems from the idea that diabetes (particularly Type 2 diabetes) resulted from poor lifestyle and unhealthy food choices rather than other causal factors like genetics and social determinants of health. Manifestation of stigma can be seen throughout different cultures and contexts. Scenarios include diabetes statuses affecting marriage proposals, workplace-employment, and social standing in communities.Stigma is also seen internally, as people with diabetes can also have negative beliefs about themselves. Often these cases of self-stigma are associated with higher diabetes-specific distress, lower self-efficacy, and poorer provider-patient interactions during diabetes care. 
Racial and economic inequalities Racial and ethnic minorities are disproportionately affected with higher prevalence of diabetes compared to non-minority individuals. While US adults overall have a 40% chance of developing type 2 diabetes, Hispanic/Latino adults chance is more than 50%. African Americans also are much more likely to be diagnosed with diabetes compared to White Americans. Asians have increased risk of diabetes as diabetes can develop at lower BMI due to differences in visceral fat compared to other races. For Asians, diabetes can develop at a younger age and lower body fat compared to other groups. Additionally, diabetes is highly underreported in Asian American people, as 1 in 3 cases are undiagnosed compared to the average 1 in 5 for the nation.People with diabetes who have neuropathic symptoms such as numbness or tingling in feet or hands are twice as likely to be unemployed as those without the symptoms.In 2010, diabetes-related emergency room (ER) visit rates in the United States were higher among people from the lowest income communities (526 per 10,000 population) than from the highest income communities (236 per 10,000 population). Approximately 9.4% of diabetes-related ER visits were for the uninsured. Naming The term "type 1 diabetes" has replaced several former terms, including childhood-onset diabetes, juvenile diabetes, and insulin-dependent diabetes mellitus. Likewise, the term "type 2 diabetes" has replaced several former terms, including adult-onset diabetes, obesity-related diabetes, and noninsulin-dependent diabetes mellitus. Beyond these two types, there is no agreed-upon standard nomenclature.Diabetes mellitus is also occasionally known as "sugar diabetes" to differentiate it from diabetes insipidus. Other animals Diabetes can occur in mammals or reptiles. Birds do not develop diabetes because of their unusually high tolerance for elevated blood glucose levels.In animals, diabetes is most commonly encountered in dogs and cats. 
Middle-aged animals are most commonly affected. Female dogs are twice as likely to be affected as males, while according to some sources, male cats are more prone than females. In both species, all breeds may be affected, but some small dog breeds are particularly likely to develop diabetes, such as Miniature Poodles.Feline diabetes is strikingly similar to human type 2 diabetes. The Burmese, Russian Blue, Abyssinian, and Norwegian Forest cat breeds are at higher risk than other breeds. Overweight cats are also at higher risk.The symptoms may relate to fluid loss and polyuria, but the course may also be insidious. Diabetic animals are more prone to infections. The long-term complications recognized in humans are much rarer in animals. The principles of treatment (weight loss, oral antidiabetics, subcutaneous insulin) and management of emergencies (e.g. ketoacidosis) are similar to those in humans. See also Outline of diabetesDiabetic footBlood glucose monitoring References External links American Diabetes AssociationIDF Diabetes AtlasNational Diabetes Education ProgramADA's Standards of Medical Care in Diabetes 2019Polonsky KS (October 2012). "The past 200 years in diabetes". The New England Journal of Medicine. 367 (14): 1332–1340. doi:10.1056/NEJMra1110560. PMID 23034021. S2CID 9456681."Diabetes". MedlinePlus. U.S. National Library of Medicine. 
\ No newline at end of file
diff --git a/HybridRAG/tests/test_compose_on_gaudi.sh b/HybridRAG/tests/test_compose_on_gaudi.sh
new file mode 100755
index 0000000000..f8c2ccf203
--- /dev/null
+++ b/HybridRAG/tests/test_compose_on_gaudi.sh
@@ -0,0 +1,259 @@
+#!/bin/bash
+# Copyright (C) 2025 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+set -e
+IMAGE_REPO=${IMAGE_REPO:-"opea"}
+IMAGE_TAG=${IMAGE_TAG:-"latest"}
+echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}"
+echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
+export REGISTRY=${IMAGE_REPO}
+export TAG=${IMAGE_TAG}
+export MODEL_CACHE=${model_cache:-"./data"}
+
+WORKPATH=$(dirname "$PWD")
+LOG_PATH="$WORKPATH/tests"
+ip_address=$(hostname -I | awk '{print $1}')
+
+cd $WORKPATH/docker_compose/intel/hpu/gaudi
+source set_env.sh
+
+function build_docker_images() {
+ opea_branch=${opea_branch:-"main"}
+ # If the opea_branch isn't main, replace the git clone branch in Dockerfile.
+ if [[ "${opea_branch}" != "main" ]]; then
+ cd $WORKPATH
+ OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git"
+ NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git"
+ find . -type f -name "Dockerfile*" | while read -r file; do
+ echo "Processing file: $file"
+ sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file"
+ done
+ fi
+
+ cd $WORKPATH/docker_image_build
+ git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git
+ REQ_FILE="GenAIComps/comps/text2cypher/src/requirements.txt"
+ sed -i \
+ -e 's/^sentence-transformers\(==.*\)\?$/sentence-transformers==3.2.1/' \
+ -e 's/^transformers\(==.*\)\?$/transformers==4.45.2/' \
+ "$REQ_FILE"
+
+ pushd GenAIComps
+ echo "GenAIComps test commit is $(git rev-parse HEAD)"
+ docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . 
+ popd && sleep 1s
+
+ git clone https://github.com/vllm-project/vllm.git && cd vllm
+ # Pin vLLM to a known-good tag (a 'git describe' latest-tag lookup here was dead code: its result was immediately overwritten by the pin below).
+ VLLM_VER="v0.8.3"
+ echo "Check out vLLM tag ${VLLM_VER}"
+ git checkout ${VLLM_VER} &> /dev/null
+ # make sure NOT change the pwd
+ cd ../
+
+ echo "Build all the images with --no-cache, check docker_image_build.log for details..."
+ service_list="hybridrag hybridrag-ui dataprep retriever text2cypher vllm nginx"
+ docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
+
+ docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
+
+ docker images && sleep 1s
+}
+
+function start_services() {
+ cd $WORKPATH/docker_compose/intel/hpu/gaudi
+
+ # Start Docker Containers
+ docker compose -f compose.yaml up -d > ${LOG_PATH}/start_services_with_compose.log
+ n=0
+ until [[ "$n" -ge 100 ]]; do
+ docker logs vllm-service > ${LOG_PATH}/vllm_service_start.log 2>&1
+ if grep -q complete ${LOG_PATH}/vllm_service_start.log; then
+ break
+ fi
+ sleep 5s
+ n=$((n+1))
+ done
+}
+
+function dataprep() {
+ cd $WORKPATH/tests/data
+ sleep 25s
+ URL="http://${ip_address}:6007/v1/dataprep/ingest"
+ local CONTENT=$(curl -X POST -H "Content-Type: multipart/form-data" -F "files=@./Diabetes.txt" -F "files=@./Acne_Vulgaris.txt" -F "chunk_size=300" -F "chunk_overlap=20" "$URL")
+ if echo "$CONTENT" | grep -q "Data preparation succeeded"; then
+ echo "Data preparation succeeded."
+ else
+ echo "Data preparation failed." 
+ exit 1 + fi +} + +function validate_service() { + local URL="$1" + local EXPECTED_RESULT="$2" + local SERVICE_NAME="$3" + local DOCKER_NAME="$4" + local INPUT_DATA="$5" + + local HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X POST -d "$INPUT_DATA" -H 'Content-Type: application/json' "$URL") + + if [ "DOCKER_NAME" -eq "text2cypher-gaudi-container" ]; then + docker ps + docker logs text2cypher-gaudi-container + fi + + if [ "$HTTP_STATUS" -eq 200 ]; then + echo "[ $SERVICE_NAME ] HTTP status is 200. Checking content..." + + local CONTENT=$(curl -s -X POST -d "$INPUT_DATA" -H 'Content-Type: application/json' "$URL" | tee ${LOG_PATH}/${SERVICE_NAME}.log) + + if echo "$CONTENT" | grep -q "$EXPECTED_RESULT"; then + echo "[ $SERVICE_NAME ] Content is as expected." + else + echo "[ $SERVICE_NAME ] Content does not match the expected result: $CONTENT" + docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log + if [ "DOCKER_NAME" -eq "hybridrag-xeon-backend-server" ]; then + docker ps + docker logs text2cypher-gaudi-container + fi + exit 1 + fi + else + echo "[ $SERVICE_NAME ] HTTP status is not 200. Received status was $HTTP_STATUS" + docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log + if [ "DOCKER_NAME" -eq "hybridrag-xeon-backend-server" ]; then + docker ps + docker logs text2cypher-gaudi-container + fi + exit 1 + fi + sleep 1s +} + +function validate_microservices() { + # Check if the microservices are running correctly. 
+ + # tei for embedding service + validate_service \ + "${ip_address}:6006/embed" \ + "\[\[" \ + "tei-embedding" \ + "tei-embedding-server" \ + '{"inputs":"What is Deep Learning?"}' + + sleep 1m # retrieval can't curl as expected, try to wait for more time + + # retrieval microservice + test_embedding=$(python3 -c "import random; embedding = [random.uniform(-1, 1) for _ in range(768)]; print(embedding)") + validate_service \ + "${ip_address}:7000/v1/retrieval" \ + " " \ + "retriever" \ + "retriever-redis-server" \ + "{\"text\":\"What is the revenue of Nike in 2023?\",\"embedding\":${test_embedding}}" + + # tei for rerank microservice + validate_service \ + "${ip_address}:8808/rerank" \ + '{"index":1,"score":' \ + "tei-reranking-service" \ + "tei-reranking-server" \ + '{"query":"What is Deep Learning?", "texts": ["Deep Learning is not...", "Deep learning is..."]}' + + # vllm for llm service + validate_service \ + "${ip_address}:9009/v1/chat/completions" \ + "content" \ + "vllm-service" \ + "vllm-service" \ + '{"model": "meta-llama/Meta-Llama-3-8B-Instruct", "messages": [{"role": "user", "content": "What is Deep Learning?"}], "max_tokens": 17}' +} + +function validate_megaservice() { + # Curl the Mega Service + validate_service \ + "${ip_address}:8888/v1/hybridrag" \ + "data" \ + "hybridrag-xeon-backend-server" \ + "hybridrag-xeon-backend-server" \ + '{"messages": "what are the symptoms for Diabetes?"}' + +} + +function validate_text2cypher() { + # text2cypher service + validate_service \ + "${ip_address}:11801/v1/text2cypher" \ + "\[" \ + "text2cypher-gaudi" \ + "text2cypher-gaudi-container" \ + '{"input_text": "what are the symptoms for Diabetes?"}' +} + +function validate_frontend() { + cd $WORKPATH/ui/svelte + local conda_env_name="OPEA_e2e" + export PATH=${HOME}/miniforge3/bin/:$PATH + if conda info --envs | grep -q "$conda_env_name"; then + echo "$conda_env_name exist!" 
+ else + conda create -n ${conda_env_name} python=3.12 -y + fi + + source activate ${conda_env_name} + + sed -i "s/localhost/$ip_address/g" playwright.config.ts + + conda install -c conda-forge nodejs=22.6.0 -y + npm install && npm ci && npx playwright install --with-deps + node -v && npm -v && pip list + + exit_status=0 + npx playwright test || exit_status=$? + + if [ $exit_status -ne 0 ]; then + echo "[TEST INFO]: ---------frontend test failed---------" + exit $exit_status + else + echo "[TEST INFO]: ---------frontend test passed---------" + fi +} + +function stop_docker() { + cd $WORKPATH/docker_compose/intel/hpu/gaudi + docker compose -f compose.yaml down +} + +function main() { + + stop_docker + + if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi + start_time=$(date +%s) + start_services + end_time=$(date +%s) + duration=$((end_time-start_time)) + echo "Mega service start duration is $duration s" && sleep 1s + + validate_microservices + dataprep + + start_time=$(date +%s) + validate_megaservice + end_time=$(date +%s) + duration=$((end_time-start_time)) + echo "Mega service duration is $duration s" + + validate_frontend + + cd $WORKPATH/docker_image_build + rm -rf GenAIComps vllm + + stop_docker + echo y | docker system prune + +} + +main diff --git a/HybridRAG/ui/docker/Dockerfile b/HybridRAG/ui/docker/Dockerfile new file mode 100644 index 0000000000..1d5115f4b5 --- /dev/null +++ b/HybridRAG/ui/docker/Dockerfile @@ -0,0 +1,26 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +# Use node 20.11.1 as the base image +FROM node:20.11.1 + +# Update package manager and install Git +RUN apt-get update -y && apt-get install -y git + +# Copy the front-end code repository +COPY svelte /home/user/svelte + +# Set the working directory +WORKDIR /home/user/svelte + +# Install front-end dependencies +RUN npm install + +# Build the front-end application +RUN npm run build + +# Expose the port of the front-end application +EXPOSE 
5173 + +# Run the front-end application in preview mode +CMD ["npm", "run", "preview", "--", "--port", "5173", "--host", "0.0.0.0"] diff --git a/HybridRAG/ui/svelte/.editorconfig b/HybridRAG/ui/svelte/.editorconfig new file mode 100644 index 0000000000..2b7a6637f7 --- /dev/null +++ b/HybridRAG/ui/svelte/.editorconfig @@ -0,0 +1,10 @@ +[*] +indent_style = tab + +[package.json] +indent_style = space +indent_size = 2 + +[*.md] +indent_style = space +indent_size = 2 diff --git a/HybridRAG/ui/svelte/.env b/HybridRAG/ui/svelte/.env new file mode 100644 index 0000000000..0ac5c8c83a --- /dev/null +++ b/HybridRAG/ui/svelte/.env @@ -0,0 +1,7 @@ +CHAT_BASE_URL = '/v1/hybridrag' + +UPLOAD_FILE_BASE_URL = '/v1/dataprep/ingest' + +GET_FILE = '/v1/dataprep/get' + +DELETE_FILE = '/v1/dataprep/delete' diff --git a/HybridRAG/ui/svelte/.eslintignore b/HybridRAG/ui/svelte/.eslintignore new file mode 100644 index 0000000000..38972655fa --- /dev/null +++ b/HybridRAG/ui/svelte/.eslintignore @@ -0,0 +1,13 @@ +.DS_Store +node_modules +/build +/.svelte-kit +/package +.env +.env.* +!.env.example + +# Ignore files for PNPM, NPM and YARN +pnpm-lock.yaml +package-lock.json +yarn.lock diff --git a/HybridRAG/ui/svelte/.eslintrc.cjs b/HybridRAG/ui/svelte/.eslintrc.cjs new file mode 100644 index 0000000000..a6592d11f7 --- /dev/null +++ b/HybridRAG/ui/svelte/.eslintrc.cjs @@ -0,0 +1,34 @@ +// Copyright (c) 2024 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +module.exports = { + root: true, + parser: "@typescript-eslint/parser", + extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended", "prettier"], + plugins: ["svelte3", "@typescript-eslint", "neverthrow"], + ignorePatterns: ["*.cjs"], + overrides: [{ files: ["*.svelte"], processor: "svelte3/svelte3" }], + settings: { + "svelte3/typescript": () => require("typescript"), + }, + parserOptions: { + sourceType: "module", + ecmaVersion: 2020, + }, + env: { + browser: true, + es2017: true, + node: true, + }, +}; diff --git a/HybridRAG/ui/svelte/.prettierignore b/HybridRAG/ui/svelte/.prettierignore new file mode 100644 index 0000000000..38972655fa --- /dev/null +++ b/HybridRAG/ui/svelte/.prettierignore @@ -0,0 +1,13 @@ +.DS_Store +node_modules +/build +/.svelte-kit +/package +.env +.env.* +!.env.example + +# Ignore files for PNPM, NPM and YARN +pnpm-lock.yaml +package-lock.json +yarn.lock diff --git a/HybridRAG/ui/svelte/.prettierrc b/HybridRAG/ui/svelte/.prettierrc new file mode 100644 index 0000000000..c932dd178f --- /dev/null +++ b/HybridRAG/ui/svelte/.prettierrc @@ -0,0 +1 @@ +{"pluginSearchDirs": ["."], "overrides": [{"files": "*.svelte", "options": {"parser": "svelte"}}]} diff --git a/HybridRAG/ui/svelte/README.md b/HybridRAG/ui/svelte/README.md new file mode 100644 index 0000000000..d3c26b8f0f --- /dev/null +++ b/HybridRAG/ui/svelte/README.md @@ -0,0 +1,42 @@ +# ChatQnA Customized UI + +## 📸 Project Screenshots + +![project-screenshot](../../assets/img/chat_ui_init.png) +![project-screenshot](../../assets/img/chat_ui_response.png) +![project-screenshot](../../assets/img/chat_ui_upload.png) + +## 🧐 Features + +Here're some of the project's features: + +- Start a Text Chat:Initiate a text chat with the ability to input written conversations, where the dialogue content can also be customized based on uploaded files. +- Clear: Clear the record of the current dialog box without retaining the contents of the dialog box. 
+- Chat history: Historical chat records can still be retained after refreshing, making it easier for users to view the context. +- Scroll to Bottom / Top: The chat automatically slides to the bottom. Users can also click the top icon to slide to the top of the chat record. +- End to End Time: Shows the time spent on the current conversation. +- Upload File: The choice between uploading locally or copying a remote link. Chat according to uploaded knowledge base. +- Delete File: Delete a certain uploaded file. + +## 🛠️ Get it Running + +1. Clone the repo. + +2. cd command to the current folder. + +3. Modify the required .env variables. + + ``` + CHAT_BASE_URL = '' + + UPLOAD_FILE_BASE_URL = '' + + GET_FILE = '' + + DELETE_FILE = '' + + ``` + +4. Execute `npm install` to install the corresponding dependencies. + +5. Execute `npm run dev` in both environments diff --git a/HybridRAG/ui/svelte/package.json b/HybridRAG/ui/svelte/package.json new file mode 100644 index 0000000000..0f19db6e56 --- /dev/null +++ b/HybridRAG/ui/svelte/package.json @@ -0,0 +1,60 @@ +{ + "name": "chat-qna", + "version": "0.0.1", + "private": true, + "scripts": { + "dev": "vite dev", + "build": "vite build", + "preview": "vite preview", + "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", + "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch", + "lint": "prettier --check . && eslint .", + "format": "prettier --write ." 
+ }, + "peerDependencies": { + "svelte": "^4.0.0" + }, + "devDependencies": { + "@fortawesome/free-solid-svg-icons": "6.2.0", + "@playwright/test": "^1.45.2", + "@sveltejs/adapter-auto": "^3.0.0", + "@sveltejs/kit": "^2.0.0", + "@sveltejs/vite-plugin-svelte": "^3.0.0", + "@tailwindcss/typography": "0.5.7", + "@types/debug": "4.1.7", + "@types/node": "^20.12.13", + "@typescript-eslint/eslint-plugin": "^5.27.0", + "@typescript-eslint/parser": "^5.27.0", + "autoprefixer": "^10.4.16", + "date-picker-svelte": "^2.6.0", + "debug": "4.3.4", + "postcss": "^8.4.31", + "postcss-load-config": "^4.0.1", + "postcss-preset-env": "^8.3.2", + "prettier": "^2.8.8", + "prettier-plugin-svelte": "^2.7.0", + "prettier-plugin-tailwindcss": "^0.3.0", + "svelte": "^4.2.7", + "svelte-check": "^3.6.0", + "svelte-fa": "3.0.3", + "tailwindcss": "^3.3.6", + "tslib": "^2.4.1", + "typescript": "^5.0.0", + "vite": "^5.0.11" + }, + "type": "module", + "dependencies": { + "date-fns": "^2.30.0", + "driver.js": "^1.3.0", + "flowbite": "^2.5.2", + "flowbite-svelte": "^0.38.5", + "flowbite-svelte-icons": "^1.4.0", + "fuse.js": "^6.6.2", + "lodash": "^4.17.21", + "playwright": "^1.44.0", + "ramda": "^0.29.0", + "sse.js": "^0.6.1", + "svelte-notifications": "^0.9.98", + "svrollbar": "^0.12.0" + } +} diff --git a/HybridRAG/ui/svelte/playwright.config.ts b/HybridRAG/ui/svelte/playwright.config.ts new file mode 100644 index 0000000000..e26b9f3f8c --- /dev/null +++ b/HybridRAG/ui/svelte/playwright.config.ts @@ -0,0 +1,87 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 + +import { defineConfig, devices } from "@playwright/test"; + +/** + * Read environment variables from file. + * https://github.com/motdotla/dotenv + */ +// require('dotenv').config(); + +/** + * See https://playwright.dev/docs/test-configuration. + */ +export default defineConfig({ + testDir: "./tests", + /* Maximum time one test can run for. 
*/ + timeout: 30 * 1000, + expect: { + /** + * Maximum time expect() should wait for the condition to be met. + * For example in `await expect(locator).toHaveText();` + */ + timeout: 20000, + }, + /* Run tests in files in parallel */ + fullyParallel: true, + /* Fail the build on CI if you accidentally left test.only in the source code. */ + forbidOnly: !!process.env.CI, + /* Retry on CI only */ + retries: process.env.CI ? 2 : 0, + /* Opt out of parallel tests on CI. */ + workers: process.env.CI ? 1 : undefined, + /* Reporter to use. See https://playwright.dev/docs/test-reporters */ + reporter: [["html", { open: "never" }]], + /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ + use: { + /* Maximum time each action such as `click()` can take. Defaults to 0 (no limit). */ + actionTimeout: 0, + /* Base URL to use in actions like `await page.goto('/')`. */ + baseURL: "http://localhost:80", + + /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */ + trace: "on-first-retry", + }, + + /* Configure projects for major browsers */ + projects: [ + // { + // name: "chromium", + // use: { ...devices["Desktop Chrome"] }, + // }, + + /* Test against mobile viewports. */ + // { + // name: 'Mobile Chrome', + // use: { ...devices['Pixel 5'] }, + // }, + // { + // name: 'Mobile Safari', + // use: { ...devices['iPhone 12'] }, + // }, + + /* Test against branded browsers. */ + // { + // name: 'Microsoft Edge', + // use: { channel: 'msedge' }, + // }, + { + name: "webkit", + use: { ...devices["Desktop Safari"] }, + }, + // { + // name: 'Google Chrome', + // use: { channel: 'chrome' }, + // }, + ], + + /* Folder for test artifacts such as screenshots, videos, traces, etc. 
*/ + // outputDir: 'test-results/', + + /* Run your local dev server before starting the tests */ + // webServer: { + // command: 'npm run start', + // port: 3000, + // }, +}); diff --git a/HybridRAG/ui/svelte/postcss.config.cjs b/HybridRAG/ui/svelte/postcss.config.cjs new file mode 100644 index 0000000000..b384b43ebe --- /dev/null +++ b/HybridRAG/ui/svelte/postcss.config.cjs @@ -0,0 +1,27 @@ +// Copyright (c) 2024 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +const tailwindcss = require("tailwindcss"); +const autoprefixer = require("autoprefixer"); + +const config = { + plugins: [ + //Some plugins, like tailwindcss/nesting, need to run before Tailwind, + tailwindcss(), + //But others, like autoprefixer, need to run after, + autoprefixer, + ], +}; + +module.exports = config; diff --git a/HybridRAG/ui/svelte/src/app.d.ts b/HybridRAG/ui/svelte/src/app.d.ts new file mode 100644 index 0000000000..fa6a0abf77 --- /dev/null +++ b/HybridRAG/ui/svelte/src/app.d.ts @@ -0,0 +1,19 @@ +// Copyright (c) 2024 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// See: https://kit.svelte.dev/docs/types#app +// import { Result} from "neverthrow"; +interface Window { + deviceType: string; +} diff --git a/HybridRAG/ui/svelte/src/app.html b/HybridRAG/ui/svelte/src/app.html new file mode 100644 index 0000000000..db69926ea8 --- /dev/null +++ b/HybridRAG/ui/svelte/src/app.html @@ -0,0 +1,28 @@ + + + + + + + + + %sveltekit.head% + + +
%sveltekit.body%
+ + diff --git a/HybridRAG/ui/svelte/src/app.postcss b/HybridRAG/ui/svelte/src/app.postcss new file mode 100644 index 0000000000..963bbca4ef --- /dev/null +++ b/HybridRAG/ui/svelte/src/app.postcss @@ -0,0 +1,86 @@ +/* Write your global styles here, in PostCSS syntax */ +@tailwind base; +@tailwind components; +@tailwind utilities; + +html, body { + height: 100%; +} + +.btn { + @apply flex-nowrap; +} +a.btn { + @apply no-underline; +} +.input { + @apply text-base; +} + +.bg-dark-blue { + background-color: #004a86; +} + +.bg-light-blue { + background-color: #0068b5; +} + +.bg-turquoise { + background-color: #00a3f6; +} + +.bg-header { + background-color: #ffffff; +} + +.bg-button { + background-color: #0068b5; +} + +.bg-title { + background-color: #f7f7f7; +} + +.text-header { + color: #0068b5; +} + +.text-button { + color: #252e47; +} + +.text-title-color { + color: rgb(38,38,38); +} + +.font-intel { + font-family: "intel-clear","tahoma",Helvetica,"helvetica",Arial,sans-serif; +} + +.font-title-intel { + font-family: "intel-one","intel-clear",Helvetica,Arial,sans-serif; +} + +.bg-footer { + background-color: #e7e7e7; +} + +.bg-light-green { + background-color: #d7f3a1; +} + +.bg-purple { + background-color: #653171; +} + +.bg-dark-blue { + background-color: #224678; +} + +.border-input-color { + border-color: #605e5c; +} + +.w-12\/12 { + width: 100% +} diff --git a/HybridRAG/ui/svelte/src/lib/assets/DocManagement/LinkfolderIcon.svelte b/HybridRAG/ui/svelte/src/lib/assets/DocManagement/LinkfolderIcon.svelte new file mode 100644 index 0000000000..66d1b006a5 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/DocManagement/LinkfolderIcon.svelte @@ -0,0 +1,36 @@ + + + + + diff --git a/HybridRAG/ui/svelte/src/lib/assets/DocManagement/fileIcon.svelte b/HybridRAG/ui/svelte/src/lib/assets/DocManagement/fileIcon.svelte new file mode 100644 index 0000000000..39b204bbfd --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/DocManagement/fileIcon.svelte @@ -0,0 +1,30 @@ + + 
+ + + diff --git a/HybridRAG/ui/svelte/src/lib/assets/DocManagement/folderIcon.svelte b/HybridRAG/ui/svelte/src/lib/assets/DocManagement/folderIcon.svelte new file mode 100644 index 0000000000..5fd4e14fa9 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/DocManagement/folderIcon.svelte @@ -0,0 +1,30 @@ + + + + + diff --git a/HybridRAG/ui/svelte/src/lib/assets/avatar/svelte/Delete.svelte b/HybridRAG/ui/svelte/src/lib/assets/avatar/svelte/Delete.svelte new file mode 100644 index 0000000000..8847a22275 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/avatar/svelte/Delete.svelte @@ -0,0 +1,30 @@ + + + + + + { + dispatch('DeleteAvatar') }} +viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" width="20" height="20"> + + diff --git a/HybridRAG/ui/svelte/src/lib/assets/chat/svelte/Assistant.svelte b/HybridRAG/ui/svelte/src/lib/assets/chat/svelte/Assistant.svelte new file mode 100644 index 0000000000..b68d2a08cd --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/chat/svelte/Assistant.svelte @@ -0,0 +1,44 @@ + + + diff --git a/HybridRAG/ui/svelte/src/lib/assets/chat/svelte/PaperAirplane.svelte b/HybridRAG/ui/svelte/src/lib/assets/chat/svelte/PaperAirplane.svelte new file mode 100644 index 0000000000..d1d14077f2 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/chat/svelte/PaperAirplane.svelte @@ -0,0 +1,68 @@ + + + + + + + + diff --git a/HybridRAG/ui/svelte/src/lib/assets/chat/svelte/PersonOutlined.svelte b/HybridRAG/ui/svelte/src/lib/assets/chat/svelte/PersonOutlined.svelte new file mode 100644 index 0000000000..dd2f9fdb78 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/chat/svelte/PersonOutlined.svelte @@ -0,0 +1,26 @@ + + + + + diff --git a/HybridRAG/ui/svelte/src/lib/assets/layout/css/driver.css b/HybridRAG/ui/svelte/src/lib/assets/layout/css/driver.css new file mode 100644 index 0000000000..453db6082a --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/layout/css/driver.css @@ -0,0 +1,94 @@ +.driverjs-theme { + 
background: transparent; + color: #fff; + box-shadow: none; + padding: 0; +} + +.driver-popover-arrow { + border: 10px solid transparent; + animation: blink 1s 3 steps(1); +} + +@keyframes blink { + 0% { + opacity: 1; + } + 50% { + opacity: 0.2; + } + 100% { + opacity: 1; + } +} + +.driver-popover.driverjs-theme .driver-popover-arrow-side-left.driver-popover-arrow { + border-left-color: #174ed1; +} + +.driver-popover.driverjs-theme .driver-popover-arrow-side-right.driver-popover-arrow { + border-right-color: #174ed1; +} + +.driver-popover.driverjs-theme .driver-popover-arrow-side-top.driver-popover-arrow { + border-top-color: #174ed1; +} + +.driver-popover.driverjs-theme .driver-popover-arrow-side-bottom.driver-popover-arrow { + border-bottom-color: #174ed1; +} + +.driver-popover-footer { + background: transparent; + color: #fff; +} +.driver-popover-title { + border-top-left-radius: 5px; + border-top-right-radius: 5px; +} + +.driver-popover-title, +.driver-popover-description { + display: block; + padding: 15px 15px 7px 15px; + background: #174ed1; + border: none; +} + +.driver-popover-close-btn { + color: #fff; +} + +.driver-popover-footer button:hover, +.driver-popover-footer button:focus { + background: #174ed1; + color: #fff; +} + +.driver-popover-description { + padding: 5px 15px; + border-bottom-left-radius: 5px; + border-bottom-right-radius: 5px; +} + +.driver-popover-title[style*="block"] + .driver-popover-description { + margin: 0; +} +.driver-popover-progress-text { + color: #fff; +} + +.driver-popover-footer button { + background: #174ed1; + border: 2px #174ed1 dashed; + color: #fff; + border-radius: 50%; + text-shadow: none; +} +.driver-popover-close-btn:hover, +.driver-popover-close-btn:focus { + color: #fff; +} +.driver-popover-navigation-btns button + button { + margin-left: 10px; +} diff --git a/HybridRAG/ui/svelte/src/lib/assets/upload/deleteIcon.svelte b/HybridRAG/ui/svelte/src/lib/assets/upload/deleteIcon.svelte new file mode 100644 index 
0000000000..2ca57ed8aa --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/upload/deleteIcon.svelte @@ -0,0 +1,22 @@ + + + diff --git a/HybridRAG/ui/svelte/src/lib/assets/upload/loading-button.svelte b/HybridRAG/ui/svelte/src/lib/assets/upload/loading-button.svelte new file mode 100644 index 0000000000..6310d81f6c --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/upload/loading-button.svelte @@ -0,0 +1,25 @@ + + + + + + diff --git a/HybridRAG/ui/svelte/src/lib/assets/upload/next.svelte b/HybridRAG/ui/svelte/src/lib/assets/upload/next.svelte new file mode 100644 index 0000000000..70f4fe25e8 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/upload/next.svelte @@ -0,0 +1,31 @@ + + + diff --git a/HybridRAG/ui/svelte/src/lib/assets/upload/no-file.svelte b/HybridRAG/ui/svelte/src/lib/assets/upload/no-file.svelte new file mode 100644 index 0000000000..f89f7aafbb --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/upload/no-file.svelte @@ -0,0 +1,37 @@ + + + diff --git a/HybridRAG/ui/svelte/src/lib/assets/upload/previous.svelte b/HybridRAG/ui/svelte/src/lib/assets/upload/previous.svelte new file mode 100644 index 0000000000..c47d9c49da --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/upload/previous.svelte @@ -0,0 +1,31 @@ + + + diff --git a/HybridRAG/ui/svelte/src/lib/assets/voice/svg/paste.svg b/HybridRAG/ui/svelte/src/lib/assets/voice/svg/paste.svg new file mode 100644 index 0000000000..8910f0ea64 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/voice/svg/paste.svg @@ -0,0 +1 @@ + diff --git a/HybridRAG/ui/svelte/src/lib/assets/voice/svg/uploadFile.svg b/HybridRAG/ui/svelte/src/lib/assets/voice/svg/uploadFile.svg new file mode 100644 index 0000000000..9a77286a8f --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/assets/voice/svg/uploadFile.svg @@ -0,0 +1 @@ + diff --git a/HybridRAG/ui/svelte/src/lib/modules/chat/ChatMessage.svelte b/HybridRAG/ui/svelte/src/lib/modules/chat/ChatMessage.svelte new file mode 100644 index 0000000000..b136997083 --- 
/dev/null +++ b/HybridRAG/ui/svelte/src/lib/modules/chat/ChatMessage.svelte @@ -0,0 +1,70 @@ + + + + +
+
+ +
+
+
+

+ {@html msg.content} +

+
+
+
+{#if time} +
+ { + dispatch("scrollTop"); + }} + /> +
+{/if} + + diff --git a/HybridRAG/ui/svelte/src/lib/modules/chat/MessageAvatar.svelte b/HybridRAG/ui/svelte/src/lib/modules/chat/MessageAvatar.svelte new file mode 100644 index 0000000000..0f6a24b96d --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/modules/chat/MessageAvatar.svelte @@ -0,0 +1,30 @@ + + + + +{#if role === MessageRole.User} + +{:else} + +{/if} diff --git a/HybridRAG/ui/svelte/src/lib/modules/chat/MessageTimer.svelte b/HybridRAG/ui/svelte/src/lib/modules/chat/MessageTimer.svelte new file mode 100644 index 0000000000..0f441226db --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/modules/chat/MessageTimer.svelte @@ -0,0 +1,67 @@ + + + + +
+
+
+ + { + dispatch("handleTop"); + }} + > +
+
+
+ +
+
+ End to End Time: +

{time}s

+
+
+
+
diff --git a/HybridRAG/ui/svelte/src/lib/modules/frame/Layout.svelte b/HybridRAG/ui/svelte/src/lib/modules/frame/Layout.svelte new file mode 100644 index 0000000000..0c5b997d28 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/modules/frame/Layout.svelte @@ -0,0 +1,48 @@ + + + + +
+
+
+ + + +
+
+
diff --git a/HybridRAG/ui/svelte/src/lib/network/chat/Network.ts b/HybridRAG/ui/svelte/src/lib/network/chat/Network.ts new file mode 100644 index 0000000000..060c5a5ffb --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/network/chat/Network.ts @@ -0,0 +1,41 @@ +// Copyright (c) 2024 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import { env } from "$env/dynamic/public"; +import { SSE } from "sse.js"; + +const CHAT_BASE_URL = env.CHAT_BASE_URL; +const MODEL_ID = env.MODEL_ID; + +export async function fetchTextStream(query: string) { + let payload = {}; + let url = ""; + let modelId = "meta-llama/Meta-Llama-3-8B-Instruct"; + + if (MODEL_ID) { + modelId = MODEL_ID; + } + + payload = { + model: `${modelId}`, + messages: query, + }; + url = `${CHAT_BASE_URL}`; + console.log("fetchTextStream", url); + + return new SSE(url, { + headers: { "Content-Type": "application/json" }, + payload: JSON.stringify(payload), + }); +} diff --git a/HybridRAG/ui/svelte/src/lib/network/upload/Network.ts b/HybridRAG/ui/svelte/src/lib/network/upload/Network.ts new file mode 100644 index 0000000000..a9c76462a5 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/network/upload/Network.ts @@ -0,0 +1,82 @@ +// Copyright (c) 2024 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import { env } from "$env/dynamic/public"; + +const UPLOAD_FILE_BASE_URL = env.UPLOAD_FILE_BASE_URL; +const GET_FILE = env.GET_FILE; +const DELETE_FILE = env.DELETE_FILE; + +async function fetchFunc(url, init) { + try { + const response = await fetch(url, init); + if (!response.ok) throw response.status; + + return await response.json(); + } catch (error) { + console.error("network error: ", error); + + return undefined; + } +} + +export async function fetchKnowledgeBaseId(file: Blob, fileName: string) { + const url = `${UPLOAD_FILE_BASE_URL}`; + const formData = new FormData(); + formData.append("files", file, fileName); + const init: RequestInit = { + method: "POST", + body: formData, + }; + + return fetchFunc(url, init); +} + +export async function fetchKnowledgeBaseIdByPaste(pasteUrlList: any) { + const url = `${UPLOAD_FILE_BASE_URL}`; + const formData = new FormData(); + formData.append("link_list", JSON.stringify(pasteUrlList)); + const init: RequestInit = { + method: "POST", + body: formData, + }; + + return fetchFunc(url, init); +} + +export async function fetchAllFile() { + const url = `${GET_FILE}`; + const init: RequestInit = { + method: "POST", + headers: { "Content-Type": "application/json" }, + }; + + return fetchFunc(url, init); +} + +export async function deleteFiles(path) { + const UploadKnowledge_URL = DELETE_FILE; + + const data = { + file_path: path, + }; + + const init: RequestInit = { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(data), + }; + + return 
fetchFunc(UploadKnowledge_URL, init); +} diff --git a/HybridRAG/ui/svelte/src/lib/shared/Utils.ts b/HybridRAG/ui/svelte/src/lib/shared/Utils.ts new file mode 100644 index 0000000000..fb182cef67 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/shared/Utils.ts @@ -0,0 +1,54 @@ +// Copyright (c) 2024 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +export function scrollToBottom(scrollToDiv: HTMLElement) { + if (scrollToDiv) { + setTimeout( + () => + scrollToDiv.scroll({ + behavior: "auto", + top: scrollToDiv.scrollHeight, + }), + 100, + ); + } +} + +export function scrollToTop(scrollToDiv: HTMLElement) { + if (scrollToDiv) { + setTimeout( + () => + scrollToDiv.scroll({ + behavior: "auto", + top: 0, + }), + 100, + ); + } +} + +export function getCurrentTimeStamp() { + return Math.floor(new Date().getTime()); +} + +export function fromTimeStampToTime(timeStamp: number) { + return new Date(timeStamp * 1000).toTimeString().slice(0, 8); +} + +export function formatTime(seconds) { + const hours = String(Math.floor(seconds / 3600)).padStart(2, "0"); + const minutes = String(Math.floor((seconds % 3600) / 60)).padStart(2, "0"); + const remainingSeconds = String(seconds % 60).padStart(2, "0"); + return `${hours}:${minutes}:${remainingSeconds}`; +} diff --git a/HybridRAG/ui/svelte/src/lib/shared/components/chat/gallery.svelte b/HybridRAG/ui/svelte/src/lib/shared/components/chat/gallery.svelte new file mode 100644 index 0000000000..a89e857b6b --- 
/dev/null +++ b/HybridRAG/ui/svelte/src/lib/shared/components/chat/gallery.svelte @@ -0,0 +1,156 @@ + + + + + + + diff --git a/HybridRAG/ui/svelte/src/lib/shared/components/doc_management/docCard.svelte b/HybridRAG/ui/svelte/src/lib/shared/components/doc_management/docCard.svelte new file mode 100644 index 0000000000..451ee8e3ca --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/shared/components/doc_management/docCard.svelte @@ -0,0 +1,150 @@ + + + + + +
+ +
+ + +
+

Confirm file deletion?

+ + +
+
+ +
+ {#each files as file, index} +
+ {#if file.type === "File"} +
+ +
+

+ {file.name} +

+ {:else if file.type === "Directory" && file.id === "uploaded_links"} + + {:else} + + {/if} + + + +
+ {/each} +
diff --git a/HybridRAG/ui/svelte/src/lib/shared/components/doc_management/treeView/svelte-tree.svelte b/HybridRAG/ui/svelte/src/lib/shared/components/doc_management/treeView/svelte-tree.svelte new file mode 100644 index 0000000000..66dae55bef --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/shared/components/doc_management/treeView/svelte-tree.svelte @@ -0,0 +1,35 @@ + + + + +
+ {#if data && data.length > 0} +
    + +
+ {:else} +

Folder is empty. Please upload a file.

+ {/if} +
diff --git a/HybridRAG/ui/svelte/src/lib/shared/components/doc_management/treeView/tree-branch.svelte b/HybridRAG/ui/svelte/src/lib/shared/components/doc_management/treeView/tree-branch.svelte new file mode 100644 index 0000000000..27e9d276b9 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/shared/components/doc_management/treeView/tree-branch.svelte @@ -0,0 +1,46 @@ + + + + +{#if data && data.length > 0} + {#each data as item} + + {/each} +{:else} +

Folder is empty. Please upload a file.

+{/if} diff --git a/HybridRAG/ui/svelte/src/lib/shared/components/doc_management/treeView/tree-node.svelte b/HybridRAG/ui/svelte/src/lib/shared/components/doc_management/treeView/tree-node.svelte new file mode 100644 index 0000000000..eac311df83 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/shared/components/doc_management/treeView/tree-node.svelte @@ -0,0 +1,111 @@ + + + + +
  • +
    + + {#if node.type === "Directory"} + {#if open} + + + + + {:else} + + + + + {/if} + {#if node.id === "uploaded_links"} + + {:else} + + {/if} + {:else} + + {/if} + + + {node?.name} +
    + + {#if open && node.type === "Directory"} +
      + {#each node.children as child} + + {/each} +
    + {/if} +
  • diff --git a/HybridRAG/ui/svelte/src/lib/shared/components/loading/Loading.svelte b/HybridRAG/ui/svelte/src/lib/shared/components/loading/Loading.svelte new file mode 100644 index 0000000000..51e89cfe7e --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/shared/components/loading/Loading.svelte @@ -0,0 +1,48 @@ + + +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    diff --git a/HybridRAG/ui/svelte/src/lib/shared/components/loading/Spinner.svelte b/HybridRAG/ui/svelte/src/lib/shared/components/loading/Spinner.svelte new file mode 100644 index 0000000000..1b0a086ad1 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/shared/components/loading/Spinner.svelte @@ -0,0 +1,68 @@ + + + + +
    + + + +
    + + diff --git a/HybridRAG/ui/svelte/src/lib/shared/components/scrollbar/Scrollbar.svelte b/HybridRAG/ui/svelte/src/lib/shared/components/scrollbar/Scrollbar.svelte new file mode 100644 index 0000000000..f18e23e690 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/shared/components/scrollbar/Scrollbar.svelte @@ -0,0 +1,48 @@ + + + + +
    + +
    + +
    +
    +
    + + diff --git a/HybridRAG/ui/svelte/src/lib/shared/components/upload/PasteKnowledge.svelte b/HybridRAG/ui/svelte/src/lib/shared/components/upload/PasteKnowledge.svelte new file mode 100644 index 0000000000..d0758e770c --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/shared/components/upload/PasteKnowledge.svelte @@ -0,0 +1,52 @@ + + + + + diff --git a/HybridRAG/ui/svelte/src/lib/shared/components/upload/upload-knowledge.svelte b/HybridRAG/ui/svelte/src/lib/shared/components/upload/upload-knowledge.svelte new file mode 100644 index 0000000000..09a22a6ba7 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/shared/components/upload/upload-knowledge.svelte @@ -0,0 +1,49 @@ + + + + +
    + +
    diff --git a/HybridRAG/ui/svelte/src/lib/shared/components/upload/uploadFile.svelte b/HybridRAG/ui/svelte/src/lib/shared/components/upload/uploadFile.svelte new file mode 100644 index 0000000000..541e115599 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/shared/components/upload/uploadFile.svelte @@ -0,0 +1,184 @@ + + + + +
    + +
    + + +
    +
    + Data Source +
    + (hidden6 = true)} + class="mb-4 dark:text-white" + /> +
    +

    + Please upload your local file or paste a remote file link, and Chat will + respond based on the content of the uploaded file. +

    + + + + Upload File + + + + Paste Link + + + + {#if uploading} +
    + +
    + {/if} + + {#if files.length > 0} + + {:else} +
    + +

    No files uploaded

    +
    + {/if} +
    diff --git a/HybridRAG/ui/svelte/src/lib/shared/constant/Interface.ts b/HybridRAG/ui/svelte/src/lib/shared/constant/Interface.ts new file mode 100644 index 0000000000..221f17a263 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/shared/constant/Interface.ts @@ -0,0 +1,47 @@ +// Copyright (c) 2024 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +export enum MessageRole { + Assistant, + User, +} + +export enum MessageType { + Text, + SingleAudio, + AudioList, + SingleImage, + ImageList, + singleVideo, +} + +type Map = T extends MessageType.Text | MessageType.SingleAudio + ? string + : T extends MessageType.AudioList + ? string[] + : T extends MessageType.SingleImage + ? { imgSrc: string; imgId: string } + : { imgSrc: string; imgId: string }[]; + +export interface Message { + role: MessageRole; + type: MessageType; + content: Map; + time: number; +} + +export enum LOCAL_STORAGE_KEY { + STORAGE_CHAT_KEY = "chatMessages", + STORAGE_TIME_KEY = "initTime", +} diff --git a/HybridRAG/ui/svelte/src/lib/shared/stores/common/Store.ts b/HybridRAG/ui/svelte/src/lib/shared/stores/common/Store.ts new file mode 100644 index 0000000000..7316e803a1 --- /dev/null +++ b/HybridRAG/ui/svelte/src/lib/shared/stores/common/Store.ts @@ -0,0 +1,41 @@ +// Copyright (c) 2024 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import { writable } from "svelte/store"; + +export let open = writable(true); + +export let knowledgeAccess = writable(true); + +export let showTemplate = writable(false); + +export let showSidePage = writable(false); + +export let droppedObj = writable({}); + +export let isLoading = writable(false); + +export let newUploadNum = writable(0); + +export let ifStoreMsg = writable(true); + +export const resetControl = writable(false); + +export const knowledge1 = writable<{ + id: string; +}>(); + +export const knowledgeName = writable(""); + +export const storageFiles = writable([]); diff --git a/HybridRAG/ui/svelte/src/routes/+layout.svelte b/HybridRAG/ui/svelte/src/routes/+layout.svelte new file mode 100644 index 0000000000..8141177d4a --- /dev/null +++ b/HybridRAG/ui/svelte/src/routes/+layout.svelte @@ -0,0 +1,48 @@ + + + + + + +
    + +
    + +
    +
    diff --git a/HybridRAG/ui/svelte/src/routes/+page.svelte b/HybridRAG/ui/svelte/src/routes/+page.svelte new file mode 100644 index 0000000000..bac35fa18c --- /dev/null +++ b/HybridRAG/ui/svelte/src/routes/+page.svelte @@ -0,0 +1,318 @@ + + + + + +
    +
    +
    +

    HybridRAGQnA

    + +
    +
    +
    +
    + { + if (event.key === "Enter" && !event.shiftKey && query) { + event.preventDefault(); + handleTextSubmit(); + } + }} + /> + +
    +
    +
    + + + {#if Array.isArray(chatMessages) && chatMessages.length > 0 && !loading} +
    +
    + +
    +
    + {/if} + + +
    + + {#each chatMessages as message, i} + handleTop()} + msg={message} + time={i === 0 || (message.time > 0 && message.time < 100) + ? message.time + : ""} + /> + {/each} + + + {#if loading} + + {/if} +
    + +
    +
    + + diff --git a/HybridRAG/ui/svelte/src/routes/+page.ts b/HybridRAG/ui/svelte/src/routes/+page.ts new file mode 100644 index 0000000000..f4de8d6760 --- /dev/null +++ b/HybridRAG/ui/svelte/src/routes/+page.ts @@ -0,0 +1,26 @@ +// Copyright (c) 2024 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import { browser } from "$app/environment"; +import { LOCAL_STORAGE_KEY } from "$lib/shared/constant/Interface"; + +export const load = async () => { + if (browser) { + const chat = localStorage.getItem(LOCAL_STORAGE_KEY.STORAGE_CHAT_KEY); + + return { + chatMsg: JSON.parse(chat || "[]"), + }; + } +}; diff --git a/HybridRAG/ui/svelte/static/favicon.png b/HybridRAG/ui/svelte/static/favicon.png new file mode 100644 index 0000000000000000000000000000000000000000..75b997f8156c09d1c72026d98c80d0227210da38 GIT binary patch literal 70954 zcmce-byQSs+cyjd0wN))ASsGUcc-M%4MV7OH$#Vll%#~TfC5Sh12Z&8=g>n8ol-;R z5bs8>>$&gseb2w|>)Nu`-m~{UkMoG%kvl?NRe|sh^&Jch3_>MESxpQKtj{<9@vwj^ z`dai^7#Iu~O0v@0UYJ{%xL!1J=hx#8gX2hKSau&VJ2D0{JKCa@oa3hxq=MRr9zWa` zs=ju42%g3SSF*atP=-IQVP?c9F`Osj=ug<@v&Fl7s=yMGmtSpja@|)y>;*}%=zgwy zwlVZ5R<{nqn{BWIo=XMK#t&1cEuCZ<&LtYoedP!|f0rL&Wnc#qDJoWdmG{C=7dchr zu&tvD#c?9Rz`QvW?_(tx;w*6xGOB$hxJQBc=i+^8Og!Pc?*H5pZiYp&FXULCBVM=g z^v|_mStbm|u(j6ihJAWUE$CK(}T)tTgI7m9N8J1n9y&?`}BWiC*!oa&5jS=!thUm}F7+5aK7&3I7 z(Sns$f)8>2T#Hi#9wBl4|7=m#Cs)?^h9nH^5Aql?M2xt9_<<2D^aO)3cBzAA3PKZu z^_OwX|H~Fln8Y=5IsQ`h;?e)YBf{-TLWSTz6f%&nWr+Uo;4)V2m7Dx+0wZ6?VrWO1w`4C 
z>2~q+g~r)ImstNY?6cimz-te{@l1|(#5-x z&P!eV;Hck*upjvQ{fPZLpoS@q)V~eNdS+t;|J_Vx&?M^T1k$cJ<~hhJsVc8$*2K;Z zqAxn?LypfL>%^#LXPs5uoHjX_pfM*_p7Yo(hrMc6V6S+!7NN3R|5t-Cf~`plD@524 zF}5Jzq_H>UVBJZi9d+i|u>hVhZ`EbJ7ESUu{>uDJyIy;1r83wz4*N{!4^@uq0{2D; z@&8o-tl+c`B8bPW%)*`?nJ6CoFgcV$%X-0~4@mxf=24O0gccQ4)o9S~#X){x6YQW{ zP@NARM&7#%TouSs(3Y$AR|0stvwu-bpk^kDghOv*Vlo> zLKZlj|1QcCY+|5cUMe-v)BwpJGO$2)RT}X(Jv3Wx^zdKA@|E|uGkjrp-jZK=*56d^ z`#!h&c7Q^Sv5UxGBmBxRU(RS;Q7m?Vpi0sW^R0ghroCO23}Hs9fO3b1grFI!aI`fa!~?wT!=g&53$ z-|kB7gD6=OY}MO|*ObzZQvQ7^A5ZLZb#v&s`U4*C>)!tSt4LDHe=eA9ypw(N z*LdIku{~UH`|H0r7yS7TV0Dh-M6~?H2W*8K^Cm_`{`$vuxaFEbT{ehnJR-7ErDYvm zmeu%9?UB5HmW+@BZjTbXS86tjc|%n~@(sLXRZ>Yl|AR4qerHq**}U&@qm{v5AKw^1 z%XWKg?scK295S*t79~9Z-lF8cmpop z!zLGnLN_8~#sUC2LKQNqpKG_S1_xcc#c6UG3M<$SAl1J= z8ADy^IZXaB74^j5cf70FmV)<{OuN`zk86GkSIs6HIspXrTmN*ku^Oh?sGF6SmxLVA z7(U_nY2tTs{@Yl-kK78;1VZHPV&7}1T~Pi9n3+j*qNWQ{X*!|-Y)U0C8216Fl=t6k zJAeG(sN_3g^skK*DdIZ-Y60-IPM{C=H+*pra=zM&a(QabLL46$6y^h+IJV4Cd0$JTMw&WtCcvVX*VTGvQCVw z3SKW}RcX)^C)+qrKkki;>K0<#$G*&#t@@)MPqFZyEF0C+Y<(w4AE7i%O$WUf_h;nI zYu&e;6yO8i+G9TJ);qk4Ec5xf9Y$v{KARQZ2u5vAR@I?Wk_0&k@4ru@)tIL8qOph+vCS(}5@6SNY6J=sPH*+#mW2-3%B z`&MA!K|wkc_3=0QLy76N&8FVm*7`m8#x#SK>E1tn&=pIAVjtgXis?K{sLZA%HJWUd zy#_JmeJ1WeHPeX7K#pHuI9(gMZ`L|D4AdQk5?n!#>ycMS2Tf&?J5JYGftL`;l>>W4&{vIXiS4Y{#^=rb^ zy!GJi?HZd+)ohu7-l)2FK68hQ9NA|#F9ax`uAe|oq!NO1|2$Ubo4FIG0DC&^uhkkK7+BDNOTwlLA3Y$ z^$aq{pvOHOP!>L6M`%93K1EM%lL3qNmT3>Yi~D)kjUhzrj!J~qpB2PN!(TDd{|^4rvvH`uj4C~P$`=8P z`{b0SzQV{{A4)Gn7nXW{FWev{2vUv+MAC_+Cctz z_%%}}gfethx7z_U12C!)ukBkLv2{EW$u1xue>Ixk$|YawY5VB0uAQ6udin`CzV#4t zdR`!7b0Z~&H0)f~<@(KZk>(d?%#3Bwsu9p{5ne9VW-^3X+DbIqTXVt`=og#%l^7=U?&QZ8bDy%ZOKiN6N-t2jfZsVMAk{RvgVNr6zZ z@C)N|4r$xoU*%>5pfwC5mi`VSaS}t!--rh<7zh}#y)M*&o3a0W-wnnxHZO(AbiH^S zzVuhKWEe0Qi81LU$z|xCOFtz3qa`E&*m~pbp5~D3tK%xi=PJs?fEZ}K~nD#p%y1(8E8wi$3=J#Iwzag>ikLB3)gCLDwKTY{-hW3|%OQD9k z|Li5s8Q64=z;}Aa6NK3{$Z@K6WQ_<`U%i=MBHA>$cBiWNU%D6m$UswNT;!R{&nH6L 
zfvt#4K&{heZRNt^!lo4e-bb1OFm@DrWD{RxA+%emzDIMiQ}n<7j>|E-A%_ua!x~w4TE`y}r5oMCHvMC6!axM^-ufhfF}750>+P*Q zbRAUg`zuL2p$UpVFL|vGyd(_A*O^5}QeaoYQ$Vb2_P&wl#NViu;m@udl~0`cX{q>y zkM8!?MbT6p+>XY@{Tm<2CIN!}QSHL>j|0Zz1dPJVn@RAW^YG`uxwgxa`M(~<1lZZ` zzO3WF=Q_Z7;bKSk4G(YNp&k(I!b-&Szvn-IbB*@sPk)o4ASo|kKdrrFHUFM#1Lt{> zG%UA*+Zd!aPr`NlNb@ucaq!xn0SjjcJd7m7YE{FKk+^UOA`J@4mFjY#04T?n#i*bq zd9sC#rNIJZY$C21B-o1?806w`8j^N3AS>OMwbK~I#m<$&Fr>rj62ig5Rs@Wg;pvSN zZ^n~qaiI;8Dh9&IPQ$1#Vhkj8*oy5I24~Bk28>E4*jZlQ#!`~Qkjc{SkM4OVbsz8W z@GvP!ryh+8Y>A_)00K9>RDf;fAzc{Qq;XPZeYHd1-*TLohO(HCH_crc^)#4k6n)wm zEP9GYuJVZf-w zN1uwF#Hk9j8nBTRP3S}psP=N}TRJO^AYxK5S)fU5ajGiYAKg&fwRNw#WOJim5r9zH zNPt zS6Kb4c=YMQ9>_QGA@<@&;2m)4yX`o^U1akOi=#Dg+$M_}CsVy_cc?%7M?b`YR!6sB z5=gRc{onuaZSG0QmDadm1eYv&=fJ$Ox;?+n6-z3?Oq*lGIS=5ZD_J>bF zg4uNykl`s4GY@UR>k9xW>eAsV(Pb-=!{qW9RqCH*V{HU~leA=u6>JSCjY1S3{1%pr zY*78+Om3;)?ilRR_31@MQkg+ohK^BtziwFjYB98Gd@3X9m8r;H3?X}_)BgEUk5P+7 z?2k^3ZyMSp;aY%5gdBv(X|S1I21S1)*u0p=Md)zMXwKA6_GwRRIM~4^GAJ$MA!Tl9 z(8Q^!&TGrDle!ZZ1(=D5K<>(mRloq>VT2fckpdF7W|g4Hkx~gqojTnMyOFPTu#BN8 zM;mkT4dz)LT>(v#_U--W$c_z*bk`jZiMlft$Cx_u2W|t-?7>op0DRIrm1$V3#|v@69>BjN6%MzW-&9ukh7%3bXb`e8g#M+@%sL} z2Onz>uu!^dhj8i}`;N<|&X&{l^wk;~9=A;gN>W4{dMtaT6)?TYOy+G$yRdIV+~9TH znHL~VfBW7n4$BxV`|90ff*VLU1)%#R0*6ML;X8g@qUmK9WW@3EWZEL#wq!e5o|>v) z_J}Bdk&Y2;SMT888d=hLy}n07Z>kW+!Y{2&hn;(4AxQa~PpfSx&VOx(dzk_uv~e&+Y%0sVxfhhl z&>b2V<-P1@hWj0;5*3$&`-~fpCW`2N*P?dBFwkzv_Zr4-;RH%Ed;|dGO-Mws6vGu> z7uzvGRQr?LPyn`;An+F4O#c2%<@A{kLeVjG?5;`1@IH6Cm42nA-v@?*{XW)iNAK+d zrjZS%`uAb1eiw`~5z-7D0QQ>I2dj&bgfnA4DYajBxpMVQck)6@rUsh6akN?6-8G6pm^aP$e^3lbrhAscr1zmNq7xqNQ8e;)n+rsHpx5YiQN?QN zwd7_Xedu9I805e}@&qt=u3%XK1`;rK^~nS81m0)hXX7Q4Z}s~qr59N9+P^~)~WF+V~r zp8kH%uy665##Zv)xc!;{LqN&ySv9V`hct9Gy=Q4Hon79>T=H}^PvqcAF9C1(l`z)! 
zJqCSk{^L&LnFjHLv}+w(yKH2=!TssJxqh<#=*emO8eUVU0f{sYu$!3`KkG>*JKhKQ*Vyf~U zHDq5-Q}rk(e&rRCp?n;W?J1!tQBdP%x%6R9Vo$!TNX_e~uda>@Xzg=ivQ!Gi51zY# zb+ls87GMWYFw8H0>M}0dxj_A#lN2vl`L^*yz|<(blEhV~pjI-<=+QF2z4FW(sPGya z()FVMx^`h5e3oF{&=;K9P|YeDJ)$L5GZFMoGe~OsCPpy+l+(;WB7nt{EgNuY8}?@J zz%5O{z@7*NS=HUN1zkUX7Qp5j6)pVh`yZu^Hebnfa-nU!xOf6 z876i%50$qJj0H#9pHpXle{QIR9MW@MYz*nurV7t)BAfgi5QU{-l$Ue5*} z$i{uTLFkSO6BxNTovC6L9#vNSB8wF!-_`G_P*pNh|229*%`V++C&R*n&8YSg?FdvN zIqpn6H=0CKIYbDnKPACy69N|aC`b|s=q=N3>ny5VPh8l_#;PQ#|JpKh0~)=-?fNRu zcQ<{rl2cF+HCIaMo3pZ9XJX+|(I@V>T>yg?%pHDQ$}6j}J-ENqN7vc6ugK56uR;n~ z8?%kIHt%iXXux~F8A|hir@>Fc4bB08hdAkB%&p*R#%8^H zN$646tC{QCCo@Qsnfe;UYO(#a;?Q*UdcDS@|~7YQP93mx0+d3k!L6F$iYI+w+aA{S*emb0|_24W^53|+tmJ&dmM#otPMl1@j3jc zfF$+&pcYJZu>bbS7F5t)`|P6(M};f8S+HWF^dQ+IYp$YdyERj6m`2Mb#pcIf<~uD&Vs{i+3XH> zL3urY)1Cf&B2RE&T@OtuG_~JNv=?eXF&KSR$pR+JIcDH;zX>TJt?>*bEZEg=d;G~a zFAg{xYf+|7AVZjeJ$&jhtv#ie7EM1+**=>~K8k|4*sfuYLX}q&#lEbAh~Z6kOg$Cs zl)M`Ig;^o(s#YeQK_bU3&5(Sf(vG0r$dkjWOoO%}JWc7AeJ}{Ek_!JFg00_TQ_@Prvs)g$c7Cdm6H~x1(o%$M5^E#9VO4sQ$O&d{u+)9T2>fl8 zD+Te(E~R#*AKe_VT`L?)$ILa(vVuOgzfbLfSJk>kIrm@grT2A>S z4)AiPR~mWRUvn*OASPeIe&TV}yMglN7VQepw+-M%=7mb1w~1yS%vbV3Gt1py&5Cof znV!511RY8SR@2L#2g(5byk;sOD&l-TaU09!Wl)h2yON9yO4Hl`wfV^yEQaps!yO;z z1M3>l@*iJyFY^8BTl(D5QX8t#CFiVLl?=hyg2vdU15vtx!<@E*os(X)kVwhTN^>TbdRv;|)bNxzoF zQoc3W^0xR@+ABsH+7#cw38Y&;nNx=4_UBf;hp;%}8rzRXC5wH(`bPxmT;HjQAx(Rq zskwUcf{<*{4r5UG#wxrIglIV51K**4@MV{u&n_k`7g?5k3ba(eFk&!{WBk%133tLO zHqEqSXjgIT?%7Gt&h#0ds{ID~Q4FW{^;LPq+g7ygkXx4?GCe{_C&38zZ4wn1t`o)h zY*kkR8Ae#z*cL$@EkkW=IblxcNG*8w@wSdf0mPzL$8@RASlx#t{2>6l%M(miB;f*> z60uPyMF)>H&E*Ulx8Ju0*7+R|_m8Rg*`_9We=O25>JnDltyx;A=n@~9sFZPZDk>MT zEg=v6&>mg_^?=*VWOy%W!J2+%K03Hdcvy#$OJ|1T0@)D|iYr8i=DrIG#6sm*P*VX- z1U)tI%wh~FGRiH*VZY7DP5)_i?GHQSZz~Jru5@&8+NOp`-0Ymxm16J=LWz5}$UznH zVsJtixmE1wUlTA!BPxMT)=LmYqRTh}a4<>c0cfixkzyFatEO$D`1b8`We?_*Hqs+9 z)=s@(WNM&DIpF%tzMb5VPmU~&y^Y&dCH#JTy1ufUTdi=ffW_>!=88k_vIO-474JG} zNw7LS$$!cp_XV(&n~bM5KE}oeUbm`^i$hB4k1QxB{H|wR)h_tUGm~k+)`>c9+CNQk 
zd!pNYmKcfg-W~lr3oywqIyxaZ#vXV%VV4HIKG8xs&E0xT4~@cSiD3nhv)Yz}oD%ys z<2;>lGQ~ga#M^k#t5^To;RhN1UPT^;*ata-ocXvI$mNGe(|Ds zvauNB&c}==GpEy0AHtvrKJf7X!P-I13CN|ssmUJ*%Rahly70`gi%BR??vE2Se%T9@ z^pqo}g=1sw-8A9{O2R7M2Ss9-(ub-{7!K`qw1G@Z?cR)30qr-gcVqZER87laEWcRep#uRWiU|5njaVg~YwdH}T>;$GjvYh%Tvyk)t`VoK+; zzL-kfq(44vGBeR}F{ab&K7YPCX}cv0?mg4UVUNn4I81bpL3 z=hlkTL-1t7D`@?LGL>K;i6FlzKjG07U+*h=Mbm%05n+gy1 zY=9O`Z|G|UbpW8dne@WK_u90G{jP$Maj{(aeJDqzCVCVGx9 zaH(uemo{ZG0%YV9sW)A2@6`(UDAJ^MznnHp$@-G$thi>q{PA~cx+I)nOn@o481MTT zNP4lS100lFl-khbda+y7fit`vVIp>Erxj*EC1Mu-zJcqzE&+>Rcq^4w!<-FAvlZ5y zjN@ulj?SL-F2A2c<&@6O=N*%#P4rAw{`}DRWGz=#J3?G?o)eFm7r+S~n8XHM3U-?& z)3kJEdcM8Oy_KPqfX}XIXqqKdQ7LImW3umdm)xm9Roua$^Bv*MffMRy;4r(DhE2-! zJm`>jZ3Fwt?Zx{u zbtkjOEirF5ld1C;;f9-*5>K`dk4af5cCwQxE5&KQOm15VLP>=;k$QGDRWJJF&FpIV zM4oTiyQHM$_n%Y-?#~8FYM-{=+0?Qk=271ZH?LJ zx|Y!T9(jqU^FRnqQRs;Jyg#gDMmLm97=b7i!J|dT`!nBAZ{iBpCSb6VTv~usYtecpfYxkBxbXs z_+nzrV?A$rz2u0K3UvO_k8%k_4!5YSYQ&FhfSkPaeyO}$FToNV0V9orzb zpcb1oGz^S0^p)%q4U9S~B$bQ|PgpuGM7vWvn2R-z(R_!YJRu_ z3~WHsMJ&auJ>M)fogBsm)fMC}yaOt#HzNTty}%4mG=|NC>*=$l#`31XdY~qUWJWMk zY)0{x2`*I8T_{3*v_+|HqWT-Ss6%61VsR#bf{ZF;Mq_4L_;U7`YW6e(mYf2iDflIU zr&`bdtcL(A=caKaL)`hUnTurp{ZfNpM)eMK&9oQY>1I{Q1u|kEV@gZ+A=Ek|;{Hr2 zgQ6p!zqL#r3?H6=C;OHQm%w3?*C^jK(K%SQZJl}J{kq>p_dk62^-TKpXTSi%@WoUK zuoy|=D8A{%OHQ7(;x~aDZMF*`<-yKQ4l$1?^oWz~#OC{=w>_XS&wYI}8)n?sB(EwpKTmZ7&O^F+tJcKfwWB* zNT+iZ5S{(KzUcujag#o=7RJd|tL~5wm1+hW1vRE(r}wbm#s_USlzAFg^l3oecG^p5 zP1}2DPfhv#@DjUIlW&*l3xSY$Sb_lUaD)0EjMgDI=8m&+xfC!F;tN??10&@VCQr%! z%kP9_1{J9Zqk{?aMYGBR4=oN71fu$XC0|v>$NDt&(u7)5MCG^T1Nqwb%Dp$!(xB~a zKTF|sv?J8<^T@P8W1GuTnw}>LrCDg2$+${3JYPX~TtxVw;}(EqVwI#lcv#P|dIcH+ zPLq16v#y-R5{~R$$cyq2t67%$)KpfVev0n|c+8m!nK_E$lxl#MQJwby2BQkAhFc@^ z9M@lsILq zE!TFMTpR}aaVG@`3Z}GBzH24GPPi$ge@~-Hie|x5j;}U@zFPl80V_0xocAy7cxKe? 
zTfBNiLnSC!yHv#uLM$y&j?19LDWzmMl3gA#mf4|31s$%BHo(4UJWXQuet6?N4we#7 z-p?{8jM?a*z~p|D)}Nl7#My+_I3M#lc(l`BRmKaV_4DVYG@7xucol-rSP=C5!tTOg zdA1c(rpqRh8f|Z11Qlj4Df?KnP-SI$5u_WlpeB+v8&z#<0R%YzsRM?SI&;t2J6iNa z?x#>eX{cdvSy9HIU}> zMZ(S5DP-%+xIr1t`Pd^Lex`%_z~oKr&^VO)zou-H<_HJv{G}OgYtFxO}B-wE`r;8(UD?csb zNf#p~7QQOLl6k_98|{Lajnmby;+rNK3bs~RSalmkkO-ixPP<WHVX!sH z?tq`!TKFGD*0+} zCEhvGWT~H?CRG?}2$`l`jFzjia|f*{Pi=2|2l{V}7|)TaolIGCkHVh<(ewYby08_c z?#DdWV;^>1NiGmotp#sS#&Wr3l=ge(Ha+<}_Oew?C{= zNq0pVSOx6vm1a4@#%T`TP(gNmZ`l(8ZA%wml8p|m@&Sv53G<0?vEW3>iao+$2Wh$` zay6jVZwd`XX7(NOA#q*b&{+F2O{`Gk7nJd(XLxT3R=Lqjt)A)v9ln4yw<1_z8yBhO z%Wl7+ec5_vb(a5ey&qCB+PU5cRL9X6OpeUxos6$3$JPrKqzYT|4s3TEr^y$jD!4s* z1adF%wak3{&ffQg-g(8+?z1g=B0WO~sw)41ICWf~7P1Yps%msU)G3<1cBKmBN${P~ zM(gQ3SncRomuA2Ma<+Rxk!;co)>!5-b=8Hu1H~0d1Dn^wo(NX~4#)EHT~21?azR7W zRK%HO_v2+hlmYIDYdQom#1_IPb z6&W|Rv^5mgT`Y(1mN~Y9Is#RC>;|S*NIt7cM4nSX4|Ijrv-@Z1k;#?nJOQ50D^Eao z%$u;(uk{(4LybPhu_l4VCC$p+pqbzw2fr7UdmZ{3dQ;>$WD;5OaFV5xZ!qT3#>fwhk5{`T*i!^ z+RKnS*IDibco=x^jqGfJ=CaXUQGJWc!N`z+y32aMzH3!;E5XsrSTeMO%vd&InyyJ@PvwT|b;!>A6<8!MxI00c}iRwks)`hCMo_hCRKH)l_o+9U5tJ9jElwOP3l!&i95*w0Pnf>XgK-DEs*CFz3(7zm(?zRoQV%pjgn;4JSjg=cE@ZJ7_-6!Vjgl*6 z1EYKnkH6=uRS{D~P|h(WDl;ELv~;kt@sQ#Rjv<{GMf{;(DFy_!El6Dj3Nqv>Jx29u z39=@**JD|CE>4QFK6|%%o&8kxC|y+U(ktWgKF z{4=Sqe%Qvqz}nMUcWiuqdi-P6Gwr_0?LueIM>V94n{)@-h1KfSvT+VO1td<4C#U5_ zttGs~fyarhR=>I5Mi;01tGcZj^gK}&tP-coZO{WqfF-ji-)XYc%KMkN*MO0bWsNA2l_6KWr!jW{7&T)8T`>~_FmB} zGVl%@j|Dqg)hJG~mQ_u3Mb{X@yW(p@XvS)mK`}6<03_um4IxL)+6J_166HFz^NqCG z{!6Ka+o`SFXLLio5t60lyv`1~66XWH349h?TkSSf#c9>e5&_a$8{Vv)f?}E)F6$|# z{@b&$I~xJBuXAT_vBz4|wEM1-?0Y#$CrUcv4fcZ%}+I086j1Wh%Ps4TfT|~=4qHDk$f{iiAdxr@8ni4_U zcf1mq%QX`vDYzT+*?-2DvvG?G_jhTU^RFMHZlEUud@4qRVlHp&ItIocs-G7`?(=px zWZ5+pyw19MUtKe(F<5w0mZcR|{alP~PPzYnG$Sg!&vARlZ|}ua^tL5k`bSFIwcC%Q zm`bx;pJ?~^;t6}h!WW$B+I8K3eu~tKH-^h*;uJc0eZ3496=*s~P(2i<8Y}GQEC`*@ z?*d!2p09mN{1hu$ceE@Qt7>hnR00EL@cZ=#6FpU5b>PG*bq>sKO}F_?q4RotM2M%U zR4c9yzRU*Pbjuch#WIk{W4Xo_1dchx?`&?IZ4&H|vpbeKZZL@F2Rk3zSg|Ib`$4Kp 
zynJ(ieNNrmj@gl~KIK)%10C5_f4|kgS?`SSdz+%0cUVkXh zH`8Mx<^Bc-(N0gOFmISA7z)- zNE>sddMtTu?JFjed3WB-id{fyU(#7&PY}+`6l^A}H0}hPUTqYBGY}V~Gn z_x&Nj2L$E3cA6irY)O?+#T-A>&e?rNy#+t`%22m<)gn@_1FK3Jl1torF0id&o)?Q} zu`q`~q(j8PlZXxrW>(*TsZ3Y>g*R0vY7*DwA8-5K%$k2#0cc$(P#Q{CH5tqZaJ1VH zj!jJB^jeMR3@M=T-FltJGxYQc{9eJ0Bb{=7>M2IL>%QF6L)=iBjm$WMm6aLSMzC$! z(tyAjlPgBS)-p9y2yz?!W!YxE0+G<4z}PS>{p@%3mof<4ZvP?kt%4%jEkg5tW>AoT8&@B{Z@yk~_o;FuS!u0Q40VWRT{Wq2G8G&s2&{OaY&`H(c?&4lbgF{BFh{5bMZ5Qu20Hq0 zciHKs>D71VChT=<6pPLvy4e+B!KkwhxE%JY&D#=iH>MwLU(qAsI zAq0+onRcyBkachOnxwb>FZxy8q`m2BT45$DW+|x`7&4`p@5&fRgt7V*&eL~12Lg6W z(kzgx0_D|aV*-Xg8H?Y*&bKma>bZm0a^xE%KS_VR^*vAU!4#8j+P&1l9u&W)1OP|C zq*)(m$h$4ti8Jtu7BRsC`U!kP^jL>Rrkc)trFWnyjwr*`++nFZMJ&_M$?VCgpYN(n z^1SKxay81+6!^Gl9Lci*8W{j(d3xr#51M!SK(j zqZ|xmhyDHOt5A1s$@AH^Uet8mjsiE0ic$0nU6<__L9fcSb-yVKvuyr|QKBk^i-B!c zy;)f@>v)#xS5ntF)FiyJDZn3SQMCgH}D#tMEqb8xW}>BFHYmA{(*jnUwPi*0z6k?HX-1|Xhn`4Wib?@ z1TTBXXjfDH!Q8;0W`Br$J?KRd-D%TVY;;jA`twwKXP^J+#0bsgT1HrGdR@qDC;SZ% z!By7=KGZsCt{y1~8)HlU`iPQ00~R>c_8<-e6PWK3=D;BdCk?*7{E4oNG+a+Gjc!6^ z?WIif9f_pxt*&?n8e3M|rS6sLoSleioW@!apaP)trB?epemvKuV>n>(rGw1CwGpqi zxwA?&p^vjOTMo6G-4}J)&n`M4XV+eEuM_GSW3}}i{WtZS8Y6RhD;E}WsIilnnnRy; zV!>|@x%bwqHYX{*+{vP>jbx3i@YqCDR79Fyp9`j#x~$}$%SkrPSG)?iTr<5!m9m!B zERBy(d!O;27`gP*rk#{sIj>QSPrFP~1lDd17hTOcIbMu8In^xc>FFd6TnwP;WqR`t zO{udBg5x@^>T*V16z4Z*d>hwi&?Rg;s81(dr5pyRgcWfPi`cCxcGkmLR#smMXhZL} zL8960naFow7!O`HM@=jj$^+uD?eYw5RO%mD0rK3AH2)i|3n;bU9P_b&pQNX~`1_3x1{OC^#GzXv3|ahV&NxUC_|huSeV69XHwVDCs{ffg z`TK?|pqP6(^vmg=_mM0CUW0p9>M7kn51azg_=y5;)4y}e{}Axla*uC;lgY5#I*T=z zB0_dG=ushQG&3Osuv)zo^0bJ}FwdWPeR~VhHfg_m@Ik-S zSo6r4xB)B=+{)$jHI(yx3Cx z^2KhJY`=}~1z8`v$M;uy^-2s^n%=!Y>%1_tPHkJ)=r%g}E%+cr?7Z*O$w!T=xtRU+ z4}+zxAQ=}=51FrF>a-%V%k!=^)YR0Pr*@--8n{F!c5UJJv_rD@#)EO`yauIlqpA5f zf^HEhDk%vsedOg^p{K^w@P`r+_ym2peNO>L2@nZ0*A^WN+ga~u(*0h`@c6hO7&V=k zm{?z5Ur|xf(jxQz*K3~CkFA%a7F@e}0RaI91_sj7(sFX`XKUGbNv+h=H`%)+6ZPH@@*};MFC))0VkNJe-*KC*|$=bk&$74)zjb4 z{_NQ^Hnv3O@1><2vUu*NzB>(qM%(vA*Vs8Zi3ya=7qbBkm!!X-rUU9GI}_zE622;Z 
zdO|@?uAD7#nW&|j+G6@ir|eVfNt&4VJ|Jo_o8hnBpIFkB^Xu!yloCWlMCyykt5h|9 zF+P4QS2j*XMYXoNnvg8Jz{|?|N|7pJ^KgB{Lu95emA618{fW{rF1_D8p#sE0U3V=L zoW-o<)6kfo|3N#0Pn450cG>OAfU^=>>WsKDb8CNkdU|njF(H9eDPd!C^E*P@;Hcok zhwL(AW8(y7T9>wa1(}{{-R3<#J!umDB2_$Nd|T{UpFZWGPIpT=FE<+e2idhpr>BkL z-wVZ`l#66KB8m+Tcc5N-Z%^->)+n|dk%fpEF~<+gW6uVlx9>fY*VfifOi7`lpa8Z{ zPDx2gK`}&F$vn;31O4)aK~i$=Z8;wwpJKw{#le6bs`vf#k$DONdt#SdKq|2hgTH6kCLM!PBjY}TkRngFhXI84Js?t{Z@ufCxV*{;pa&(kxN%QgFSpZtU z-CwDxsRh{qKES;gsuS(%4c764WKTXllay?dI(oibmK$f9NPG#4tOUP<8)<8s3#0>3 z_gKlW{B1kQ;8{*&wjVHZhHiToeUEW>+HD}3MzHkj*S6POgX~#Zdmi_E<0(F8W{Uft zAEu?H<;6c*O=@xWT1^bLbDk^O4>_U5(hV_PgP9u78v08JmVW@BehVqR}VqYm(KXjv1PAE7bLWm??bSi{HMS|u33CwvXMJ2f($?0t zkR??%)X;k{;Ajx!-6gCZTX}zpft~Z|(>P|EpRHFH3Oa#HxClxas~{)+2GAU9G>6BUS|TcN~Va;%+iKGZbV$I zm4(I1%8CISrBcGI|A}!`NYM!67}wAD76!BBW}V^p9w~OUH^h?8O4Y-xi5?sl8P>}= zF1iC4aCUa)j`Y92Jh@dA;`xDZa?z(D0G=WIb@869g2olslMr+*gHm37dwY8Vv$4KY+79ALa^QBqfNx!>~!npt0wOk+InAj+B` zdER2cjvp)W+`kL48W2cTZsCsn`Z(Fag)tyaCLvl?)fOPhdnHMN<9D#&y=)W;1=ytW zXRe%|Kh?#U+J+9IgLp$i0Ac+~^F`HIy;ePaY!7(0H!mVSUXY#6zb94|TEKq)>r0Id z(SpFeQ$}+PfQ1@m`IGj^%-y1N1?o?(b@0Z^fO?&_wyX@Dmxs)>mKdV&0Kn;cGIZ5) z98Z2{`yWp(=b3;&>Dk$m{^#y{)=Z=uBXW?Vu{A<2cDB{6E&EJWJzd@8^mJCG;W0f^ zTCdHKmC>;=Ipa;Bb5$(u9*3>})oU66*VvGY(mK?g44UJIJV4kSH?+_^k2mdi(BDbt zYo8cuWQNh7#&6dWf)5Z0q}fi>Vd?07rgEe50N3rZu4S-R5_oc9|i5sOg#n z<_FOolMpMKXn9Q+N!JzsW5sd7iHmN2<@ z3i@jn?&|9L^3^NC+wYl`=KN1GskIV!_|4-1h*vP}!r|M);O1l8+1$)?xs$1?Aie?k zASSnL`2wLlAK34Lv6O5}ccW^*BJrU^AAdQy&~Ebt=5^x0)A#2Ud)dd&mNg2?oVBQ$ z+bKO-tY|d$M(ZLf!QEq$-$z0;@M;2Ip7OKZ$o5yb@@>YS;GNl9L_!R_yu3Y>!ZZHA zW8@)kyTpA@P&4lVZ!C$1b+uoPqY)1}*caKDmvL}#0H=rPSyE@j;Yu^Q?mjj)CZDIl z%Tda~f?u$E_au*gBabN_-rb#3em6P*U7oPc(qJO#wyHe5zggBK(fsOBy-aDqRXBFW z?&>o(SN9TJud0x0nh-JBAR#dZEWrPSJ#_7QR*>wb%GFP;Ewf8P}_RKSZo1ue8R zsLoWOMkZAh?SJafOkDw^#@BF1*}fImF$5w3Oe5!?^T7Qo>BIAt!0enP$=$q=R%~%) zb1N$XIhIG7Dkg=#s;WdP-k>bUZTVzz4>}5~uTVFNnI~a3C7Ea>BfKt}~s-%F3z`O|7G$!I;RO7OA?%6jfhc z?ZsdP>zesB<`ofPc|Q4tue5%@2MCoeE-rrjc-~;5W)hOfZt;{^-JgToIV&ZlU}XL9 
zLXL4e_soWm8c0CRm(6?ow#ebwBUa^&?d@$oW9LVe?HB(504kZItPy{By|tcU6|!iB zUc8RyfWU^*heNU5)<^LL0PoJuyotZ}YKrve$hH~TEmd}Pb>UoJK4d^OeEj&av9S?; zEGP(M!YpIp>gi7C;E+iv<2iwjMye^kC)ED;dyb}fE>06iWUpEDknSQa<32^))aPVr z(xs5$M?)Vp%1khy}QcpV0TzMnDZdc z5;kafcz9wWnZAs(o;_|vMzd?Tc8h;$MabF=@@!*3Yc z4#eRmy^&Op-m5hoP1&5M{|e8;e~R=`dHFM+(koj&v_Y6+oB0z~Dzd*n|GGbma`WZI zX~?oM_oN0MLbINnA^2ajK}CMFsP)&x+hiG!&rv(8^c&}-xLE74u~LT$yRw4Mkh5Vg z6ORAEAOR1sNUuk;!0hpNPcD-WrfF(T*>6Lq@d~;MpPak+R|4uGd0Dj96z;e zgc8xhJ=oumR517Syn2bktz56HlcUwu)eEy+cjF1in4teqQBnD;$=`T_465@F9YZN|tP>#fv;G5IK=eAu$!aJGl?GyjZR{CXldKi1o(rJHkYUeLM4TsCx?Wj*Ev z&+P;#@jt$$a`{FbrJ%nQ_Ty&0u!FB?@wnS_?aJ_QJmq-XrsZVQrQB9{O3aGUuPiN# z%gcFqd3m|H3xXCt>)V?dmcSB%vL`G=g0b{2CB=fDa{q6{{)9Kl_{hkJ$HB_@*cfJw zWq2NkY6|Z-PSg9pffbM5Zru%cY&c^5fOk&x_M##UjD|0vnYJe36qtX%e@huf4f?h#cUscSky0Nt0u%C)iaaWoE$>+ zzEoR6^8G01EQNBK78OFepqn3>7Ra~Vh2F>`1p9(ar}59hudX**Ytl`Y>br@ zWjKD-xe$dvb{iq7dDyB!b+SD9qN5%DKi;`Q*zHH}Lk)X0%~Z9*tVZ7t-*P!WoKP&U zg%+hTevUWPbVKnu>fn z^V(-cx&5Vu1%P<RfKjEUvU1aAMa$>M0&8Jip8|yMu|9vA zCE*2V_9#b21GWASS03pjX;Ye>bRi8b^QsL15ZX`Bl>B@0E9Su^&;`lUt*f7Mt5s^A zY>sL^c+9?5Cfp6!opVH1)%$Q<_gVd`zq@ZPX^vdwg6E*|&8bzf>Yy)0D^t?QAo=ptHVn<3WkznT_$XL3ktZou2?Qp>jUm z4UbA>jL7ib306f~$xpaJ7vFA}_XXxPM|W~K=e6teyAyl5{=`a{6n`G`yEwPBQ7gkIy3NOF;LKB z;|ugQEz8Tx$Jd=P{BO0*Y;ctVA21+qqP#(|;%;O#H&*g!;=|MYmCC<%+LvT=AOFd1 zIEPx`nm<7#v-lIjnK$;o2rS0OF5B3%+?hOZZuBeoSdbn2D6@0$RmTor9@%3~ z-kJNVhh4pLu&>VpKziFIz0#;VLl6uZK$?f;CS9BL*E80!<8-@EBELH@;1xd?I9B7R zXq+e`b*{bpPR;yFba`3X!KXK2Jc;f*O{ z%ZrE>SxE)HD{%QtT`iNXUNp0!xY!bfjB7vX;Ts#@^RS*OzR+P<$|NUm>NNQ+`^=Ct zp-)amvbF4ym+k`&1bd4J8AA6iF~)Ce1bYFrx4`!zt*Se-yaILdk1kRpc*fya6f7`UdP7PdWma00Why43JC?z%6iGM$ugp`uH2wWJDw$+W%+@H zL-^L#7Su!7p(Q~EPI;;SJo{KvGTfKK3JVL-p709^LBDnWmMTDD=o~i~j^jz4D(Uv) z6&#JR5YG49viEJQ4QC#QilHH0PzkZgcyGf?^EDX_s(;^-(dUNo97QuV$R3N6NANf# z4jH_3(AjU~;tUkuN#p);PE&fuMT9E z^{G?-d)t-F^NdY1OB`B5)IiBDlF3O$OKSvPJD{`OGx>oyB}^~Tsmr_wUeWb}1QivP zGyD9~FuIp;&8}N|QSv+LG5dt2I@AaXi!mv3V=Ho6^#?RY5i>3S0`yT`25cCje0+Q| zb>g9vu^+ 
zsEO<5&6|n&m#1Cw6o!JfGmY?`)6^x4KWZV5mzK_Ro%ebuvf-hQ)$w9F z=3vS{O9vpZ7{UPKO><`CuU;!5Bf`VcKO&u+oUC|jSd@=v-HfY>BGxYE25i&%wRupO z;$(;kgLjD{>JRcK{m5F3Dkr{N+@l(Hm0sxtx{s}je~PWR&`V?Y4T0j>*t_PBfdy;Hs|(?q*}< z%Gj}?Vs)U#DUZ(#S8S>B0W%*-y!{B=wb`!Ev&JlA$HMu;?jSbB&;VtGmG8 z7OE(PuS4y#nx`NEy?_6{H>e$e9|;AgNJ~$syoTW>3(DG}0p~I8BpW4&uy5~KzIwbm zX^}0lmJ`5%UOT1osOaALA_yG7#1%nt_-HkXZeaCi+OD04gM;#cuQ%{woEIAh#Qp=S zD{;e?8TVi7%gf6%4&rr3+)EBJm`#)%twLf{!vSC2r+nnQ79p`;yl(Vb{#B;S_TKM% zn6FTBKJGKOIZktXW*nCP<=N^3Y@-IQibk_PYr|;G0mC|u0i9Z)Wj%Rvv{6=x<(p?Z zNh(8+*y`S_t7zJ~%^}hJgL&*nSSC-Pc=fcKu+7B8gxg{e1y&7y)08LBhpTSAHk=PF zOqJ7zk^G=5=mw$q?+%brFNa@_H4K2X2rnG(1}XA(FfnVOxXf>JX=GoB#;-d1+<#2d zauA`^980DP_AFVBeKMQOVm0;A3Yx9;^>H{{P{W}~Q*@n0Io|hdPthg#V$%s0ylT9T zmX_3Ahx2}sOaU>mmVyFQAgPA%=~nHzh2&N{u!k|sZY~tHg;4kl)&5e2;!nu?55eCg zt$PttWMsYW1AqaVHi$epS*3CIdr6Tb;bC#v*>AdV*$WB^wx;U1ev5vrt5c2_1X6!V zcylQd1PlPH-VdTMqC5!;exV7XdKF=1?C3ZL%HXs9#O5J!DPN7c4lE+M*OQBt z6Et?M0*)x9edkB%v)1kfY?q{}QSysJPZV6oQUS(2hb57g7tf#Lzm{<9 zRPm7>QMmJ^FCEiplla_D1^EpQhMN2!@UWyOf+B`aEctEizt;YUD6?dqC^CBJ)O2)o z(mjlu+uL*!o`0&p&~wpFj329HuV-Y?<`3%`7;r0($P!8v?5Kv~dY8&za-ek7^QfEr z_`iSu9?$adxzIhm;k1n?vPm5|SDGWDa?WCI5y1#?35oN6o0SvKm(S~3TD(A(2RQ-% zvyI^gV#TY8w zJXD`I;eY<(#W@IZ)1Q1=dYKq>ChVHm*_7igm3$c=5|d~QlEW5-3(Z%IF{*j4>|RRu z&fx8>eOTN6e(+$Js;p%ed3ciY)hTpXDx8eFn*hwE(byt{Mf4z#4Ri_=}~$8t}T!Itm<$Yt^J z<;v>nsd$7Zb7tJz!K)@JNkM^IdX0EiL=RE)LJb&>7vLAdf7i%f^`-DUOOB6cB3CCK zRAb6r$%r-F1d{mX-)J`0^8ZP9*LhJW#CWa+lm|a73tl`fn;3B_SQva-svnSF-rxkh?jupr44fqHm4sFF%5jU;`4+q{?wq@pC2+xPAzu?2*kro0gn zY>rTlAN%n`AqltinO^ep^&8}o^b%=-1`^&}pA|B5x=<+8&>hQa+Twfm2=*~}AV63_ z^ig7zZR#`xOVZeMavHK}>F8O%X~PjGOq56MFfgd>5ELc%rw}|gV+qLW~@Q!yP+EQBewe0)zzY+BKUE4 zwnf*_jM`b(j*8=aSdjs$E4&ctUoBxR78)8F4<8OgR|i!Q!{EX@r&iI{OG`%M*LCn= zKyk>-%)ID%iSuFy^uwZ$z!{}mB+TZSj+s$R3ptJY&Z3464h~PAkh{2?SkQck-E`ykh0FXN~U6B6Lx^L zb*Y{^TdO=Q8XlhV#>P}KjhjEj==#Y$UcWAXDdCu@QB?Bww_<)sd|qDO`e+gMotM&v zoBq$>6wcp9g~~~Q7)sK_RFZhOxM;*&w*W#pi)QNIxd#t%0s!finNm^`rb5;DK3w8> zYce4r!3h8j+ 
z{fc7~i?Zn6XVQCFw$zj4=7G=mhN^b1ki^Atd)b%L^BE4GAD?X-VkiaP2KTiMDm`2K zq^3#feDwA$x^lea`JYE|?3qG8Ya_b5({T)tC87%X4*&kFUx`*i^WLPr@i^4hn!Wyv z5!=<=D2r^L`eqx$*WP7(xAv;yp^#{6$0sGR2F?%~fv9kx#g#>9*~Azbcj_OOeFpCP z>1u#+02^cf{*`P5`0`q2?pk!o#Aa=6ZIRl$8n9Be+yAAd$21rA{YR}0_mkWy3=37t*q_o>8Y%&%qYPNSGSts z6y0gUXd#Fwvz9d&9fj&6_4I zo#|kjrsLKJ%9(%$jlxgL&gNrkvvk%eE@||lj_Qq!jNI6;{@#S4`)EIGy3rw@MH$7v zdr&~FXv1mh-~b@g>TF9eh^nGe8rQ?fD68)|IGF!H@_Va~cRzND__=aCG!DzOd8yc* zBR%r{;NW0zLi6&NT#h64&3X=RlYmEg1*syub5zHsC%+fPJ{k~=$#}ogJ;@WRN_<)j z;AmVhmLTlf?C?&@uB382JRLza+0U7?V!0EWP@bZNmqO>yciiS!Dc2O%+lulyK!QMc zIcU8RTD-5>ue0VXsvLi`JxwvT?wyR-q6xUQ4-zpvnQj2gkO9Nzv0b zozI`)j~i*nvH{2B)Kr=O@z!T&XG=58SgAh50_!5#S8^}^QW@*@ATKE;g$6$uE`2>q z7Ze+?Zce^|-(y^RQU=P^-Ma?>k;y4S-0J^4($$@SdN?jq(R37;r0H|*&*uQWO>}M& ze^{2-19)Coatr(Ye3$K)r|q!o!3wh5MSO3I{50ZxTR9&6>59jYjQBp18AL{IZf^YN zn7Tig0sT;lIHImx11-lwPlM4qLbD17K6W z3kyZLM2(;#uxVrf;@i5*U8sNzN;w3{1zPkOxIQ-52bd#5(`P{dNe5>*3^ zz)Yh9dBls$Z)Qu+pk~n3%OKlNmLjTj8GU&iWpa~^;>dCD@$N?Prl@W$oR^)1k+7ir zH6P@U4}$tWR9Arrs|>h0yCfV(3{!d`N@y;YMY(8nd36<=#jkd;v2iwgyWZa$2tqGae9ms)0m_AH z{~IC_12V{S{x6#UJC3H4xN#-T1}tMRN~nSa@z%;S%~Zilhqnv~4+VwZcNSSxGksV; zy&(Ma+-P*l#+i5V!LA_-iDUk-@ari-<#;Tli?EO^vRlcRryHXXTY`?R_~MV`b1g&> z*Q3b9gjn-`?}U_JQ6*jr^wyI5iNHaJI&umf9$Yc`lgD<8WSqRV&*8!$)L;)7A(Xs{ zV@+LXR?zd$8Z@Qv;wIlq#*JOhSe3Q$2c!aGBa&}QM1svG%1|I}a&;=DG_h$2BhCzHZJ_+Ictj>lN9%L@yubZ^8agVy`+`5?0E+86 z73zOk02^J0kSmFVMVS*+ZBxhW)mt<{)kfOdcYv(8P`-}Kt*n)|gQmp&q3zkIm#QB- z9;zc3*TdEIexP{AlMdVe9{I+kbLDN3N3a4=GHbSysl-_%!`&DKN;q1iNkJg#Kf{F0 zv-VimkX1Q;*iw)ZAG{1_CI=&zx1#t;{i-S|y-_rZbQl3yOle{;|E`=qu)Hcdb|z6)a)?WG z=_TB=`Jt92?g53KNt8`}{O~SaG`ZYhe}7qd`Au`>uJ~JUN0j*)%7A2`c%I=jd5(pV zXBSHKEal$+dcAjj>FT^u-)Y_zmARQ2sQv%3r27X3p3D~ox|fkY#|AI+s@5d1YtkcY za0{{^qL=;P4Nw@^xH+{$scqRz%P17^mV`d%f9pkFtK>>lObvvA;*PJFQx0+rHwAUI z%;QcYx88f0MH{?}bIO2?@Wrb8rc-rKraTLo{L?ni!#G8H5%XUBH20Mv*>qIK)-M2Z zP|*N$7E8ngKar@rW(=-3ylEJ1u>$We=}S!bC;e9pznk{@x!4@f{5q@nCH#M zb6(7W99Xq>Rty65oabrlyB5uMg~O4U~<%HAj^ntwqgl=jT7?GQ*b2QM#yHx|f+ 
z9Gsk25uVlTHyUJeww*E$%Np}IBFK5-S*%vRz4L4{I#&{i&3I(_4Zs+P42Jfd*K;2{ z#(d`c;8YfQ-VrvL9n6gaFB4DtF6vEqYEZ!2psT0bDxSb3l--a7688sSe*o=1CX-pM zN|l0fnm-KWSFZhE?4Fg5!NWvgN5=655XhmfV&9zCJaOZ+hinW`<;#BK3+=?u7B6}# zhm~RBbc?iF>=~V_y?z_<2cB`Y{q^7!e8(GNAh-OCW77?mkqg8at%`a1!0g_kei_dU-m-d*~!epg!NzdWp>d4rQvGjc|jQ~`Op7xN~t zI@esh>{?Jpg~D*1#wY4A1Q~ORaio*N9+|vb zOGtPGTxiDOOOPs3zLTTcr1LFE75+c&K0dC9KrbfMF=;c(PD*-Ftq0vx(LgRs+8l2l z4VM{pJmaK!QvBId18SKeq#`sbCmp)m1)OLG1IJ+YqT@QO>(pI$pY7 zxX$-|_AsmBB_r8n6gsiwzqR~5ZNh7U(!l5c_bgbUBnY(mD&NKLD$Q0Nm+s-_)odkX({sumZS;GX3Pq zkGq57cR$`;DlWKEjHccbK7+$jeSyEQDgzt>l-4J8e0xfRL?atAP5(jab8~Y_?qQGE z{f#P%UJRW*LkVK=u(Pwn3b=chl>D#MM2KE*;*_UW=|}R=|N1uD_0!%j#RS9Xx*j@* zu1I~?&1R+T>IJB33TnpHwufD$;Gv;00B!;VkcxGn$wg1=YHDiYSFj2PzK=YYWkgla zD+n0MG|#@8|6~|n0I){?(7&tLip-E_F_D*(Q|0n<%MjaJNB$yP9p_=GAaOxK!Zl^r z+R6BGtCscL;*WCDnu7=2Zdq=Z-K4u?38Vy?(uwh}I1O=&a~Q)5h{N?bzE1Xs-IBv0 z(mvY0yt~_Q`fIQLdBT?M{XVa3)^F*VnfvhX{5LyaQhE)X&YM<)BTbX9=*8XbU%ley ztPuM~xkBnZwK!MbezweKL_II|Fw>VcuA#m@TrLlZRH`bDS!=RKKED%-oTxO8>DeV} zrNhHlzIMK+Pxkp2_kD|t)Kvlz-;2K%aY}+fODTd0Oo?$qX}7xlq$%t4!H13isaUAZ z%F0?YZtZ?J*v>hSkeFBi!2r*5KQxT{ucyWkLxCNcvQmuu1B+~D8}K)UT%I}UeeRv^ zwZApHctRjoF}e=&JryY_M#ziG@L^fL0IZvMxVR3F=5Gc%Rt?$mQ^IOwCS1-d8$P)e zmoHZ_ZcX=ucY<`hQR=`1+PB~Jm^+3CW!>409f=eod<9}wd%h!06T6>Iqlw-gDDiSH z1WTg(puJD6`bfYQ+1N{IV23;TWu{HiWc*?I6H*!Q%66JXMMKJebJ3%t#~oZFHWKr@ z<~j^3`Z_jt_jV!gj>=+#oFzD81HX>eg)U@Kj4LEEfCr_jhp$^Kl)`7^$>cWf_0j+_GkpB`@TYl<1Xx2* zX+1&GX7JjoX4QS52^MNRt7>xfAlQv5+)wwx`}3Ntc>xBq4`gY{`Hap`37RsgZyFK0Pfq^T6Ra$C|z52AtaTz8I7bjKD!7h+v8V1~F zsDxq+Vh-j=3)lfvoY22qLw~|Sb`{sbfyPv4@$idR53f{N4m|w@=gViX7U!TKB zJLx$I<_)HLbMa_9QlwCpqc`LKFs9tauZ6Fb3C-}~H*fuBZ|uMOm~%WwI-uTeeh!VS zm1;~gl(sCG=#JG0OI6P%L8+9kO5q(&&JBCc+q-1GnDSRqdr3nJX@h$Q0 z?rvr#rUg4WIXNAjF-V@~ZDqlV{T=bqs|$YZ72#(Qho#T4v9Wtwq$DIHq@+%>E)x*4 zm@xAC9x#3avL7ZER`2J}jf{@3yjlm)SGlFmZ{C{nij`$cu&UFKQ_|7b-;h=cD$YPK z5;fg@j`^m6$b=Frg^13&c@ZZlIxrBkT8}%S&tz}AW%pBeVl3xSQA9L6@uSTNJ0LeZ zn=Ss1KN=l=omqGd!tHIKC;uqaw(|58ovK!psc~Bv5%@qK;EwyKj36w1ro%wxS6`Vu 
zb^2;p9*=K-lyuqn`=TPvcXxVB_#v%{ZTQOZCMIREiJZ$&b|?gEDtO+V%j;_QjWW()4FCC>qXf(h`qf%@xB1AGXL~b-Li3D zg>dT`l0IogaVY5pUZYk@A|j$&w+gjO+|8pRCP4WvF6Id6v|k@#C4BjQ^x@xIaoHkH zy(avT3=ez(RMFKeeYu{YYO=8M&t7(a2I+KkVZdPY%3%K&R(C!gygcZu@DRXSEiEk_ zz1Q=)kAl|#2M^Ej@`Q@dd`8z+cj@10xtM^!!$NIhnSY7Peief0ycbUx_j%EOPP`f5 z|2AR#Mja`71Q5XXOcil@uS?wOzL%%OLVfoo{zl7Qu)FOOhepgE z7a62K26k(jgb?#F7gK{CciEG2NDM;GrSID}%nvylXwN8coQCV&m@kAEWf@L8=>LRS zFeIrYWo6NmlA7~VHv7h>o2G(u104RoxHxT0_TTry^m1_j%7r5Z1uTq?F?_X6lQ#c! z`2CTmIxb!{RX-o>Hmzpm%7#EdLl6H|!$E*3Z?OP_xO;30dt~zJe3NeNWX8L9BJRIU z#Q)6fnBa}jyh>n@^lo%o_dL?|l|jvvLJ)%=uCho20^jz3*;4waUta+5VN+wi~TcKw0<>1`Sm!>+LkM;C^F7-s5 z=aQd?Bb)qm9zUkS53Z`J0{Wk%a=qbOQ&#UJ#txyRyYn+{avQ-kFqy-8%<_kSS_LTv zo9gT9cSK5OvpU8pu@H$gw7U=T5H%fFZGRU4z}D=n%tF#>$UN|zAnR>B)ICeV+}zyZ z;o;T6>@UUF_~vXTqyu@*Qd3jm-Ma~6u{aiL*Z^sPu^5~>g!E5w~nOhHUlh# zufGryN>`Z(z?GYG!9TUd$M0^1tQ8s>8fOyTy;*lA;f869QTT|Wf&zSMLCf`#!nxjP z(^Sa7Dkv((P~XFA=xchq)MSePwv45-bWPXTXL(z#o~E6FLERvE&b{79fgqFEfU&d!Ym}p5P(r)sY!7W<^0naOmBu! z%g+?2RHBo2{pqRN{%HQqH1b%!wKH7fx|wF0y0Wq&9e@=S9PF8nJSWngVWvog zi{qrnh4QRuvK0}>kB?C=tq?=MorZMCL1WTXQ^PbTh7z!U-@ZcvbPTdEIg&8BT&;x< z=uSN@3%u9#2&kYtga6iWchdF@_F~0Blg$KgKqnCiNqk%!n$4DhiB|^AkDue#hcJa8 z%{vcvgAL}wvy7b%`lg6eq*!=Z3g(W6;wIB&SXh|***ChOKz>6x05ede+SRWC6)3}rTo*pqGu=V_CdM=nyVI25a4AC9P9ArJEJmqmJ@F>wK)#3aS!(hix zxsW_gfcrd)M>mGiF`}o&X;ov$=Pq?rG5mEn=tEew`NF14=mc3uZv?O^FwCVi=Lq+x zNt~sywz%lb7eq{=49ms`((c_DDA=BBqp>ERoRXIhkelO;V@8h^9V!)jx-E$2b{>L5 z_L6=H^YOJXIi{5U>jC_{Q^8-_NkV6S#U6#>m@)|S*a)-LiC`ir)M1JpTa;)mhTsf$0u4pRz)(4EZ84C6Yw*cpkUqra z`R+Pf+=2FIUHW^jnLa*L7(ew_jnbBW@tPacwM{EB?#~&v&+*Ix{KiFC*HNpzU7&ov zE5&+ehVjjUKQ_P9+$M_1$;M{$!_(S|_6yGmUCUqOw^7Ex2*|>rPu#zM-|WfoVw<=* z$=cd&If4lCoGyqSZjM3bZK*szH{iPCqj? 
z_F76JW0NDr&p1y*vBrrEr9$nU?n3znvqQO1_SZm3$!T6B;&;cwQEzL{I&-hjxK@tYB3k2o>cBVI^)dPuQUu6)B z*y)9gADVUtOPu}gPJKLc`&c?l;Nb&yN58k&NXTg7V|4?Vj8e$gFJCeA>}~ssZz~gU zW1Sa`uDw(1Epy01MxRRC8yg!KWEfSp!~OzjXxo17`U+Q1 zU0g9Fly(9?CyanrIF^WsB-?48a4 zY|T`D>7r4QdCeJkoSbq@552uxZGx&MRv~ak9B4Spty!0Wh_( zu^}|)Za$bL@!KutfrQj9F^Wg> z*hAvKS({BSLQEF|MSlwfqiLQYPGrMpVymmG9h<~OM4pyEA*8wg>g4w#=*flJiz;IE zn9huG<`<9?mt$Jw84=F-c~3|v|Ai3pfr8Dh-w~DwE)Gry&Nf2+)%lNM(u1uAlje5< z{>WKO6`Wp0lf|x}K%zDyzJxx%v#k*J3c|=2;1)u$0=xTc4Vvmi`-RKzg-%$;RW4t@ z(=;~Sp#?q@4!s>A8w{ya=U+cJR^@h@H>hc7($mu5EX4H5qQb)Kw7eJ11i$`GyXL>P zvJ!FIF)%m(1^M!3%V8#dN|SlYju2JULjLIbd$o66NyyB-CyHUTyrl7c&jX44$S4Xk zSr@527NjAeICePQc?o*gb8fw`}VdOa$o4sz@?@l zBMY3!5lQG5ScN_fAeo!|cz3?tYxnNFEIh0n@CGYPyX3u83Rc z!dJ}10*P~>L2N?#9s<>1hJ$QsT{lI?gzbVT)TUx!V4$OmsvpUCzZk@K-)>F~2kjO+ zt%RYNl*5&Nb$)srQs`o^{YM5XQ{gsI(<95~qaJ8Da1Yug&Oh+oRW}*q6y@cwvc}u* zAy+)p{^pHaD6J>aOfW&R`gD{54kKfSJX{4Y_1kGfOc)@=LEz%zsuYf>uir-?r1OWt z(FZMUN|K5!eye>yl(3s07B|E=OmZ-=EGI-l_<`Ywi+h6*G(+n+OW&8Zsf5Yo7NxoR z=>_G>qpe98P0W|0CnrCG5Jkf@*3~n3`&LFqW+r0`J&r3$#nh_J>DE-r8&)C@;G(zWGGW6hIvx@ z!PUHD(%}b^6xy$K7W>TecT%WK`HIYW7!Oh{8f{9A6iq||sDmikL)$R|nqY}I^ZXh+ zubf+iAeB9e58_-SNto__KcS^1pev2%MBNlg2|&3@AsrpCmXLW2Fv z-%es* zM)qB7+SnwO!ob)V$$v=cNXb2YaP)pJcJG}~*^RBdq;E%GZd_O}aU)A=)4`EuIwq-t zUw>QZlF??`CTqSxBJ$H?+`3MjX>|a<@vn0TvPoS7-nHl37Fiq|xj95*UN}Y5`?zVb{DSgi&rgV0``=%Y-;#*u##6jqUWG<@aMSs@bRPC$Dnk!l5 z>OTkszDwfLl>~hTYMK(NRZY}8sO%BUZ8d|gLS8yQ*|FW$iy&%Bsp^r~Cm{9!)k2ra z?FYTq|1eOB^6$7AJOBEcm5r!tVh#Js9dpLz%!B7dsu8CmG-!S z2E6Ty!8BJN4Yeu~#zkS>b%zkb4CQB~>28z{ zqR5W=`)+MxF2Re>3bk*ON8lnL!5~F6v>3p~1W!VWJak6218?{rC47MozPp_g0n0v2 zyu4E*XJ8nde&VbiWfDTtb*=JrpReL|Ublc33w>~rPRv;pfm^)a>wRGKM=*4^o&1j!E~4s& z+zzWrkfl!~)MZJm)-UXCSIiTM>Py%=8X<_w6Z^cr$3Jrq1Nk~+pBoecm1XemV42SH z9J0ejm{9!x??h9Dpuj${r*p5H@IgnRg<}(~;H)2;p%(LgaZHZ`f6efr6&j`W%^NoW z!q1wKD=<3pxzGS3#{5839z#qNI`hTv7>SDQ=OOR^4}r%DKzr5Pv|ty4k5>>qw!X^B1nu>t-8sVQ`#BW|0-X`*B-%RmhE z>!oC75AI4ME@IX(8&-nR#{H|0&_n1m0KaRBLEH7{bLiQtufGpl1CT!K$79V{r^(^0 
z;ccJB#q-ZR=GhnLry#0MA3t8Rle+-5@Z!&?W(u#N=$nf(s%a+(bHMKS%RX=blLQxF zc>^X1iY?N}+?Q~!c!VMmu`3T1JLsl@-xu z_NSj$GKO#_adL8o6YVdj82@YBIS)Zn1q-Bq4^;-)zdG( z&%sgr?Av-{F*j^WKW6Xo03xE$R_w=58eZH=`2i9yEfG=9#AXXJlOw1bS3omtq1C-P zRe5Ky$4tP$R|V%UL#JvkQ9&D zp6g*VUtSw2Rs3J2{AXsO<9av!d(JvmN|wVHvEx~(MYE&Dy^03o*};PDSU?LUI)JO~ zfAvO5+G79bD-;zJO2@53q#Ptnt@LxX05K5Ii6)vdFyyZ?iDb#~5;fiU5LkIe1vTKj z$+h``9dfCPgOeo8J|IKPsA2QyzOozdg=_6iGyv9m+{u8gqpouHASITR^rg7`dLTEz zk5A+wCl?6mA*TpAuw7V47#)q5e%R`x$2HuDBg#<(fa4KU{C?*fi=#+4f0YQ{ubOo3 z(6UOnUfvWZx5vX@>N7XWF9n7_f0nbKHdeN+)EVSqq)h{96dC|apy2)c|7ukJ`)-~+ zqd~B6bF1YK+anxT;Chr$T_epBDwaE1jlE+jKBXyz(8l@7wD`#3bdYw|K*+yTt-pN0 zBJu|f*Qjiqjg|Fwl1gKyPWh8E9nL~c`o?>YL1!JeZl$?*`?hlaFv#i)c0#PGfPCoa z)Nz_X+NAKWkC-A1qh)anxi@ft$&{i}2nC-aGy|L9Ra^eoP~%gr4ML!NeUa6yB6mp@uUFiS_0F6u{4bqWRqBM?XsfZhBek0hv#TrE!zD zWThLEk(XB$@^a##&?=E7p$6fX-d;{JJfapl5_(@&NWk2AYHiJl zD5$TmPwzKL5hnwpX1(+|j5F!0>C-I^s%hE1;+H2-s-18g8g*VKio)>rJ6WL5$W^i>MAi$;lM-~a(4!NlO=~yaDoa}lxKae_>z-!-4Rt$xuLN)~HOtO_Bg~x+_H!G7!>$q>ckzIUDbLW{$Y8q& zx_jdb6ddH?dmViUxBDtIp0f)Qx8ZLg-|L>w;Dmt=KePaB!lS3TXW__BMm1;WkEe13 zzl~rLT~hYj_dtNvq#TIU?5oG0t*hnsl5m+l*!1OK79A-mScxdlhjgE=xADVlBpKNs zFuq(ZsHRj;A@6wO#tn#-$kZ!@_$W)KYnF{0y*WQrLJ9L&@|6s{*_Dh7_|dFg@+bI* zXd+oCEz3Ssg24VDjOc7!k|v)Bl7Tq4Pr5kmD5xYN*!Z1RA=e^jd3boh%w|idQ$^#OVeH#v|5n5f2i+TvP1Z`RHK z4SG1P}~P z0N97t4IAnIqab6}byBG9E>_GnJW!(CuD40P>sVM9e+S>Js4Q}%xCs(&2OrBa?GDj5Nui+pP9uY^ z83L);ugIA=eG{=c*d2V+>7bsAvKG!TfuUoV6pPs$yuc#g-vLP#_Gu!NHXgM(!RuWt z?CcO6?OHr~ps?Aw->i%>xv9XJtquh%aKyF8=IFjC& zaN8tEwXa8bVqQdmD6qxt%JjMb{+zsFKT=TfXVC&=%{h&sfdjy)gwAk6R0k zcS?=k{P5oI+Xf%2LwpQ)za0{r$Wa1pg97DhJ$&7K5*e5r&-lEdyc_V(wL!9*j7 zFxn#|_DyP(uIgq^Ps!+dV+-fZcjh4xsmHGF+u=V%8*cfhr)S89%~TROjWA*E?f~rD z->JlSGl1634>gZc%$1uqx0)fWQIelCtY--n85o$^0|jGmHP7K7IXfF9fpQ(rTw#8? 
zMS_|cGZmJ2*s+-!6tuK(M9xcF@j3gu_!x2!bV8WVeEU74T_X4&bFrtlmw;aU1nMLG zAPJGj^XCO(Uj`YwuWf-hvdNwngNz35II?-j`+-@CrlWoCC-WC-OpFfDi2{ji%S~;Y zvfe%e2^=1ufgq+316r`@%J*~%Dk|K7%!qpyv@u{ULug+ynpPQSFTp?ExU_7ZD!B8t zBX5uxi{tM^B^J@l9T-2k3Y9m-d02gGgK=OswbYz(>)Fo?N5~L-s(skn_O+|Gg%B)E zOfr=dVu7L4Bpo*uPq7GS*I2zpAMzsmGDMs{>htI_t-heYJ>&J&HZ>0fNC2%#;fg3p zt*$jpzb?%)yemXiDtg9y7v=>6iS+(-6MqT9C>Vzt9H*3+olQvb?$oeEnCdN>570m0 z@k$yRST^^0@ovtg61his?&G*1(s8 zI8>Vo3cvg8sG*%s$TmZxD1g(xt%E?0mpmebfNsMfj8=*{Ffcf{nbC1hO^Gc59KZ)6 zU4DnQ)q0Rn%YRY^aRvh>>;{dZgAXr+KI5@C^2RyUeswv1PprTZp$K?-hME6pN{N6$ zQVqu;7bm!zNE9L}AVK3Ipk+zmP{7dUlQPi5Zj>{%ke>8h~>yAWQ-i0^0%& z$7o^Vl9G~)j0-bOPKKJ96c`hXkvC?C%>GPN};1WZ-fwKXGi(OreY+eG!B~iZHepZU{wI9hm|?O8YDo6$=zneKum1P zPuXfXa&N-xx<#Q2^vU62&1mt z7y*GeoAuEC$)BxFS-ijval9R7U#&MB_L$%2 zarsj9(%qPY*r@bZlAw0Pk zqPF5kv+M{2TH?d~gM(qSoXJ*+)6bS5M2+L6be^Hnq-5YLa-uocxSK`??Ma87+fth1!9sCAi?ms}~_j8=|RPdeP zxS-W6FWbI&*8e*g0CMFtq@<*DbcUZLd$wxReBdy&1EBp^Gq$-}jaE+yQ{m|of7qah zym~DN%40{~-iHTQhxEQZy3k*b?|=?3FPbQE#KrLpQWA(3d!RTBkQ%6AHl$pY2Gx$EG0Ud3d*C5o z9R<>qq;l%MDRc!NF1v&!J^xrX8SJd=U=0*AW&^GR z$-%5z`2KxYz%Y+QG$FLt3LTA&g$23nD1(G2yp+cn`&T{vK3KSoqd$Cuu8r+%ts8JA zy=`j;pHSA6T`d*dY!cA8>FL4Uq=vKUb~3Te2>>4B;zH(Q=4Z8eB`Ax)1=*?zf0VRg zq@$_Lel~M}=qlu@VB82#YI0QMpFyKNf|1b#P>sp8YXZ{PLL$$d->JQSQr6wob(PP9 z?;s{dE52r|nIY@|a{jNM*e4^E7c#xQF$@ntW*yE~B$a`1TBKj2rGA(0|4j1qH@UB7E!>_8yZZO(&T zNXFtddiS-sL5Ofeorhp-`4byK>i-x49pNqEJ{{tr2E3r^B01LVQ(1op!V!*a3I1xJ z=$S6xhc%1ek=w3c{=>SCDQ*pxnp?8RA3jddBIMTB*8u`RXv(ab^4WD!MjFPkt{HS~ zuynOJI4Y-(z$8l_eRc_i-Zl?@V-?brkog~Dq!w?rUIx@GI1T~aI{6541mgfLOfRw` zp2G_S1K}zr;5=k{lgW9(FY^Y(NghRI?d$eQ!UBan^4+@$MA}cEuH!Rw&Iep|0BL?N zZym-|^!W@iw`iiPbA!gLx~j1hMJq5AUMH#C^FFZ;=o}guS!%Fn%ymp*;3n^f4tjO| z5!iaR%1yRLzz@TWDzimhfSt-d`3?iXFdfnkgB4>?PLHH-aCX$VXW*XK~PYTi;IXvdf_KwG5J}@IrDIxF#j`9scLFk8WVnqmt(Pfb41-NIskLc zs#P|li36DlfU499d?-mwu{Hia=DRURvS>C27S;x3sS60b4I6!P6Xze`uwZ`w&-}|D zya76Trq6s|u>nckb2LN$(LrAfK)2QcY+w4&Z^J^Q^n7@ZC%$PX0_+v7sA9jmd&Y6= 
ztGcK<{}a$Ko$o+_!VBOs_#pt-4Tv`%Cj&Ug)83G7jkgh|pRXpQZ}+xj=PZ5j=?4yO zr+)wF=+SaJ)Wf4Gl^dT$r^%R&6a@tZNH79ks>~1(73Io4WG}3CXxiM2^8G_Bpl|^; zFI7OaFedyYP)E_ZD)=8wOI0VxWcrt^|N4JGL$`PgtYDUxZ!4tf;-y}_2a0HNCP{{x zdQvk0O@)fnq|2h$b*8_;#fhnyc}=*JCysL-2+TGY`?GTSQ>&qju6rHiJI@_2#7(=b zd7M_tSF}Dj=;~wS0Ub2q5mTuaQ$k+eN59TziExy}^(OXz$I(j)ylJzc73M81?hix^ zgmHe>Cmh|%><^!x!2AwP_UJ zxsTf78Rb>GD2p!{DFv=$+3s51- z>`_adS;PhG`~f8nfI$FCBbyjN1eKeg59qI?%4gRbjFxpK(0k2GRmq8nBrxpFieb3# zQAIcb$gmh@IHAq+^+@LDPwZE(fV3$f#tKZl2Xxs0hdFCmt4-q?6Mfx=5~5V749w!@ zpV4L_UcCqEN&ja53}UDn*Dx>zO{EQ3s=&Jy4er!8x3O`q4t4MJ83GOuFn0iCU35M~ zJ^}nglp!7f8qx62h8uOs3WGhkf7SO-|`sN^*z zHH=BO0I7w9kqetcV|0T4L;Qd-P&xgh0WBwQD9ktWiK$eKEMAIkhqPI4V{xWyi52uV zLa~r=?60%=Mc>OA3G`uEHhkAJAFHL4;+vZr;1s;m0Sfx!;xKc$-&<$+KA?Qex_`-d zC2&@EtbJsR!0qm+Y0K1~A2|%1=2Y^cU z_3g-$@?Q#3uVaeiY!5Cf;5_pz03=)hQ3fn0@YW1k%sLI5ZSe7+{t`GX{hh<7&odmZ z5tP?MI@@HJ!8o(4&1cN@J$zy44HOB90Iq=bQVftHsa-zD+&5*0qx+EpC@;|F%gf6H z@Fc)C4HFt%t)qTQBU-LTR!CBPpaFV8YEOz6)KUMTcYW^-0RoRFfyshAaQdYap% zjCLW|SN$S1u8vWQ20gw)0647piO`Oidc)y*;FXCvF5f|FaVzcy9&rND#Q<*%u=4@4 zeriTWWCR(1aCen!f1Cm7%9&MJ}9R)6}hJZEf0@dfAo7mQ;UPq8A^@yrXMR0YJgR%%}#MEEEV!(uRm; zG$XhP;R2TvbSHS*{|p3p1{masx9g0{nsW}5`H8-=;NswHEkM@00#@AYG}q?~4ko&P zVn_GiqX!p=gNc(`aw2lOSJD1Mkz0}ISluts(SiCD>N$@Z>9f-DQ$E^gWhSqqqXY0= zQbhoI27s7-=kIf>9E8d^`2k1EF53)H`rYF<9!F(1;@!y4mxhT0!?S(_<(0{J0$B~2 zwdmCpE!K)g-fG^?pPTW(_4V~j4~VI$=@ar12vY*~g3}WTwVTvg$_53U20H)|!y;vS z2Oy@rykx0yeIh3i|EvRI3evA3+AH?Dv?pD~0RQ-`HeVvd50j@2d1uvd0O&5B891Vg zrvs!*{ox>^SiorY+$$Vo{7*q?*a1nw1p zSWqs$2z?3D`McIy4oFgg%T+RdDNTj3o2`EnSd@Txyx`t+IaGe<;2U~b0U#6xoYnwc z?&p$Pjp8vw?X(P->wy>f3|02sWQ~lmJ%{7<>f4eT(%uM_B$iw*U&JEu`P^ z%&#*#CMH|4@asTL{=+6>_~n06E}#Pi($M^)f$1LSIn9eBivKQhoO<0GMv4HSjLKkW zROojy*DuDVrIi2`G)*#p%3>b<0U(5cv>`xl1WW`K2HhU$EdPlj#fOAxIR`R1QA%U393g8U^H@*{aDpiUm%(EVy*7al^^ZrW>0)i6s07?Kja0ry* zu-BK)AT1tsBLubP;eY{?!h=0xkC^5~w`L+;D8Ao8o zcq3{ENVkBzEJMSO{O(z(fIAP6N;m^;xqmdED?U9D-CMi8zugB^OV2^hzdn(T3E8oQ z+><=4&qMV$;(Gim^&${P570Ph=>V_ 
zOojpA7=W>Ei!}KEujk}0O*s7Xe-wJm2Y>?x1p9z_Dd%n&F?s5e@llL`S?$4A0Sn4;x!a8C803g=dfC3B(jOKv- z7)Un>xB-$nfEaPK7+WLi?fKz?+nfA6{4zN9;HhM)Lu7W6q-GU-MfNVcHD>UldzIIv|Y5i|!_tRmh zo?j$V_g`)b0RTZT3M|!cwE~_kT6bUUg6i~%_X%sUkz=iV+&8!Sy0d(IRhAU%pzHbR ziQ#%{(@kS{d4sSkX?wTctzW+IMnYJIu>Rzwz(pIrGbMrTKVWwO$sfAg8$!j<%dtiu zPJ<)6PguNrFpqJ|5u?oi#vb8^xxF+wIr|z8#POU@W%)bvq#ja2f?AfxJs($g1+Cdl zfqNB)w(VCOTO8f+{2M|B;rLym$eGbOZyi_kQ@%c6d)ZkXhQU*U5|p7MQ?QijD*fkh zt0?Tbhe>u*a0tj7ptV8rL~DeAjpqM8PTl}(>mI}WJsAKf{lACpe;wrir$0M1u;-W- zNDZizFYdRgYvClWZX7YJV1!aYmtVM-n*dvnDMKuNzs6I%oEe!%p<&z!rkN6RnAxzsTP+Dg8Hu(N1OBA z*=m+1`-T&enLl@{k7}FWA|G-yS!=j;Edg$!b$VuUda$M@B~_EmEK{1f^k=C};=%$; zO-)+!wAYcM^!jFC|XUmh8K!80Iu#WC``yah}T9YXRIrRMdZ82=dH&XEx|IfV(jJ;&;fk|#7 z-pB63lm%3Jvp+;B~B?Pfp*W$Mnjx4?LSycH3UUmcSz!`CKKl%m=b^iBhxNDH zlHblT(jrgKd;(Z6J?*4B{Iq`kd{X`Kzghrfi}Oy`;jG|U=#Ct`Unf;OiVEXqynQckC!Ibac!{fhT-U8@u)>-b$)4InsoBKFj%W-Q9`IZLZ z)1EsQ;Ykv!)3}P%ybbaf%`62J|CbwP(CZJ zf7#>C7aZ%PX;UX~(`EHoo!GP7%CL%1Sz9LQdGdx$d+Opnd5=tcs`VhfxxCkobWk&I z?q_c>W_@tW7fKdn`zEji!-o@@9psq(5dp@F z6da~y<_7}!Raw-@MWdePsXZL+M{(|X_1oY%=v3341ravq5bj1iOpbNg_9=r^eU~-fR@>61Dw6)dzczKdc*(bob&4=}G12_q zLi3=AhpC`yjmL=hYO^pq$|k*a8_vrw+-7ngl3JJ1P`EcYqE(uitQ9r7=y8!(WOU-I zws)>Lb~Eg@>qo{4VLO+|r^fJsy1zHGcBdAa0=0u6=blsCt*P~^ls6l;0vt9|%~N__ z(B6a`PQ3?x;9I>iWoGT;dNRM9klOuerE@{6k3vT*O5_(dl2&2oqCmdgMM|j6Lo`Y8+N?Ri0p=+;qzRV0_I6aOuy zRyOSHKBW&Zd(uN-@cF!jl32)mt}`#4vj$!5eC|xUwxsN_jT zHe0xJP4imOLM}fJd9CL2RufKVkKR@HAXF7-#%5`+v+8a{5uB`#YAy!;R z1uX|#$u{V-lTMABr)oH)VZ~;@VJ0O@iLG^fo;J?C3y@?9SnnT5a$d^tAIqT3@HL36 z5R9zH`*J=}Lp&@_LpfbV+mE{c1M>=u8mq&3TQ-DfJGXC}x)EKdM2_o&TG!49Rn`5pa4v z%iHtbQ5yI~z$i;dk`@J@+2LP)Zk%bP)mlSIikvLVL(`AuyV90Ffd`YCniF(f^Ld)@ ztc}5T>ibPKICGTbU-Lqc##?u^3xl-c_Z$3@ZCTY{&>7Gp@3LGUNCVyXE&~go;8azQ zPsM+^r-eI);7J(el>V$eC1uGm3w7(8 z$q|0?NTG9BB&o+a&BB_dgpG#&2sgPsXAS;~nU84Z_d04iX*9_`AMAQPNa;MKnD%*Y<%jV_sP%&qK@QxG&FWHj&@T7B zI&@b#eAcj(Q@~}i_lzd}+$42R!=+^CbJ-|2Y;^7tanqhyrUl$C`0bJGN8y`OHCig| z^!ii3T@&p2ycDLU$qM1-8kQ;UWB;w6{;Y^L#(#)2^3A20kc|~ zC9eZf5bEd&A3u>1 
zGW)Y{V#}=#e{IUxY$zkkiJRR697ta}MCxVBp!VyZR?FC$CGTO=ak=L;rGk`$kj=;e zh_E7YeN3IRZHg45D$zCA@>8ksyzOX+U7uhQg_pq;fALqmAsn81y5fg9YL<1e+15 zP33|2`)6J;5>1s_eV=R5;dzE`eKPZXj%U72K3z#MRmgo>coT_vJdeg3;#|4=SNxH#0zr`P z%Qq`uhaKng!IUA^s32AWW*jpy!UUT4_=MtRuaz_tb*hB(|GH@Q_|{CWJ3c8nRo23K z*GempoxGUS>G<$TqJ4C$%+SVS1}9yO8}0n622L`PhsU2Xt_fOFh=9!)rkH92DcUWS zK%(EWO-Abl{sExH$8sWi?}2iG=sHtsWtMWXxpoxT`-l;y(ouwS!eaYY{Oq#+ExgEn zQs)s+yuV#Xwt^hHeXW&jgbsTSj&S2!b~(te{UVb0L%{dQU#rgvumx8h;sv(#SJ z+pCx)sU=nlOK9TM-vR4~ko&ra`K6P85E2UzAat^Sl`swl=OX$yMp z^DAW_7wydK!$$UgMU1xUtd=?{4KrLU)O=#L0SJ)q27GCHO{{R`Of~BzR{cagieq;p z{^MxWAdI8nPH^w+@ZyE;%{qU5i>48+L-Ds#8wx|cW=E?1LyuFP2Vhjdv4VboB}rUd zMPww%HOeM4wV6;8ua~C2b4YNmitRPTWh(GzSfxCt zEb-8)TwCVZ#Xj#oj!CK|$IhKFV`SoZ!tN<<{rWgdM_#N=(FpUhEt7}m#j#qt;Bxqe zP3>FMUUz>YSKL}Gz?t{L7&jy^)WmdOm6_?Pu5%tQtnp^^`u=Hs<(C?+S$XLwvuXV+ z5p;5w6y8T_6T*xfF#>mCtYJo+W0+9)r{LjhNrz>UXK>9@cd?yP}qGGO-i*qae;C%Wh=j$DV`gnh)xi9ZdkMBG) z-LK$VzXzMbYq_$@`Yl~&wuqGY{yvPbbZ=B>(W~ZxU^1*_smQ$`)P%N_%WM4csld1O zhxs@9P8FlDcbC*-I_g)wrzK~u=Tb%U19a(jsUxt+U%1o=^}zYpHa-Vk9tl6`Cn z%+%9hg`XHmJkp!;HJjIF6rro;7yGD%kK8`X(M|IQq__{2oe^uyeIaJx}P!i;I5_9ad(p}l$oIyV6Z&} z3$H)X@1Rk}kT&O`om90bip1)Q629YqxMb4H{mglLJIbdKD(5dIC>Z#PUl1mg%@(>(#NMOe>pSxLR(IB&1*p+`=ifV$@2DaWMBTJ{lX*G&1+1>w?I@N;9L zYHis|fgSeOcq5hku06iLc)p?BeaXVlRFkPstCF@l=WYWzOfUusJuL8@RY0;~mJ?)K z)Kq?d#=d<3tW`xGF0^T~q4Q;9E}!m#DwCcXkkOWpF&6tc8-y@pl0(0Y0h2XGNbP!` zGDVfQ+{Fe83jdmF_4^W`cbIP|XU>OkKN3@GU(L{S|J^n35k=TUU?=F7N}b8TnK6-H z1R%SA`E_v^KjD7SUXp+^zg386bJBjORQ9rHs>N)X*wu*W>f#?Y(H_(M?3!gufX+s!3U;4hk< zbzLzgl(p#WD<7G`NmAn6koMO(wE%YManbz{rY1_=uPO+S zeRPu5Ml>wel4schUdX^7pkz_8s%kac#`_vxE0bD-OD*19m>nUu6>t9E$RojI%PJY{ zMhVzdT?U29*`qD>l3^=#z>#8ich?bl9?HbB_3f@<+|KH(Y08yJB5a!wHX&v|jkAO- z;aD)3&-tV~|8YAUw{|}7E-m&^nWg^oOW9)cV9z%1zEfN4(s7rD7IL*hH&i2BC{b`r zkea?^mF-ex&TFt!F3HE!%nV-FKFspB-04}OFX7h(XOV^6agSlfGS_nj_M~_AA7#3e z7pNYt_c(b!*=@)>o7{Kkxhp%}vO-b2ts>yH9CNox;=;=h*lEFu5 zq5*5&x$0M;D7mNfsvlhLLm6^q%lR@nZ*N$!sirrjFuVM~-Br%Asnfh8*Uql{2-TA# 
zuFSXt#dD^~&j?w!+OH;DMIf~ZNYA%3toI;XEI{;${D_vCYw4e4=VA6T-U2NZ;np;8 zcIPPd5=l*Aqv)8w1TT5=yq29fHNA2luRnwc?JilEa2d&JW@)JRbp{{9%AFtRX~3o2w3b z;|)D);9%d@FNQdRK9cn8^{W1aV2n4ye4F*sugB6G_v3R4hg>UX#!D>06?(E{t$(@e zH%7?zmz!6YVRANJAv4t~xiP92Dv%L45f}zxe{yScUlB$OYC^w6F%-;f5>Fla94J~Q za3$`QRli(P|2+Q#^e^%t0rg<@Bsk4-?zgI0{81mX`*$mkTjT}_!utefR0qpG+Q&hZ zMIHs~0n*m5t?85YEWHp=_w!YKD;eayRX>Cdxn#i5>Od( zR{_W47AbYvN+2LeJmP}2jm5fQQjYAt8#>at`#YIl@S{7+0=ZmhTYap}2Sx-|XfcjI zV20*@we8;*OexYarjniN9B{)R)~I0FP$jUU$m95v_2rMGA8uOa{Vcj1JQ(NGtFWw4 zrMUapSR>T#ziB(u#)Qv3$t*KC(Qbuf2_L;`w4}$WLAbrHn3_F|(y6L{=z!Wh{xURS z_DwX{zns7nN9-3TX?=xTClL{l1t!~i(884C)V^IhEZtv7(4b4aXNl>?kT{elp7ndH z99X!O=X`0=6J%<`Gr1@Vb1$=&cC_k91?$^yI5@2c&_HI6(f1N%njU-lQP3rO4>zWD zo0X{8qZV2_3$Y-r5trGio4E${WnoMO4I)kQ>9uyqsQh!pe#V4o z!e(o6@{TN3m;!Q~C(N=m%uy^SE;HzPkU?tmy$;PX0ZWhnVe|Lq<*uxtxrP_ z2N?4~f_6e`ETJoIhCV`48$39_#K8ny>R+Vxt7d0__8Ot+o%CR|pL3j6qy8yw3O!Jp z`ZeWuM6?i+ODm~}IaPwaK5%Rq&z;=L=)n@r+1Q?NTnD38@R9L@L-eS@dYXr3En=|C zR*Tx(@z#HiKb}<;InuSL>GbJ#ezugsHn*0n1ag@i@4VZCZp#SwJX$k>|KVFZ_cX(>~S9p%2#TXtx-`FeY9;Wdd$tLyB99{y|#m(68cIEVVz zycY?Y?*5Bs<*|u%^D{xMfYviO%GXcZx+?eByHv9gwCuvA_w5H)K_NWztr?M!0Bm;0 zZq?d?Qu5V!MD1HP7OVcEx%Q2bDA~+r^PUJUxoI82oY}vOc<$wegIAa+PF*h0p*yiB zC4qKOet4@$=f{|8NUUaZAg+rc@PY_nNT(*fND~#d||t_wUbZAAkMgs^IBggNV@7)1FCX zaC<60*D-3h2b&}voE~@%M&xV7x_Bqu9_V^lxLVi%t5lK2O(~*E>+?elnxQLhdG z6F7*))tHMtFb4Zgc{8}6LGfGhWRLAXUXZkk>H&4;9hOlg#oemRV!JN7(Z|Lf?1E$z zp}BKP=g~IV4OJQIKe(PXc`WS@E5A)_k=K2C3Ov=CJNnC|-DPs41n{K$qHlMn`OEnu zP|L-%NuZi{ZL9hZGrn$yd8E?IzYSggehB#oB<*Cegi@hsj8~fPt#>O zdPo^irWY6bnPU3Q55SCl|3l)2fjm4n7j`CdI8ah+yImOzBFzv#+nST9ldYfE4F%^b z*-DlyOQQhV0EW&tjvnPQ_Lt#jkk?bM)(2f#xH@yy)DqOmEK8>>@4M89tbg`#duEki zGkX}=^`8AW*9&K23QNM29A)UnEKN*s=sDFLTfR~vp7y9_mKY~62g+EEwotji!Hjc` zA_*gAqOi-(1*NTb%EQ5tMbABli#M2wuc6Wbee+#4nr-5>H_=p{3A`k`Fqv$KoW4Ed zEHi!H3$g4KOhtrfaYNbckX5)sxp{vzc2T}*l5f1Lyghl5!fderL_TBtMuJG3 zX%3$qD)wFE$4*OVaHIZUDWU4@VPh%DR3pUT)I$}Xz$>Uut*I^ro5f`4-bxG_X28XN zy1mNRTei!yxtqypI?IX6gnCZwm3FK)v>l>$#(p&T*;~|r6f>G?zoOzIgl#6TAXF;4 
zr7oAUT#cqAGOoB|y00icpp#(OtIp1&L0t+4J{L|2U(ZzE3t3k0m7S-Qo-DBcXTl@Z z75wn!b$}hl>%yLg^~PSS*VIGB0>71dHG_OmmFs@mA6z*R>L+ehJYByEf>n`w!d_UV zrqnh`(S;AkYEGk%R_R2>YH15YhWIPPimB!mji!XNLcvS0?`0o}ssk*nEG&|Ro{n=& zOVXYCOr@mJHgW2Ec`shRa51@WA&KK7j!=3ievP9pF|6k*wslVHgc}b%8DI$8yqg_0 zeOPUT)E>``ewZ`0Z7XmVaO;)YA|5)5YEvxnVyv&BqE8$T&?vTCyk(dD*V*|vNDf99 zB>`>-*~pOHXO6Nm?frH&ey#6Qa&tk zem@7gz1j{VNNCMas9R<@Tl@3YI=NDMzb*s*C=S-qVJ$QuI^7u*0bA(YE$jvGFE@8k z&GWi-C9qtsTGhZcz}yohAz05 zWQ~rOUrbC5E_456fqz!iCyV%F=ehjqT!;(o^{M z`EZgJi4f$=AAa-aSc_l4Lmj>ge#XIk4xKXvx8xdrB{M(m^*?JX)*WK|u`%G?s)+Kc z9A|3bSL&btnYTzMOOR2#$?mhm=}S$^$yAKb8EG3)p46DjhOAN zH7%YoijP`!91#6c`?%&UvSEkH(L61=E0FJR*($;6UVDW(BO}-oEV$Fj*JN)p#2LQ{ zR|gcP%m|?$*w&$)pSoQ<`Q}X)hNBL=L9GwbmmH|7C4cK^3mz31LVgtzs4LfV+{OEL zP`l5rpJN^In8%HZCMV9#S)|!EH6f&p3 zu)ay*E}oZps#|)54h3!tkl1glMmKgwlX>!g*Kk`UA}Cc0%Z8-%yE~?wbcGi15*GZ2 z$1PRwr_9Zh1s)u?<<5GmbBz}001vk&H<*x=Sb*}izg zl6YUh@8|I8==YS638yORe5z`bp4Lo-_41GEWE};wFf+*~_2S8O;b!6vmDbgyn<}dn$t^9DGCB zK_ z0i(@^S<3GmzOL*s2Rg{Rpfa+@pb@dRL(;{W-1aKzzH-*cRXT&~GSJRL#7E0&*s;zp z%YV{Ru^2^ER=CDh5P+>)c|OoRH+Lhb=^!e)e zLC+I2Uf^Zm012h;K`VP?z7~lUnj|@7rc4P-nGlyJtK*G?<1tQ+D2FOYh^6yHb7;Au zOLJ9`mkRCy>LhRa@?0Ge9>Ix0{l#7(haG!uW4d0O@YZs39o1vPQq^B0-k#)i0pIay zW^A-T(OyI3X^>ucJ<{mmAhaLtcaPYXBHHD5%Z>tz8pHu|0<;c0q>meU_EMOry!;CqjmbS)=WHG_Aa%cIOT{hWvS=zX0h1fH+NWfibliX6cw zEqezR)CNu;Y@IsX?RnsFO2+AH8*0;ZB!{ig`-v@X%x?7{7niwgAJjs)O0cbfCRyHy zHTSaD>^dqJG$N&)%K>b%(}K?ct+v_)r-Amv0N3MJec{HCBxzP>?9BG}2Q!KN6&Cqz z_aU)ZGPCNOcQs`(w&SatMc$`3*R!leDGCaiT8m#j%MP7hacerw$woWuHo}7`l*KbQE8qde;$5K}Akc~J@Bn*cyt=pB0+BD+c5WtGQ5UTewq z!ov(u`k*C@{6xl7O9v_P9gFCyKpBEsN`w6ewa?bqZ;-u)3tzM1NQ-5Rw2boU$E7~= z09_w2x8Z82w_R_zT6Db!~Un?> zQ7e>}B)5T~-(IQA`ZVMFylXs6Sn{vSxACNA8Sz|%Y*M_%0LKv^SH#pk+UWby!#Afew~yvLK0OBP9XKo$W}RpG zU6rjdrel9Dt><@>sEY)1$4$5u7GqaV{>SB(RC$bqUmo|c!rhls)S7BT#&dO6hyYNz zx$ryoK&`qd^6GTeosq_NnNU@pjL(&_dDhdUc2W1J=FObS!a^&IAS?<9CuS@EP@^xH zzEAjja;#;rLQ6x#OZUg{!%xt#y5p4KSWV5%^vh(MgbrP03{eL73UT`>VlH1dz4nyy 
z3Mba`N`_l-5Y16OE#}_rsfS>rV%)jsMBCmOYyr`oM`i`hJk6Om%b;Bi*qF5cNf|Gt z9J3_=;%01%^TNoC`aF8i2D8^+N$U=>+CF22}Q@a0=xqwFD+IsOc&v|QumYHe4sOslk zz3OSeP^nxiWLRB0Q)?kwMLT_jo?QFFzF zC&BZ$zGpgiD{8071az8Rs$Xq9Du;Z`-i{^XKw?aa@@EaZM1FyQfCt+_j&LL4|CptBh=rGncQ3U+{aoA|&o6LBJ;+T;Q(m3t?t$J&(V; zwY>5nQwbHZr=Wm1D5OX7*U>qV`jz4FlHk2_corHela&- zzl%t0{EEm%+aGv0QZ~D6jE#13#nGa3>y*-u;K7-4I6q=-%e?4M*xkq7{H<`P+>|DJ z*<%@DQm`WGo-VerIBzMrGvm3fWc`+2Z98rY`^a*TwGc$CCMsZmoobPg(>NQ2R(?8%K?bMqMnRq~mz#5^aT~oGr;d!Y+qS?R za(nfV-^r;%?U+mSJ-D=h2*l97{!;D zEbOX@8?#2?VReu5zAe?gJ6_DKyVpgsl^Y806j$|5>?|SNB`!5SS`v2+F01CX6sg?H z1@(CSK|Sj&V^lYTaWY;@mhhadt+HC2*Ks*`sAs0nR4^|Mjb9J%0BA_^<&16_t2S7t z`8a2T%Xag(#<7Sf8t7?vZYiGNokPOk^5Au+?8&_AKKirCJpe~aBEn8xzI|(L>{74p zGnwuDysozNvvV)1NQxx=64KCEM5152X-V34I>SsyP13t7X9E)3Qmz^FOlGdy-Dy&+ z7V(;~YwP#7VdfCvb6$kgaL72`p3&97`;H86jIT^-*|a!0g-;Cqwr=b5O8LJZGjPEG zt;&RwSqWM&BZHCM#2c020|RkcBKaMQn>!f-mE~rObKd|~YCgwP4myX0Nt>edLu?Po zttXHjVscooIt}Hz-32*H?Yg`jhWe&O^T*W8b>|@~&$bvFfqZ3k{cU`AL4AkIF8+AW zy!fNplCt}y1XMGYn(>J=L8|W8|CV5G>LL_@?k?*UH7yKmTSxt9tdfYj_g=cMm9IoBz^W&avBIO-kPIu+FL=sX>rBQz_H1Hp?krV)dk1 zsxwNbdK)j?>5(=ShyX~kDH_n13!QAGnt}tGor4H3FMEpM#WFHiD0RNZpL49yAVIEf z#Erc4Pv&IHGa2XUnVzPFgZ!&sr<8-6*0l95-0*|E=DQ4N9 z!*I6~rVv9{agP;}k+g^!gcFD9kAuLF2q~U&scJ$bj$Ut<-m@HpB6ozw$nUc zf2Ye9qfXI1)e{9}g(8EFp=oiBy-`y6SS=1N?Uu~#9MM|2$;7BdHvPq30We>r+VDf=x^Ea`*?j68y2H%GQw&|mzK2(Xt>ddN%YKUHHQqYY z$|TGVe_X0dQg=F}i;8$FtL0Xm=*2fUG-N%dd%lJdc}hG6t~xWj+O!rhqd29W4j!uj&qhN@H7ixL1+N<`y7YK_E)}ixE#r)PlXRO!UXGtC@bYA!C zRE|VdEC@IG{SvPkLFPRDql^ISE@;;SOLIfF6QlJXYyBW3ENBDbAQz7Xy3B_j?zaQ=x8pkiz=;OZ|lTHVVU+X6t!bRP!kl!-6TSDVJCh$H({^unV)i%cE{(pHu{smjPV-gO? z%yoZu&bz3f-_>Pr-1+IN1s4xGHh~DK+Ma+iR5B0e@v@r1Ob~brioAl7Z?W)iNX$W? 
z@2X0%jBFQg?gvYI+T;so7?^E&jW*wf?eE|THyP2;qxvC253pYkwd-r1FEP$Yy{6|d zqdK!~Cjw46m`tV*CfM#8v`2jhwiiUgfIb8&eF%vWRJOE1PjayLy=o z!DkM6LMR{WlK=ji#$UcckTgbp;oCFZ)a1N)JDXBb)nxrPNwm0Qt(EQ5H=Vg;GNm84 ze|aMfUf>a7v^klBa6~|KZN2X^`o&gV+vdbRR#=2hwZ#h3^vS(MAEt~N=G(h@lOLg# zBy1X3(^E(&Lo!%Y{1=>P&lj^!#9Yr14PqFFKC~P^tli}@zU8bnY<_5Kig9~dAEQUp zbG^};D5PRI|C-q4#0OHEP)u;KeTebk7$8bl7AK@cgIbw(cy_l>86x|O17m#j@id|H zTCO??y2QHu;u&b;&BwIwlM)a=XV;tCmZ0}%YAjCZLGCmard|yPf+MojJaem|vAQNM z->R5n>W%Da9D+!`kPn}TlzV1v(i70)G9xVSCJkuW?{(qKKH<WrPc;%Sao*kw^qo6mc!ZE4=6;e$(0L`VcH zi5yO%M^5gM`DY$jtayfiaxahFkBC;Uak3ccZ}dMUJQmUs>LDNj=zd?S@Xe$)^*olq zm*Z>@a!4FOo;%s&`KKvnx=jrfzGXX()4&W-6{sAwtLge&kyNzu-y=_85wb`8<+{rlkY(P#og$hz5Ucf{;BEWE=_My zU}qs0O~rZIP6@wpP?4uDheQzj#ktJ79`CR<VFCmw^=VqwmysK_%#7W}C(3p(VY7XWE#O#~V3ert5i5872RQ z{bJoTg3aYnda?N|-jFq(&!^=3j@*Kf31~RKw6aFuB{QIq#nS+edLX36lG9(Mp+GiBpYN%*3)J(F41z$n|b_jVi;4 z+f~X+8JxqW^rS+LhHYT?Bwnsd1}SLIZhl8EcVXJw7(#t%lvajG_i%|u=2XAj!sOZ) zl9#F#U+*jY@)Yla|IIhucradsE;rzRqVR*AGj^7Iq3kg%sn9g@p9PgO`lkyhWrf{x zUC!ArW@?GOIq-d$a1K~E+eq`8uZm|il#tidqb#WkFY%UWx8*S2Lypid{t?t ziLp@BL~glD?Z^<%w++w48=GgOiI*j7`%k(1-@a<}im85E*LvC8e45@|Fp(LJ_9qC< z##2g{EcwOKNuJEc(lbN37PODT6BA6Mx)^o8e8DXjtJ_bym;VB$QUgcSQ85wNKMB7Z zfM70h3(UEVuCsx$vKtIMPBT6@SS2R=IPt?C7UN5prb6U@#_Mpn@-w0vZm{OEW5Hg6 zZR>GSp;kxy?gxLK8<}lJUEl@y@kVMR6xh^ahh;2}Qy-RAr{26<^Nb+TCdt`*H(xRs zG46h^cY6R$gG_O?w@YB~`NGbI&!otRzXX64Bp1ZHg6W;s%X(JBQ1*wu6xhF%G&;(f zn$q6#)iraWbqFG#CY&3oQLhxf3QP5h(CY6*d>s5hZbLc(b|nTiP`d|`SYEdXzNs5r z`$LJBLHg)DKhqUBTTE_9SDVCvIMsNYlFd!)u*yp^@r?3s+G>bfzXB#?HSI{gkY0Ju zinp)*^b9IXVwU@^;{Z%hF>7>1Uei6f{>^@36X?14y^(z|`Y5MuRbNTL@&!E~n`S;^ z4r?b<+AUdh5eATar_FC%sX0}#RJBiL{HChyDP)oFXAc&u$)!CoYdpX2e9|mhH`ZSt z-DEwy*?s&;yO*<}=IHk8@2C_gwQJMuYasIKNYTCeQfRU;+Au~*9rs(Af1o?f_K2su z*Ga3Jm#}GM$7(|O)!}=0=P{Rz7*~TZ0on?e<5c{ZoS%^+AtocDOKz*Bl#fRR)LTI~ zuYV=9(BSEmx$`(ykp)bh=e=pX_>n?|iTn5ZyWOBb%vnduy&n)8(J86;O9NM)gpT1f zFo8uU{d~z$?tb0U+&=KLnL7;ekm6CI@o}$!Anx;#iB@$(xN?xoI@cwd3bKnqfjBQ~ 
z5W06vviackphrJr7=&LH{>J>mV}Lh0+xHW@vF6h~9G@Vuq%Z|ta$VLO?PK?Zy*|}@ zg4UR7oj})qUh5v;4-S`Gn}xn&-FVccg*DtfHzC_0BiS|2hJG_s zq%aJBjZlP4PM~ow*O8u?@sumo8QGaD6E?dF-qs{x!fA62_Sx^Hiz}^=>4^q^I|DX2W@jb5ZdjM_Fn2l|-Nt20f+in`$M#G8I*tTukMq}G{?tH#_ zai9Aa+~+qh&)Kur+BnPbnmV!O?(BK7;c6Zo)(>hka^W?_NvG0vw*+yQ?D+Sk?4TGH z<<(4U_vbwtJf<|%o*#8FeQ1Fm#<-A`MZSM)f8!O#nU}~p+lxh|xP7B8!eo397klnq z#{b$kYRuboHlja-;?7gCl$FePd8GX5_OdYMV0m&wM>39>YAZCSM11o$D$uI_H&fl% z5&iyJl*9vd_uy22&hA8Y%)y~72@njK6UK^uS9vNK$?4b&$l$#KejF@8%js~{=d_^L zj~md$>qIDyrYKCuVRB9!cJ68wv4y(0v)rKj^lzLL7~~pA(#hAa;c+g_vADi-yNSuA z>>HIlgd$ToI&MpBrU2h0P|s4jG(P#WQR)U%xs`k#ok@ z5%|`deg#PGNjmDhP~{6p%N>A-nCH&VG9m58pS>K#FQjc%rjTpbMFxp@iN1f^_ZN0@ z0LZN`Ex&76ZQj2EY;b5 z_D!Inx|I%o5e^mkvQ1@4=|Z1k(*g(2X-J+ef71UeZS?)S!grdaR!pwT86D@=uLU{p zhzuQZ`UE9sCpS&g8YM~;-$p|Of~2s?O41OMyeGPR%x0vkI%3h|wyw~*i?qp&Qvimf zO}~o`3Mnz<*UeTpO(K%Ny;Ur7Q|Ywu;(xc+Fj`1Tb545X6o#1T^E* zmc1Hb@iiFf5TO4pRKQ&}w9jIjD6|?!s}UA7z8L7M{CtGJBy!sSy+%t^m~QUA;M`&m zwRB$=%($}mBT>BfAIB=0A=%?vHbXu0<-4uJ}YW}clvjxqCOiN}D z$vonZh_9!ce6DaR-|4Oc@h_H9kYm+gA{CD|g-K4m6NY6IaJ>+6IUW8O3B|`hzAB)MI$T= zp?ekwy!>j?A4>6;@0=aW`Jle5okvxLVpz5-B!)k*q`v)Xi%cG_Ahu^goPjsR9-Qp3 zrijbp_Iw!oN?cG z^3fIsV7il&dxt#IERcr+PnNF%bY8EEyt8*VuUS5kzn-YYf9%3(9vlqCcCmsE?B_Ds zr2sdQ4-r2_;%E1cH14864a}WjY;UH{0|82-EKtWt2x0|3-3A8Bp_y#C{)Za7rM0u} zQ$}>x_pdZ-PLkf6Z=%R=Wz1a5)~$wXH%+gOK8|C4n@o)5R|nv5fvfcqZHP^N^Z#5DY{>Es}?U^TS580qJE&ZLF&p zE)T_^mD_7w9#e+mOf}MkJ?c`SlQ1}z&PzMsR&@mwJY~xy;hp=SorB=$=s~uuLVM%{ zJ@!@sL6i!pt4Sd3TX3`|y&=qYoP#W478~GNiUj3nGaMEx!%@*+@O#MhNH9}R+}TSs z)VkL;r|@oGh@{VsAFTpjKhWB}hvEtW~Y^7G7 zvMzRBYa=Hb&1)4y=A`$PBGIl%6Hp&{8;X`8Fc9g(&KK~>8)=hK<7XPQbhT`K80?l^ z2H;OHC|#tu;4#N)3+`}PT(!nZ)JG=Y$n+?mGF8|LYDsx)OC(Xv=}NK=Wf6^&e$d|oHO$cyA=Mnw&RMWsnD|* z%(N2X(G<*IRo=doH+pobE`VU4mVQf6xZA95_^3%mP^WR+<@pZ$L z{v+t*u|cgEmk_!ZyjNktY*B-C&xNz|P8(E!wD>GxyCHEo^*Nt>8z-Ha%!Tz8#$BIu zg!v~phAl!C#35V%(Vh3HCpXmlWqYpHwu-AqE~72Vx1kAE2To6O1I9f*iIYD$3#LC9 zP_El=*f>HJvAf2IxOa03W|mxz{#uJCA3x6pJ(`kUf=hI4Q?%B&>aR8*R~#-?;1K$a 
z$ZD#d%qP(it(q&d{@l5AN!dTiy1V|~x;U4GdTJHOcpW0YWaOLNsHxB_xu>+!zr#DZ zf8@8Y>-pFpp0Sj{QERyM%8p@Bk;`R_<_^skAZf*W# zKCdt67mgW>d$u7_!;h))I$2ruSjeE?5@r3}fV3$vd7J!_MSsrzv4W7jY(W*z!OcHy z=8#hMyf_?=%cJiA*lcY7`i--~*F1deJiEQI#$g0+K)&%T^5-Y{?9s%8z@H|gbW@ki zEvnI5ExTu=mga-D!w>W6hOyhSY)&{ey4wTQr4glacEFJGllBL-@M^v|!Asu!e1`q* zt@!)qc2!pc!BXJ2hA7}e7d;Z4u!({3N$0o=Q z+Nhj0Zl*d>iJ%k-gBai9{-w9mk)CdN+o8Cez?45fZGH4X_mG@G>#A3$+tJ?cI#;-I z|2#Un)yLy`4G}|ujr8p5$7s8fsCAJ<6W@_n9OT-zfR&@hbSypwI-S7_c^;B$xePp( zXOA@4=_Wy`hsS%3Iv;b#y>NHBPNrriOtLn;SM}gwmbdO+yU+9j{H?ff7UPbUW{%7s zOk@}^Au*_1EH%TnSqoc!D~34!+J^`{VNM7!E;<|Z?Hy|8Zyb!RGGm!xz~1Y~${|;; z2G+wQ-R(02>VU(R`2k7Grqx-w27&ypj2$Q`URf1*+wR@5 zB>wtC+TtaU!v7T(;mpIfWf(RRew=QQaD6QbdA*rNfAKkqMJ- zF;cL;(O?3=gq|{L)s-U{J?5Tn!7RH9VoJy4{8((2?^*_8Fbfv%eO5yQ`=WY9GCQkG zZsEpx=pQ4{6q#RsBid%T^(iM!dBdhc3!n}eo1!NSWZ-I~vs*>wDsZ4QzWugXh%rTe zr^?3fD8gYGUXM);!W(?S-F$aTqV|FZmseZ8aK8@F&shH2HI-hslU!#*pIlAcRYzu$;<0( z)N*$cxhQCen*jGSQEVUh1AsiQED9IhPCY<}!iyfkBh8IQdaip3(%#zk{LAp^ z!nY*=5jFDcNN!$}1zR-U4mJ|vJg zbP(P9WV```Q(Wd}Yk42%>Y%FqqPX%i%Uj}eYLo5Dz@BYx#;d@Mr%~vxG(smglJ1Fp zbFk>xqFAk@S>wVTxkd^H3!6Z_!3W-@hU7YSwVmkr1))IVdWNbCcBi^GSeZRdOuv`w=GlP~0xq!C&X3mfKv25cq?NNdD)a-Dv)ha! 
z0<<}eh#a3Wx7J>vC9nDDG@r&vaJPU}05;(cu(^Ku7pw56i}+K=R42u3Vrtv6uBzeP z7Irh_uT5+V0{6N3H^n-*QeKvq)L|gfu&PoMgLM%&dCl4| zb&N5nkd`cS1qh9TLUm-_t1p_fSPRflB0BZm>>W(vLxtWW-zoQ4Vd1UW1JNaEYL5}W zh+uXW^vZ`$W)2h@_#a6vIj}Xt0W!*{M_^z3_J0>SBQZ>#d5AIQnDJi;CUYGLY+Qv3 zSWS~fb}j-0X5$59@MwLC3^A$vD^Hi!xSorW7VJ=356QY6V<6KHO zwt;Wp)x<0E{^i_Ty1WD_&_dStK5a0nh_`V)!KkqUOVBF$EiY~|bK)y9*zMfu!P{z; zEmLudW@unMa>roK3iS*IDrdv@1js=MAwNIb@?f#by%B7VRubQ}wRAziq{cTgIPt8I zYB#)|P%?6W` zuWI?PahZcmC!)|t2bIo253%o!p#iCn`#ZcTU*lWKJyT4?A4!1_5?1MVSTy&lZ%G;( zt%mJ)psGqdb&IGP`ZhOlX1_xD!=U!eRkdCplx(xYN!O4We~qDuI*b$UQDiEha*+4j zs_|Amw>2@ri)@axw$GGh3GDl~Gi?!S-0mdij&eINA@42C3rA?n@@B+JV;*7y0>O8n z$SL1TN%Fg@2h|l86g0?K?j*pt&QC;#(!HC0o{8L;nCc!8Y3^0Pq)uSY6S-w!r4n73KR@=5%g|f3g^8I_-mzo^iOgp`1$cb^NJXT-<7LR+Y)&qC z^SYd=A0l*&NvB}SLmmr%j2zig@vz4F$Cb~1 ztI$A9Mz5p9%p)(kLrZzo3zOdnlDpPzcF0DZRQ?5b)zN+a*c|^t+i@d)5GbAIdP$W- zmA#7I-?1IdK{B7w=yXt#v~HO`UY^J$0xrO^SqJx=i@IKf9NMIE&s5Cv%CDb%pl6 zxM6$ z&G368<;~rBRPlPy#;B6~uqs|#x#4-Yc~1+&svYJQFwt%*n|3o!_Y#Tt^Wqf)&Hd5P zeeW_KkI3u!9_4Z6w*LN?j|4m2+;0>4I+(wyJ5KmVVL%<~-FQQJLiXcCr&i%#nG%1? z8;0J=?glN1vMFRP8p<(zz`y~K4&=`7y9Aeq_}(`)l_L;CQ*_1i-JCkgS=t4H*jSi> zv5Q3=G(4J_?1(7U1Cn4T`pN*awckG{JNoSZX07a~kbQY45=rWa87VP>ZT2?g@_OG_ ze}vquG}C~{B&TRWFL&Is9dA(z)rs_wK5XptrdX#Q+9z_}89cfQ99@&esAI=>e0^J7 z+GKUA9uxc+-Y+CvKaJ9g3KU2lj)v^8`Owylme*-Lae5q_4aU;BAnvztpI{8Z9lU<*oT3cX3#tKVY) z6Z+7c%(fkTZ=W}?${^^7doeJ~lEEc54`7p8C@Vgh(A=5+7~VFz%D>$1Y=2O{g#LEx zPoqa6ksq2lgu;-Um$Ztuhpu#9xg5tps&wWgWO0<9Y;a`ndQL2f3Kvc1jZHHF!-dw? 
z^CqMEoQ*5PgA-rEQh9NpJH0LKi2t^38_(axlC72Xr)xi3p0#N>6OdIy5SxbeuJH6r zg}Ce6PY7`UdX)7r=Q-}<_+gx(EU|e%k0&XAH`P$AYK#gok~AQ~_VJ7{^XYE-FP?Pv zgglIjI}ek%ZMwwwp<`=HQULrwzOA20jkZ|=B*s|?+mEd4sp*|0J#oifayG?Qnr z{+}iK-(yWRb)4+-@PGtukGcRdhnaY@l&*ziB(4sIU%2Bc9As4Lc;j--e=5Uf@A*F0 z=?CDX5JVe7r`d~0qcde?`To2GhU_ctLu8nU_s)6F8$^`Ss>Ps%x2#OhU%n!dqsM_ppl zWi?PbWu_NUI}6%@l5>Rw2aMl+Te#!troZg#vXI|9r_*x}=*1jj)7gRGBD&bT8!dXZ z`zutbUiBouDc=^#?m9A4wB{%0Y^7qMW*6r)DZ-c&bHAd*oHGX<>V&s$XCzBpl>L(7 z%3r@oDN98^y%+QLDV8e>7#!bF(OFg5?%QFW_5r~8vvfK^%onPM4y(c?{akrBZgL3u z#}P;~yZC8-P06L7ck-feyMeOMInd|Ks4}Q{%vTjXsd9 znlkOp4sub~H=v|%5Y#L|+3_81NFVozzp}HB^n|9cNXq;H-xXBLt|CBxo!j6o_fq%j z(bC4VU#k&QRlf+`G_1e3xv5c|@9D?m1qH@^#g8(>l{PX9GrZ$FrGd-7R!>j@MeN%N zQg&3F?@o}IvK%7&f<8~P*Zo5kLLMTz2X(w3+1u zrF8h*x6P<4iY-ZAw+nLdtb}^T#xpLjX`|X0iJbc^*kZYj_Zg0a`l<)vf?thXQDkHW zN-CqQn~|q+TBJ6Nh=^@ByrUgv$P~HRx#m8dB6k$=u z*_3;}#(lPS7-|qBKowwuY}JhN@a6Kw`{<4fV$`>gtc=SbJy1ROk%X>FB)sJxdnH0n zX{KEeliLt$+G6KDD$W+y%e04RI&CJX$U`&3Vc>QU8YWi61IQMIp$7Z{*kLBEKqqnt zxiiTM2ZfCIB&ccd=6yc7a~A2l-}1&8TO?21wiNS${U-Yx^P>ukX;31 zZbS0BE~dXuiGUxZ(^|P~VBwa@WKrL3G$th@?dz&IJ1_iwXzJB>k}4#!tE|GBD7j7&Sti(c`P?@AEfNpBIvnTK;z;atmFWG= zzu=dT!Vv}Qh<``q4N@q+0!!Z}xm5B~0L|0J)mHb4`5`^@yq4T9DJ9hK`|V9$EWHQf~WRz!>eh#GJD-? 
zwXv3PLx*b5aN-ju027l*AlYJ*(5loQMvUDv_rHXbRH<5pPUP9<8zvvgZrRrrJr`;c zVhg!P^6aiqcZ92nW9CG}(A?hgu)cY4iq)}7`KS*{xF?YxE;pq#vbHC`2kM%T@h5dS zhc}sHi6FCO-f^ksiZ(BBSbxfz>*n{uM>>CZh8v?#qoj0;AvR%)(3c!Y2kQLs00O|p zcQeg^0G3=nk28JT#jg9WT3h5?^$sEu%z8QRe98{I)2}ug50e80OPL%Niy*%bw2IK7 zRqho!DF7!N$WyYDocyIhTHbRTV94i5B_^j=bfnk$XO{5a+D#V`UXc%4oNf49agh%bv#TuYS5h13S*M>q?qn8l9f{M zo7H^|2~!dqFfUPsuwvibei;!~H!H&@Es087;tlaN;3Ct{2F}D`^%KU%vt8iAn(lA< z*v)K0%OxXcD2jR1Fi3rA%O`&>5eC#s{d~ZY?uBY=Ux65~v}^|juY%mwJ;U98bO-pJ zDQbJ|Hm_}$amrfF3$NW!3_Rz|zjT#lP8S$}cQhr~i#ZdoRXx)5wGAl8p{n~?!TDMFSDlLUOXB-UZ7WO0sRSmKe4B2fkvMI*N4Y>MTTp8ZhFsZv!9U?U@7wx# zO}>H?ir=oS4uDc{@&@;eM>&7z-$uLM9%bK$cr&=+(of<61?;mnm+H2zgjPg1YFR22 zH*cu|jDd{{hpHagaPMaN+sn_Jy!)v|SiGi*HkNtuljHQencnI_(k|z~&1Spsu~n@D zV8Wu1AYcteC`L1CVJXAyu5cds&G3gsYpSdZ^0|Zfj&daOhfo?95{uL8Rop^A>5A^ z`geW%=t%)Rjv3Y`RB0x;K zYp8;NbsigtTEJBFtB+J^tGSQyTB=Pg;-fF(3J6 zipROf4eH6mtTecsR4c9%bIy4l7YCcXjYe*|2)iuPRW=XYe=yxc1d~tt&&z(U{UHwz zp9tl_tPcXR3L%)#IERcim|c%!(Y{YzfFga^1tVPlESoBIXLJB!;jni!1Zb~^jW1m% z+Y1ct%6oD3^$J3`5;4`nG1INW%YvIr7hy1~9xx3sUe<7nt1|gb)Gmrs2H1S{n3u#0 zz-7KqPp)BM6PPd^=P`%uJeIp%llnH}lX8&^eOsjwRHJohG^;ZD)?#eG>MWjr#D4Q@ zv>)>hWX7j8xfRmQP_i|?nlI$e^tDD7A?>z`HH2mMuWhwYW_^zN{6)%dJdAXVY^N4# zolzKSgWWUb`my7`(05{|`P)=Rq%=H_AfhJTC#Q3C5!#J0VR)wT3u6(sE)FmZqBueE z02ooMt!#IBR;bS3&t|^T^LLCHl@M$N)oJ&Q>RE3 z&fb?ks~TEhUk7Wfsa;3WkxMPL|R@<=i9{C{fRtrbM~6z*uI=HMgn9^k^h( zK+;LGVzq~!v+hQ*fL>%sYkC1nj`-!~P_Up^?AqEorY7E_zBAFu#2(NI9 z#W5oltu@CJQ@+pLvkr(BrEP11aKg?DLPcaoG!f^22!_rj_RcYj%)`UOrHqQ_2+FSp z8IkJWoO@n$H!lM26qL$)1Xvr+WWt)}9_2Fg$=ea94)FDz91^+=WU{lB0+?>TsecX3 z!=4=TW;Z;7;5V^J5c6?1vX|UzQSJO#=xmSm;rLUWa-H$nV)!>K*`0C&N*ZAii{Zzd z#H9*5=F?XmBoF8L5J~cHacZ3oQB*rOvEsaEcS3XghW4pq?xQO;3(=*COhXcTiWWnD zX%(SjwD;n^@5^B+mK)@NOa(9o%lLne*O1IxIT&I&G&uIjL4TF#u*%gvO;@X2B7mCV z$o9a7enu-WfMB%tcU{MOQpJL`Lup)!+z9)Y0c2<|hCkaokzTFcT$;#sGo@g*$ponL z`r{W{>%s7WIb^LwfP07X)fTLSX9m-kr{C|Fe| zQy=X}9hSuZeQ4q(=d@;L%od7!bX+P18mAf*_2~6wMq^(cTh|{zM~FNhzr_FiM&+x| 
zGQSjYhwosfcxF#O*-LXG3#;F>UPv;cZE}<%sts1gf{Y)d{7zl8r~;zw?xe`&D-SQG z+#`|Dn}%tP#t~o9+=G+Y&*`&|dhlD`pJfl_tjXn+>PoE&IEd~HY0eT8DZXZpD!Ec* z+Ld6{4odTYRPNKuwVc%~bAML&`#R!E z#8;=1dCN;|fURD`o7BNdtTKV)ZNDhoSxDkJ+dY-~;ywa)!^gk`sVg(zaqCS);Wfh~ZfIi-7ezJm;A3gPx663`KCu0+$Jhz%u@ta$|MXDh^7+&_Q1GEq!mo7_ zEDIsUG3Oqa;-0X_@X$mCE-4_B{YOWy=^$P_Q~Tv+HCV1_xB92{H9zGIf|TsR;8F1P zeWka0v$`%%MM-aX#>7h1_tivMK9nlD9J-xf)2{gVKsg2#MGzC8JwcT(m#Zh6Mv%Jw zVk?^(SR$YOfC(tAYCq)Zcu%DP+Zo z--2MMe0Y`L^r1lI{F3|S>rbak5Ib~sOyz$$(T1TN*DD=Eoz$SbA%nak_62p980_bq zdGky{NUOm#Nto42M!c2-pau=g#`_KQK+YIpI5X++7g4;7wsnXEsI}+-A_@(D6zMyE z9DRiln;0&8I8=v%D_h2wcW^Ns4Ws8nX2OQ;fsAob+n*u=0E)J1yVkv{U@YQhP@pw^ zy-$`}!C+mp&R4wqe&1tx=3;<_8iW)W-Y83bs20(U$#s0Q{KohI`cRK@qks2XD#M4jXTFR-x$+*Rh~gaU2{lG?(Ux}KM}<3QqPnzkm$(ubkq38LLv z%qh4tCZ26PKv?`hVRA5?obO<&95xsBNlUtC#~k0O=SB ze1qveUCBCW-Kai@tfLAUJxR;SH}XuvQf_YD>tD{dex_?dC&)C=BzBpc-}g0lgAz*e zoEnubMSs2YbP}5^Swd`e(EGF?5;amD#Zg4hijoczBPcesiDA(r03FC<*&Swt#V`@8 z=hc-qsMkXxammBq&@P!Z2>8bAe>$6G$nf#V_jWURxXAf7fl{doZ*F27 zI``v`2xEogh+ZQBOoC<&Nk3d*VPp*|>mre_`1`FTyHTpsV)P;4pz! 
z^EiE)S_C zH9_$V($Gh>f3>%3d{MHG3E2_ZyQ7;M(o^SfIgnI!4v@Bu7FO4cxElDggNG!fd5@${z;;CL(lp}g$Uxz(k6ytl0-xAQ zB8hCt(XXTB1_OoJTyH1pJ(eM@zx*+vu0oj7*l2^gZP*UGUqyvLPY&HwzNcF6Y!Tz1Srfe_PW+gO^ z^HNbxD#V4isjY*sF)77NBw=IkJ^Ai7oBDx!cf1Y8Qp5ys#UNrAF^y%l4jQZU*k zbZyqwS&cu9xXXz(P*~^&$mP%byL3gBIzJZ!{TVV=Q2Odo(W;Uri9t3IGz3_!^~J{v z$u+LegmAA@olfd%js*FTO9J1&){!L~!?Ms&xi?#1V>ZjL$PfgBk7ZmohJeC=hJChV-ha-o&GyU$szCE$= zKeNv7&+4w}Ptk=e`qCvuCF65K+!1B$Vuwx^w5ioZSIttot}fVP4oW9|q_H8RJl{zj z%KHfkcl14b*Xj6P9RRZI#!hRd9?C!DLgtcy?ZO&J&$+b~0tvS3eqz0dQ#QP>S+TPM zmcT{GQ5F|YM?>FN^vT4R+6t)p?)j}RxJe$}$#&*7Bnn{|T#Vd2JvoRIa@M*tywY7O zZTiLIx#)3go5-3L;xEv@M{3rZVqwNLUG6^Ltp70yS!UM>P(qW<_KJP{mWMOaZNKt;Jj_}C`uV38!c`tPH25hC^er#NCyc)PI1SCBh zA<5@evver@0G5VJziaF>b4M4rX7FU+t>+kG*K(_%n(_HsjP2g)tu-Ly0=g^ZEb&!~ zH+Zy@huZ|X3hAKatH*<(HzL!bI@)r^u^=3E;6dvk;k)|wQ?*tlTxSBZ;jnrA0DPUe zoA>DI2iF>^MB=?a=f|$LiL8E&9xP*ktZa-qAoSX9LufJB+xAsFKzt)bpA|XdBt|%4 zhc&6N@kApL%bG0)CZC6mgSoVR&BE2E;If{dp7-Dm50kHsmIfofUa6MM6HezQ)5DE? z^<}#*Pu*zUGb{b6>8pa%IuT7pkg9Uw*POpe&?i~N?F%+Q=Nniv zU-gmZ{w!(Vsg_*3@Aa(pDox=mx}ELgm+!G@f4Fje?RCMjhMcdwDIV$vs!Fw;qZ(Uz zs-YsRTu~?^slpPHl{C`}ldPY$b$^1xoeW>VCbS0Wduyy0Zpxas|5Z0VdNt&SULxH} zSILQz@Ctbz5-RXxBP4R|3;5jka_eqx`MN-OgI#uDGKw)`jJ}Z2YwGL>#vL+d(db@rI4ae9=%Cc!O3&Ib;fRV8@yA}82NCG2re1wys z$uo)>3b;AQa6nTE3nsK#BGWOqx-;90!~hqx!IQ(H?^F%AOZtW9a=tEZ4sJkaylTL4 z6ai8`sYP@>!7kYBhmrlc!sr$|e0B)x=uWgrt{|Vde|zZ9?}z`XNMY%qmKw0pVPs22 z{~>ZqDnGBds&ay){`9-R|KK!Jug-K+uceq8jHZp!eJ#7tC3vW8e@~y!tTz{wvjS%q zSF||l{dRnpdY;zH1J+)_!#Lo^f*LhV912-Yk;Q^GQBU}`jLVFD#CSs6`duc z+7CpuHrGo+9ghKd9gzOX8d6)7&F+Q)*<8}-EM>3+=96$U+WDCGaz?ZJ+u}0Y5|haB zfHw*iu%*e-Pn4{Go>+=_?yxQa1CDh;tpR(LWmE363u=mihpUAg;yPch?Cd0oi*`*d zC8^HM%=MM9M4na#h9P*!W!X*4l4iyT`v#NdHbl3fhM&Lb;Z*m>`!tUw;MHRt8MIc? 
zZ*1}N8nq^KILera_i-46jDpdvebSP9I$^3}Ij&rG7TZd$^V_hkYdz8sHFXwvL+)zz z{I!3q112&mwENwLZNXRiH!r-ej&5ym?FM*;GfVI8%0LCG&8JVxXXDh5X^Q9JXa|xj zr)`g0dArb%+$j|xVa)gww6>R1-Ur7&FMRb|c5kD#NAxjv5jXKj^_RR>z!mSN`vz3L(80nMJnb9>#X0%rRlA?sK`zFa@Mn6xS!tf1F{^v=Sr5U`0iT=z;UHIHYZ9504M86#gwUpBdhnvT1BfFJ8w}FDU-q#?MRy%;Y@au##pR0M#-={5 z=0OgBo{Os6s1A~=yfUiKr$KZ6j*5$-riu5z6|Da(E!}M_V%N5m*X6B)AN&4k%YM%G zSzP!Mf&Fi@;rrH3c=YH9C+%T!o`Zu!6xV)no{#JN%;CY?*xxs2Y#97^ro+8Gudd>w z9Z>W2(6!_G+Ezp0!*vsJmx_^Xdhg2P&upZK=Tl}Bzb(}c*`<3C(i+dM(BcUjopZrM zCbVZkl9LtqKq8c#k|4BZG{nldG2K`6)%H;YJamHJo<(%3OZSc5ue-2hGnlrPmf=l3 znp)bVnOmPoX#S0E_|2i?_IfnBIv3am;n^cDD;<2Jt>Mv$6-kQp4CBIF^T+q!Z9!L? zSyezY?~?>3JW825^DoaV8quG1;!_*9Swc_v8ldnQ@tGv}I^pY6r4Pz*QlnyXA0#=bU5%%gVqYTE9f2Kk(uT9T0i@@`*wAweEc2hrUY#l~g+dc5$;j@Clj-A9OhQ>*551)0Ux*@$NXnKJO025r||H5cibv%RgV4IkzWlMwm?hD~tfkyen z3$?SNUcD+tFeGX?Ppn5WF#DvBMpJ$AJA@~Nm>>0{2cJlIU`F=J=E4ohvNr)alFD*c z+*in1m-q;^5TBX!jI)&2phDQ{_)pQbuiBvLyNOD)*oL`gf(G7>g5}S4ZNbt|OS(z% zv6@Qd6eb8U3nzoU$ZUkS@6XngRryoKiBHrF_#nbI-uyq$VZgp4UlYM%>7E_!dsRro zz>wR|U1YpQzhKMk(c(g%M*YZ-tXY#M`M29Nkc)(uk(!yH>?c9jfz z`L8k6*x)jIm}uCWjG&kQ8k0@hEmE1=^YdR5fW~wif&QW`W8l^HuL(e7{K7!9hCxN+ z{2K^1SU5?yh|PEZ>vNWW17QZI1Q zAEWAE{JgtcKziS6-CdVd3q#yMAH_y>9|6sEAePmJTr~9T>}%NY6X$>EH#V4%0KiFi zdX-gsli^wfWa;V0ll(@y+=@x=5Q6D!9&2Rlww)O5*CnWFNO-6@^=}G3jfF~_Iu|*h z&4;Um2j#eiSH}~ZWNw&*iMIpGu=YaujiZ%dne;LoEjbJLue0GM6L6s=AYyWV<_)3R zyAv5(BN;<;2Lg6xNs$=d?h=s^$PCGy_{f@WMGj3(|2fQTP~;4A%$DEOYUbaY859oq zOSDSoA}ToO*Nfp=Yob&+XQ!%1RmmI2@Biiq%h+Icm3S{V>U^M;O7oJ0U^cXiBE5uy z+?*n0Rp%NXCsC$NR=zH$ui;HL`tA1+A4dP>VJ`^aQ&xXF_x2yN)@!|=%eaMXeIo9b ze#dT+M#As*Ghm;o9e}EL^uZHCL5rLQh1Z9IWL`I+w}Jn%ygMAo^h|1$bj;v>R{3R6 z9PmzfPxrXM>A6T|W{jBn9z;HZ4sLqJE!KZI;4}b5B8T(&r|H|W?ot~=bbB4%ANgL0 z|1rd17^lJLb>|Eq&+%RL_T7&7fB5b25Ab!-<%|ybL0~!`#~kzZE9-yxVF#N3BusN# z zOY!;Qes)I@CX5n(t*<9f53bHkta0V zGR#fAGX6sv_&|?XWB<2Xm|}jwwcg&k=*|)O(f{0O9FRmIv-e+g{enQ~HdSA7%}qXW z{Tn?$ZxjPkQ_}zK8+`;wN(l-wCGw8TivKs9Y}WrzWSt8$uATpb-Yapp$fU@B7S|1< 
zfepeP9)61Ng_57<-*5y>|38sEmEqn!{SSH%f_}hsVEVs-`k_`c|082zW_sVr`Y8Py hj{oHU8` { + await page.goto("/"); +}); + +// Constants definition +const CHAT_ITEMS = ["What is the total revenue of Nike in 2023?"]; +const UPLOAD_LINK = ["https://www.ces.tech/"]; + +// Helper function: Check notification text +async function checkNotificationText(page: Page, expectedText: string) { + const notification = await page.waitForSelector(".notification"); + const notificationText = await notification.textContent(); + expect(notificationText).toContain(expectedText); +} + +// Helper function: Enter message to chat +async function enterMessageToChat(page: Page, message: string) { + await page.getByTestId("chat-input").click(); + await page.getByTestId("chat-input").fill(message); + await page.getByTestId("chat-input").press("Enter"); + await page.waitForTimeout(10000); + //await expect(page.getByTestId("display-answer")).toBeVisible(); +} + +// Helper function: Upload file +async function uploadFile(page: Page, filePath: string) { + const fileUpload = page.getByTestId("file-upload"); + await fileUpload.setInputFiles(filePath); + await checkNotificationText(page, "Uploaded successfully"); +} + +// Helper function: Paste link +async function pasteLink(page: Page, link: string) { + await page.getByTestId("paste-link").fill(link); + const pasteClick = page.getByTestId("paste-click"); + await pasteClick.click(); + await checkNotificationText(page, "Uploaded successfully"); +} + +// Test description: New Chat +test.describe("New Chat", () => { + // Test: Enter message to chat + test("should enter message to chat", async ({ page }) => { + await enterMessageToChat(page, CHAT_ITEMS[0]); + }); +}); + +// Test description: Upload file +test.describe("Upload file", () => { + // Test: Upload file + test("should upload a file", async ({ page }) => { + await page.waitForTimeout(10000); + await page.getByTestId("open-upload").click(); + await page.waitForTimeout(10000); + await uploadFile(page, 
FILE_PATH); + }); + + // Test: Paste link + test("should paste link", async ({ page }) => { + await page.waitForTimeout(10000); + await page.getByTestId("open-upload").click(); + await page.waitForTimeout(10000); + await page.getByTestId("exchange-paste").click(); + await pasteLink(page, UPLOAD_LINK[0]); + }); +}); diff --git a/HybridRAG/ui/svelte/tests/test_file.txt b/HybridRAG/ui/svelte/tests/test_file.txt new file mode 100644 index 0000000000..bfbd73c1c3 --- /dev/null +++ b/HybridRAG/ui/svelte/tests/test_file.txt @@ -0,0 +1,104 @@ +Follow CES +EXHIBIT +REGISTER +All together. All ON. +Registration is now open for CES® 2024 — taking place Jan. 9-12, in Las Vegas. +Flip the switch on global business opportunity with CES, where you can meet with partners, customers, media, investors, and policymakers from across the industry and the world all in one place. +Don't miss your chance to be a part of the most powerful tech event in the world. +KEYNOTE ANNOUNCEMENT +Qualcomm CEO to Keynote CES 2024 + Cristiano Amon + President and CEO, Qualcomm +Anticipated Numbers for CES 2024 +130K+ +attendees +1000+ +startups within Eureka Park +3500+ +exhibitors and a sold-out West Hall + Learn more + arrow-black +Featured Podcast +Li-Fi Unleashes the Future of Esports +Listen Nowarrow-black +Featured Event +Apply Today for CTA Match +Learn Morearrow-black +CES 2024 is ALL ON +× +Vehicle Technology +With features like adaptive cruise control, collision prevention and lane guidance, technology is paving the way to safer roads. Discover what’s driving the innovations behind concept cars, connected vehicles and autonomous mobility.  +Learn Morearrow-black +  +Featured Podcast +On the Fast Track to Autonomous Driving: Mobileye +Read Morearrow-black +Brunswick Boating Tech Smooths Rough Waters +Register for CES 2024 +See the next generation of innovation at CES 2024. 
+ Register Now +Featured Exhibitors +See the companies from across the globe that will be showcasing the latest in digital health, food tech, automotive tech, NFTs, gaming, smart home and more. +  +  +  + View 2024 Featured Exhibitors + arrow-black +Apply for Eureka Park™️ +Eureka Park is the buzzworthy startup arena that provides a unique opportunity to launch a new product, service or idea. If you’re looking for your big break in the tech industry, Eureka Park is the place for you. + Apply Now +Exhibit at CES +CES is where business gets done. +CASE STUDY +VW +Over a busy four days in Las Vegas, Volkswagen showed the world and media that its accomplishments transcend the legacy auto sector. It’s ID.7 sedan promises stellar performance and efficiency with a 435-mile range plus impressive features. +Read Morearrow-black +600K +interactions on social media +CES is a great opportunity, both from a business perspective in the networking sense and seeing what's going on in the tech field, and also it's a great opportunity from a media perspective because we see massive media attendance and we see a great deal of coverage. +MARK GILLIES +DIRECTOR OF PUBLIC RELATIONS AND REPUTATION, VOLKSWAGEN +CES Success Stories 2023: Volkswagon +× +Want to Exhibit at CES 2024? +Showcase your brand, launch your latest products and win business at the ultimate platform for innovation. 
+ Contact Us +Latest Articles + CES 2024 Sector Trends: Digital Health +Read more +arrow-black +Nasdaq Keynote, CES 2024: Finance Taps Tech for Humanity +Read more +arrow-black +Walmart Keynote at CES 2024: Disruptive Retail Tech +Read more +arrow-black +Press Releases +Qualcomm CEO Cristiano Amon to Highlight How We Will Interact with Our Devices in the AI Age During CES 2024 Keynote +HD Hyundai to Keynote CES 2024 +Elevance Health’s Gail Boudreaux to Keynote CES 2024 + View all press releases + arrow-black +CES is owned and produced by the Consumer Technology Association, which provides the ultimate platform for technology leaders to connect, collaborate, and propel consumer technology forward. + Become a CTA Member +About CES +CES Events +Innovation Awards +CES Tech Talk Podcast +Promote Your Brand +Topics +Articles +CES Success Stories +Schedule +Our Partners +Information for: +Exhibitors +Media +International +Follow CES +Code of Conduct +Terms of Use +Privacy +Sitemap +Copyright © 2003 - 2023. All rights reserved. +CTATECH-PROD2 diff --git a/HybridRAG/ui/svelte/tsconfig.json b/HybridRAG/ui/svelte/tsconfig.json new file mode 100644 index 0000000000..b0135d48eb --- /dev/null +++ b/HybridRAG/ui/svelte/tsconfig.json @@ -0,0 +1,16 @@ +{ + "extends": "./.svelte-kit/tsconfig.json", + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "module": "ESNext", + "target": "ES6", + "outDir": "./dist" + } +} diff --git a/HybridRAG/ui/svelte/vite.config.ts b/HybridRAG/ui/svelte/vite.config.ts new file mode 100644 index 0000000000..d48b5ad894 --- /dev/null +++ b/HybridRAG/ui/svelte/vite.config.ts @@ -0,0 +1,25 @@ +// Copyright (c) 2024 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import { sveltekit } from "@sveltejs/kit/vite"; +import type { UserConfig } from "vite"; + +const config: UserConfig = { + plugins: [sveltekit()], + server: { + allowedHosts: true, + }, +}; + +export default config; diff --git a/pyproject.toml b/pyproject.toml index 081c2d03e9..2149887c11 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ line-length = 120 [tool.codespell] -skip = '*.po,*.js,*.map,*.js.map,*.css.map,*.json,*.sql' +skip = '*.po,*.js,*.map,*.js.map,*.css.map,*.json,*.sql,*.txt' count = '' quiet-level = 3 ignore-words = ".github/code_spell_ignore.txt" From d83ddac3c1360a682f85712c25a80e89578b2159 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Thu, 22 May 2025 13:56:42 +0800 Subject: [PATCH 068/217] Reduce the verification delay time to 1s (#1982) Signed-off-by: ZePan110 --- .github/workflows/pr-link-path-scan.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pr-link-path-scan.yml b/.github/workflows/pr-link-path-scan.yml index 3b147af241..1f389a7c69 100644 --- a/.github/workflows/pr-link-path-scan.yml +++ b/.github/workflows/pr-link-path-scan.yml @@ -23,7 +23,7 @@ jobs: - name: Check the Validity of Hyperlinks run: | cd ${{github.workspace}} - delay=15 + delay=1 fail="FALSE" merged_commit=$(git log -1 --format='%H') changed_files="$(git diff --name-status --diff-filter=ARM ${{ github.event.pull_request.base.sha }} ${merged_commit} | awk '/\.md$/ {print $NF}')" @@ -80,7 +80,7 @@ jobs: - name: Checking Relative Path Validity run: | cd ${{github.workspace}} - delay=15 + delay=1 
fail="FALSE" repo_name=${{ github.event.pull_request.head.repo.full_name }} branch="https://github.com/$repo_name/blob/${{ github.event.pull_request.head.ref }}" From d1a500f945f23055c9bc47bf82c905a684e1eece Mon Sep 17 00:00:00 2001 From: "Sun, Xuehao" Date: Thu, 22 May 2025 14:51:00 +0800 Subject: [PATCH 069/217] Create scorecard.yml (#1980) Signed-off-by: Sun, Xuehao --- .github/workflows/scorecard.yml | 81 +++++++++++++++++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 .github/workflows/scorecard.yml diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml new file mode 100644 index 0000000000..e6286dfa5d --- /dev/null +++ b/.github/workflows/scorecard.yml @@ -0,0 +1,81 @@ +# Copyright (C) 2025 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +# This workflow uses actions that are not certified by GitHub. They are provided +# by a third-party and are governed by separate terms of service, privacy +# policy, and support documentation. + +name: Scorecard supply-chain security +on: + # For Branch-Protection check. Only the default branch is supported. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection + branch_protection_rule: + # To guarantee Maintained check is occasionally updated. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained + schedule: + - cron: '18 7 * * 3' + push: + branches: [ "main" ] + +# Declare default permissions as read only. +permissions: read-all + +jobs: + analysis: + name: Scorecard analysis + runs-on: ubuntu-latest + # `publish_results: true` only works when run from the default branch. conditional can be removed if disabled. + if: github.event.repository.default_branch == github.ref_name || github.event_name == 'pull_request' + permissions: + # Needed to upload the results to code-scanning dashboard. + security-events: write + # Needed to publish results and get a badge (see publish_results below). 
+ id-token: write + # Uncomment the permissions below if installing in a private repository. + # contents: read + # actions: read + + steps: + - name: "Checkout code" + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + persist-credentials: false + + - name: "Run analysis" + uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1 + with: + results_file: results.sarif + results_format: sarif + # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: + # - you want to enable the Branch-Protection check on a *public* repository, or + # - you are installing Scorecard on a *private* repository + # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action?tab=readme-ov-file#authentication-with-fine-grained-pat-optional. + repo_token: ${{ secrets.ACTION_TOKEN }} + + # Public repositories: + # - Publish results to OpenSSF REST API for easy access by consumers + # - Allows the repository to include the Scorecard badge. + # - See https://github.com/ossf/scorecard-action#publishing-results. + # For private repositories: + # - `publish_results` will always be set to `false`, regardless + # of the value entered here. + publish_results: true + + # (Optional) Uncomment file_mode if you have a .gitattributes with files marked export-ignore + # file_mode: git + + # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF + # format to the repository Actions tab. + - name: "Upload artifact" + uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard (optional). 
+ # Commenting out will disable upload of results to your repo's Code Scanning dashboard + - name: "Upload to code-scanning" + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: results.sarif From afb46bdfa1075dc1ba6ce10466efec28fda5199e Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Thu, 22 May 2025 15:49:48 +0800 Subject: [PATCH 070/217] update default model to resolve the vllm/model_executor issue (#1985) Signed-off-by: chensuyue --- DocSum/docker_compose/intel/cpu/xeon/compose.yaml | 1 + DocSum/docker_compose/intel/cpu/xeon/compose_tgi.yaml | 1 + DocSum/docker_compose/intel/hpu/gaudi/compose.yaml | 1 + DocSum/docker_compose/intel/hpu/gaudi/compose_tgi.yaml | 1 + DocSum/docker_compose/intel/set_env.sh | 2 +- 5 files changed, 5 insertions(+), 1 deletion(-) diff --git a/DocSum/docker_compose/intel/cpu/xeon/compose.yaml b/DocSum/docker_compose/intel/cpu/xeon/compose.yaml index 8eb3bb28b6..9f05963e7a 100644 --- a/DocSum/docker_compose/intel/cpu/xeon/compose.yaml +++ b/DocSum/docker_compose/intel/cpu/xeon/compose.yaml @@ -40,6 +40,7 @@ services: LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS} DocSum_COMPONENT_NAME: ${DocSum_COMPONENT_NAME} diff --git a/DocSum/docker_compose/intel/cpu/xeon/compose_tgi.yaml b/DocSum/docker_compose/intel/cpu/xeon/compose_tgi.yaml index 4b0362bd09..2343d726c7 100644 --- a/DocSum/docker_compose/intel/cpu/xeon/compose_tgi.yaml +++ b/DocSum/docker_compose/intel/cpu/xeon/compose_tgi.yaml @@ -40,6 +40,7 @@ services: LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS} DocSum_COMPONENT_NAME: ${DocSum_COMPONENT_NAME} diff --git 
a/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml b/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml index f44d789a93..2efa09e890 100644 --- a/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml @@ -45,6 +45,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS} LLM_ENDPOINT: ${LLM_ENDPOINT} diff --git a/DocSum/docker_compose/intel/hpu/gaudi/compose_tgi.yaml b/DocSum/docker_compose/intel/hpu/gaudi/compose_tgi.yaml index 01008de27a..6b922ebc68 100644 --- a/DocSum/docker_compose/intel/hpu/gaudi/compose_tgi.yaml +++ b/DocSum/docker_compose/intel/hpu/gaudi/compose_tgi.yaml @@ -49,6 +49,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS} LLM_ENDPOINT: ${LLM_ENDPOINT} diff --git a/DocSum/docker_compose/intel/set_env.sh b/DocSum/docker_compose/intel/set_env.sh index b31ceb5784..d2c061177d 100644 --- a/DocSum/docker_compose/intel/set_env.sh +++ b/DocSum/docker_compose/intel/set_env.sh @@ -13,7 +13,7 @@ export https_proxy=$https_proxy export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export LLM_ENDPOINT_PORT=8008 -export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" +export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" export MAX_INPUT_TOKENS=1024 export MAX_TOTAL_TOKENS=2048 From 5c7b41c2c05df1825a0cf5b71db9f785800cddba Mon Sep 17 00:00:00 2001 From: Abolfazl Shahbazi <12436063+ashahba@users.noreply.github.com> Date: Thu, 22 May 2025 22:43:57 -0700 Subject: [PATCH 071/217] Update docs to clone or download from latest v1.3 everywhere (#1979) Signed-off-by: Abolfazl Shahbazi <12436063+ashahba@users.noreply.github.com> --- 
AudioQnA/docker_compose/intel/cpu/xeon/README.md | 4 ++-- AudioQnA/docker_compose/intel/cpu/xeon/README_vllm.md | 4 ++-- AudioQnA/docker_compose/intel/hpu/gaudi/README.md | 4 ++-- AvatarChatbot/docker_compose/amd/gpu/rocm/README.md | 2 +- AvatarChatbot/docker_compose/intel/cpu/xeon/README.md | 2 +- AvatarChatbot/docker_compose/intel/hpu/gaudi/README.md | 2 +- ChatQnA/README_miscellaneous.md | 2 +- ChatQnA/docker_compose/intel/cpu/aipc/README.md | 2 +- ChatQnA/docker_compose/intel/cpu/xeon/README.md | 10 +++++----- .../docker_compose/intel/cpu/xeon/README_pinecone.md | 6 +++--- ChatQnA/docker_compose/intel/hpu/gaudi/README.md | 6 +++--- CodeGen/docker_compose/amd/gpu/rocm/README.md | 2 +- CodeTrans/docker_compose/amd/gpu/rocm/README.md | 4 ++-- GraphRAG/README.md | 4 ++-- MultimodalQnA/docker_compose/intel/cpu/xeon/README.md | 2 +- MultimodalQnA/docker_compose/intel/hpu/gaudi/README.md | 2 +- MultimodalQnA/tests/test_compose_milvus_on_xeon.sh | 2 +- MultimodalQnA/tests/test_compose_on_gaudi.sh | 2 +- MultimodalQnA/tests/test_compose_on_xeon.sh | 2 +- SearchQnA/docker_compose/amd/gpu/rocm/README.md | 4 ++-- SearchQnA/docker_compose/intel/cpu/xeon/README.md | 4 ++-- SearchQnA/docker_compose/intel/hpu/gaudi/README.md | 4 ++-- Translation/docker_compose/amd/gpu/rocm/README.md | 4 ++-- Translation/docker_compose/intel/cpu/xeon/README.md | 4 ++-- Translation/docker_compose/intel/hpu/gaudi/README.md | 4 ++-- 25 files changed, 44 insertions(+), 44 deletions(-) diff --git a/AudioQnA/docker_compose/intel/cpu/xeon/README.md b/AudioQnA/docker_compose/intel/cpu/xeon/README.md index 8a78922d41..3994d34219 100644 --- a/AudioQnA/docker_compose/intel/cpu/xeon/README.md +++ b/AudioQnA/docker_compose/intel/cpu/xeon/README.md @@ -31,10 +31,10 @@ git clone https://github.com/opea-project/GenAIExamples.git cd GenAIExamples/AudioQnA ``` -Then checkout a released version, such as v1.2: +Then checkout a released version, such as v1.3: ```bash -git checkout v1.2 +git checkout v1.3 ``` ### 
Configure the Deployment Environment diff --git a/AudioQnA/docker_compose/intel/cpu/xeon/README_vllm.md b/AudioQnA/docker_compose/intel/cpu/xeon/README_vllm.md index 770ef735c0..8602259532 100644 --- a/AudioQnA/docker_compose/intel/cpu/xeon/README_vllm.md +++ b/AudioQnA/docker_compose/intel/cpu/xeon/README_vllm.md @@ -30,8 +30,8 @@ git clone https://github.com/opea-project/GenAIComps.git If you are using a specific branch or tag, then we perform git checkout to the desired version. ```bash -### Replace "v1.2" with the code version you need (branch or tag) -cd cd ~/searchqna-test/GenAIExamples/SearchQnA/docker_image_build && git checkout v1.2 +### Replace "v1.3" with the code version you need (branch or tag) +cd cd ~/searchqna-test/GenAIExamples/SearchQnA/docker_image_build && git checkout v1.3 git clone https://github.com/opea-project/GenAIComps.git ``` diff --git a/AudioQnA/docker_compose/intel/hpu/gaudi/README.md b/AudioQnA/docker_compose/intel/hpu/gaudi/README.md index cb22d3abde..dbec6d11bd 100644 --- a/AudioQnA/docker_compose/intel/hpu/gaudi/README.md +++ b/AudioQnA/docker_compose/intel/hpu/gaudi/README.md @@ -31,10 +31,10 @@ git clone https://github.com/opea-project/GenAIExamples.git cd GenAIExamples/AudioQnA ``` -Then checkout a released version, such as v1.2: +Then checkout a released version, such as v1.3: ```bash -git checkout v1.2 +git checkout v1.3 ``` ### Configure the Deployment Environment diff --git a/AvatarChatbot/docker_compose/amd/gpu/rocm/README.md b/AvatarChatbot/docker_compose/amd/gpu/rocm/README.md index a73ec62119..a94924ab16 100644 --- a/AvatarChatbot/docker_compose/amd/gpu/rocm/README.md +++ b/AvatarChatbot/docker_compose/amd/gpu/rocm/README.md @@ -194,7 +194,7 @@ python3 ui/gradio/app_gradio_demo_avatarchatbot.py The UI can be viewed at http://${host_ip}:7861 UI Example -In the current version v1.0, you need to set the avatar figure image/video and the DL model choice in the environment variables before starting AvatarChatbot backend 
service and running the UI. Please just customize the audio question in the UI. +In the current version v1.3, you need to set the avatar figure image/video and the DL model choice in the environment variables before starting AvatarChatbot backend service and running the UI. Please just customize the audio question in the UI. \*\* We will enable change of avatar figure between runs in v2.0 ## Troubleshooting diff --git a/AvatarChatbot/docker_compose/intel/cpu/xeon/README.md b/AvatarChatbot/docker_compose/intel/cpu/xeon/README.md index bf686ce99e..b803392f80 100644 --- a/AvatarChatbot/docker_compose/intel/cpu/xeon/README.md +++ b/AvatarChatbot/docker_compose/intel/cpu/xeon/README.md @@ -164,7 +164,7 @@ python3 ui/gradio/app_gradio_demo_avatarchatbot.py The UI can be viewed at http://${host_ip}:7861 UI Example -In the current version v1.0, you need to set the avatar figure image/video and the DL model choice in the environment variables before starting AvatarChatbot backend service and running the UI. Please just customize the audio question in the UI. +In the current version v1.3, you need to set the avatar figure image/video and the DL model choice in the environment variables before starting AvatarChatbot backend service and running the UI. Please just customize the audio question in the UI. 
\*\* We will enable change of avatar figure between runs in v2.0 ## Troubleshooting diff --git a/AvatarChatbot/docker_compose/intel/hpu/gaudi/README.md b/AvatarChatbot/docker_compose/intel/hpu/gaudi/README.md index 994d400ce4..105987ec18 100644 --- a/AvatarChatbot/docker_compose/intel/hpu/gaudi/README.md +++ b/AvatarChatbot/docker_compose/intel/hpu/gaudi/README.md @@ -174,7 +174,7 @@ python3 ui/gradio/app_gradio_demo_avatarchatbot.py The UI can be viewed at http://${host_ip}:7861 UI Example -In the current version v1.0, you need to set the avatar figure image/video and the DL model choice in the environment variables before starting AvatarChatbot backend service and running the UI. Please just customize the audio question in the UI. +In the current version v1.3, you need to set the avatar figure image/video and the DL model choice in the environment variables before starting AvatarChatbot backend service and running the UI. Please just customize the audio question in the UI. \*\* We will enable change of avatar figure between runs in v2.0 ## Troubleshooting diff --git a/ChatQnA/README_miscellaneous.md b/ChatQnA/README_miscellaneous.md index 0b514c2780..42236ef8b8 100644 --- a/ChatQnA/README_miscellaneous.md +++ b/ChatQnA/README_miscellaneous.md @@ -13,7 +13,7 @@ To construct the MegaService with Rerank, we utilize the [GenAIExamples](https:/ ```bash git clone https://github.com/opea-project/GenAIExamples.git -git fetch && git checkout tags/v1.2 +git fetch && git checkout v1.3 cd GenAIExamples/ChatQnA docker build --no-cache -t opea/chatqna:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . 
``` diff --git a/ChatQnA/docker_compose/intel/cpu/aipc/README.md b/ChatQnA/docker_compose/intel/cpu/aipc/README.md index 5a217b1f3b..77d7ddfcd0 100644 --- a/ChatQnA/docker_compose/intel/cpu/aipc/README.md +++ b/ChatQnA/docker_compose/intel/cpu/aipc/README.md @@ -266,7 +266,7 @@ For details on how to verify the correctness of the response, refer to [how-to-v ```bash # download pdf file -wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf +wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.3/comps/third_parties/pathway/src/data/nke-10k-2023.pdf # upload pdf file with dataprep curl -X POST "http://${host_ip}:6007/v1/dataprep/ingest" \ diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/README.md b/ChatQnA/docker_compose/intel/cpu/xeon/README.md index eea4c6132d..166dc50c40 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/README.md +++ b/ChatQnA/docker_compose/intel/cpu/xeon/README.md @@ -29,10 +29,10 @@ git clone https://github.com/opea-project/GenAIExamples.git cd GenAIExamples/ChatQnA/docker_compose/intel/cpu/xeon/ ``` -Checkout a released version, such as v1.2: +Checkout a released version, such as v1.3: ``` -git checkout v1.2 +git checkout v1.3 ``` ### Generate a HuggingFace Access Token @@ -298,12 +298,12 @@ For details on how to verify the correctness of the response, refer to [how-to-v If you want to update the default knowledge base, you can use the following commands: - Update Knowledge Base via Local File [nke-10k-2023.pdf](https://github.com/opea-project/GenAIComps/blob/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf). Or - click [here](https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf) to download the file via any web browser. + Update Knowledge Base via Local File [nke-10k-2023.pdf](https://raw.githubusercontent.com/opea-project/GenAIComps/v1.3/comps/third_parties/pathway/src/data/nke-10k-2023.pdf). 
Or + click [here](https://raw.githubusercontent.com/opea-project/GenAIComps/v1.3/comps/third_parties/pathway/src/data/nke-10k-2023.pdf) to download the file via any web browser. Or run this command to get the file on a terminal. ```bash - wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf + wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.3/comps/third_parties/pathway/src/data/nke-10k-2023.pdf ``` Upload: diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md b/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md index 791c834cc6..b26435c335 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md +++ b/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md @@ -326,12 +326,12 @@ For details on how to verify the correctness of the response, refer to [how-to-v If you want to update the default knowledge base, you can use the following commands: -Update Knowledge Base via Local File [nke-10k-2023.pdf](https://github.com/opea-project/GenAIComps/blob/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf). Or -click [here](https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf) to download the file via any web browser. +Update Knowledge Base via Local File [nke-10k-2023.pdf](https://raw.githubusercontent.com/opea-project/GenAIComps/v1.3/comps/third_parties/pathway/src/data/nke-10k-2023.pdf). Or +click [here](https://raw.githubusercontent.com/opea-project/GenAIComps/v1.3/comps/third_parties/pathway/src/data/nke-10k-2023.pdf) to download the file via any web browser. Or run this command to get the file on a terminal. 
```bash -wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf +wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.3/comps/third_parties/pathway/src/data/nke-10k-2023.pdf ``` diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/README.md b/ChatQnA/docker_compose/intel/hpu/gaudi/README.md index 4de795be43..7889af7504 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/README.md +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/README.md @@ -31,10 +31,10 @@ git clone https://github.com/opea-project/GenAIExamples.git cd GenAIExamples/ChatQnA/docker_compose/intel/hpu/gaudi/ ``` -Checkout a released version, such as v1.2: +Checkout a released version, such as v1.3: ``` -git checkout v1.2 +git checkout v1.3 ``` ### Generate a HuggingFace Access Token @@ -276,7 +276,7 @@ Many of these services provide pipeline support required for all ChatQnA deploym In the configuration of the `vllm-service` and the `tgi-service`, two variables play a primary role in determining the service's performance and functionality: `LLM_MODEL_ID` and `NUM_CARDS`. Both can be set using the appropriate environment variables. The `LLM_MODEL_ID` parameter specifies the particular large language model (LLM) that the service will utilize, effectively determining the capabilities and characteristics of the language processing tasks it can perform. This model identifier ensures that the service is aligned with the specific requirements of the application, whether it involves text generation, comprehension, or other language-related tasks. The `NUM_CARDS` parameter dictates the number of Gaudi devices allocated to the service. A higher number of Gaudi devices can enhance parallel processing capabilities, reduce latency, and improve throughput. -However, developers need to be aware of the models that have been tested with the respective service image supporting the `vllm-service` and `tgi-service`. 
For example, documentation for the OPEA GenAIComps v1.0 release specify the list of [validated LLM models](https://github.com/opea-project/GenAIComps/blob/v1.0/comps/llms/text-generation/README.md#validated-llm-models) for each Gaudi enabled service image. Specific models may have stringent requirements on the number of Intel® Gaudi® devices required to support them. +However, developers need to be aware of the models that have been tested with the respective service image supporting the `vllm-service` and `tgi-service`. For example, documentation for the OPEA GenAIComps v1.3 release specify the list of [validated LLM models](https://github.com/opea-project/GenAIComps/tree/v1.3/comps/llms/src/text-generation#validated-llm-models) for each Gaudi enabled service image. Specific models may have stringent requirements on the number of Intel® Gaudi® devices required to support them. #### Deepseek Model Support for Intel® Gaudi® Platform ChatQnA pipeline diff --git a/CodeGen/docker_compose/amd/gpu/rocm/README.md b/CodeGen/docker_compose/amd/gpu/rocm/README.md index f3119a2091..a3718f7ad0 100644 --- a/CodeGen/docker_compose/amd/gpu/rocm/README.md +++ b/CodeGen/docker_compose/amd/gpu/rocm/README.md @@ -483,7 +483,7 @@ ex.: (From ChatQna) In the configuration of the `vllm-service` and the `tgi-service`, two variables play a primary role in determining the service's performance and functionality. The `LLM_MODEL_ID` parameter specifies the particular large language model (LLM) that the service will utilize, effectively determining the capabilities and characteristics of the language processing tasks it can perform. This model identifier ensures that the service is aligned with the specific requirements of the application, whether it involves text generation, comprehension, or other language-related tasks. -However, developers need to be aware of the models that have been tested with the respective service image supporting the `vllm-service` and `tgi-service`. 
For example, documentation for the OPEA GenAIComps v1.0 release specify the list of [validated LLM models](https://github.com/opea-project/GenAIComps/blob/v1.0/comps/llms/text-generation/README.md#validated-llm-models) for each AMD ROCm enabled service image. Specific models may have stringent requirements on the number of AMD ROCm devices required to support them. +However, developers need to be aware of the models that have been tested with the respective service image supporting the `vllm-service` and `tgi-service`. For example, documentation for the OPEA GenAIComps v1.3 release specify the list of [validated LLM models](https://github.com/opea-project/GenAIComps/tree/v1.3/comps/llms/src/text-generation#validated-llm-models) for each AMD ROCm enabled service image. Specific models may have stringent requirements on the number of AMD ROCm devices required to support them. This guide should enable developer to deploy the default configuration or any of the other compose yaml files for different configurations. It also highlights the configurable parameters that can be set before deployment. diff --git a/CodeTrans/docker_compose/amd/gpu/rocm/README.md b/CodeTrans/docker_compose/amd/gpu/rocm/README.md index 9ea891b496..9fef7c8426 100644 --- a/CodeTrans/docker_compose/amd/gpu/rocm/README.md +++ b/CodeTrans/docker_compose/amd/gpu/rocm/README.md @@ -31,10 +31,10 @@ git clone https://github.com/opea-project/GenAIExamples.git cd GenAIExamples/CodeTrans ``` -Then checkout a released version, such as v1.2: +Then checkout a released version, such as v1.3: ```bash -git checkout v1.2 +git checkout v1.3 ``` ### Configure the Deployment Environment diff --git a/GraphRAG/README.md b/GraphRAG/README.md index 314e18a0ef..0cdc3b5905 100644 --- a/GraphRAG/README.md +++ b/GraphRAG/README.md @@ -70,7 +70,7 @@ Here is an example of `Nike 2023` pdf. 
```bash # download pdf file -wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf +wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.3/comps/third_parties/pathway/src/data/nke-10k-2023.pdf # upload pdf file with dataprep curl -X POST "http://${host_ip}:11103/v1/dataprep/ingest" \ -H "Content-Type: multipart/form-data" \ @@ -204,7 +204,7 @@ Here is an example of `Nike 2023` pdf. ```bash # download pdf file -wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf +wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.3/comps/third_parties/pathway/src/data/nke-10k-2023.pdf # upload pdf file with dataprep curl -X POST "http://${host_ip}:6007/v1/dataprep/ingest" \ -H "Content-Type: multipart/form-data" \ diff --git a/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md b/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md index c251e07a8e..18a66dae77 100644 --- a/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md +++ b/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md @@ -354,7 +354,7 @@ export image_fn="apple.png" wget https://github.com/docarray/docarray/blob/main/tests/toydata/image-data/apple.png?raw=true -O ${image_fn} export pdf_fn="nke-10k-2023.pdf" -wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf -O ${pdf_fn} +wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.3/comps/third_parties/pathway/src/data/nke-10k-2023.pdf -O ${pdf_fn} export caption_fn="apple.txt" echo "This is an apple." 
> ${caption_fn} diff --git a/MultimodalQnA/docker_compose/intel/hpu/gaudi/README.md b/MultimodalQnA/docker_compose/intel/hpu/gaudi/README.md index 84c842fe0d..b0a03e6769 100644 --- a/MultimodalQnA/docker_compose/intel/hpu/gaudi/README.md +++ b/MultimodalQnA/docker_compose/intel/hpu/gaudi/README.md @@ -296,7 +296,7 @@ export image_fn="apple.png" wget https://github.com/docarray/docarray/blob/main/tests/toydata/image-data/apple.png?raw=true -O ${image_fn} export pdf_fn="nke-10k-2023.pdf" -wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf -O ${pdf_fn} +wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.3/comps/third_parties/pathway/src/data/nke-10k-2023.pdf -O ${pdf_fn} export caption_fn="apple.txt" echo "This is an apple." > ${caption_fn} diff --git a/MultimodalQnA/tests/test_compose_milvus_on_xeon.sh b/MultimodalQnA/tests/test_compose_milvus_on_xeon.sh index f23f8cd87a..60ea474f33 100644 --- a/MultimodalQnA/tests/test_compose_milvus_on_xeon.sh +++ b/MultimodalQnA/tests/test_compose_milvus_on_xeon.sh @@ -112,7 +112,7 @@ function prepare_data() { echo "Downloading image and video" wget https://github.com/docarray/docarray/blob/main/tests/toydata/image-data/apple.png?raw=true -O ${image_fn} wget http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/WeAreGoingOnBullrun.mp4 -O ${video_fn} - wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf -O ${pdf_fn} + wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.3/comps/third_parties/pathway/src/data/nke-10k-2023.pdf -O ${pdf_fn} echo "Writing caption file" echo "This is an apple." 
> ${caption_fn} sleep 1m diff --git a/MultimodalQnA/tests/test_compose_on_gaudi.sh b/MultimodalQnA/tests/test_compose_on_gaudi.sh index 11bcf5c7bf..774347e435 100644 --- a/MultimodalQnA/tests/test_compose_on_gaudi.sh +++ b/MultimodalQnA/tests/test_compose_on_gaudi.sh @@ -121,7 +121,7 @@ function prepare_data() { wget https://github.com/docarray/docarray/blob/main/tests/toydata/image-data/apple.png?raw=true -O ${image_fn} wget https://github.com/intel/intel-extension-for-transformers/raw/refs/tags/v1.5/intel_extension_for_transformers/neural_chat/ui/customized/talkingbot/src/lib/components/talkbot/assets/mid-age-man.mp3 -O ${audio_fn} wget http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/WeAreGoingOnBullrun.mp4 -O ${video_fn} - wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf -O ${pdf_fn} + wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.3/comps/third_parties/pathway/src/data/nke-10k-2023.pdf -O ${pdf_fn} echo "Writing caption file" echo "This is an apple." 
> ${caption_fn} diff --git a/MultimodalQnA/tests/test_compose_on_xeon.sh b/MultimodalQnA/tests/test_compose_on_xeon.sh index 9094faef3d..1a62915abe 100644 --- a/MultimodalQnA/tests/test_compose_on_xeon.sh +++ b/MultimodalQnA/tests/test_compose_on_xeon.sh @@ -118,7 +118,7 @@ function prepare_data() { wget https://github.com/docarray/docarray/blob/main/tests/toydata/image-data/apple.png?raw=true -O ${image_fn} wget https://github.com/intel/intel-extension-for-transformers/raw/refs/tags/v1.5/intel_extension_for_transformers/neural_chat/ui/customized/talkingbot/src/lib/components/talkbot/assets/mid-age-man.mp3 -O ${audio_fn} wget http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/WeAreGoingOnBullrun.mp4 -O ${video_fn} - wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf -O ${pdf_fn} + wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.3/comps/third_parties/pathway/src/data/nke-10k-2023.pdf -O ${pdf_fn} echo "Writing caption file" echo "This is an apple." 
> ${caption_fn} sleep 1m diff --git a/SearchQnA/docker_compose/amd/gpu/rocm/README.md b/SearchQnA/docker_compose/amd/gpu/rocm/README.md index 89f160501a..40533aac9f 100644 --- a/SearchQnA/docker_compose/amd/gpu/rocm/README.md +++ b/SearchQnA/docker_compose/amd/gpu/rocm/README.md @@ -30,10 +30,10 @@ git clone https://github.com/opea-project/GenAIExamples.git cd GenAIExamples/SearchQnA/docker_compose/amd/gpu/rocm ``` -Checkout a released version, such as v1.2: +Checkout a released version, such as v1.3: ```bash -git checkout v1.2 +git checkout v1.3 ``` ### Generate a HuggingFace Access Token diff --git a/SearchQnA/docker_compose/intel/cpu/xeon/README.md b/SearchQnA/docker_compose/intel/cpu/xeon/README.md index af0730370b..6f81ff4153 100644 --- a/SearchQnA/docker_compose/intel/cpu/xeon/README.md +++ b/SearchQnA/docker_compose/intel/cpu/xeon/README.md @@ -29,10 +29,10 @@ git clone https://github.com/opea-project/GenAIExamples.git cd GenAIExamples/SearchQnA ``` -Then checkout a released version, such as v1.2: +Then checkout a released version, such as v1.3: ```bash -git checkout v1.2 +git checkout v1.3 ``` ### Configure the Deployment Environment diff --git a/SearchQnA/docker_compose/intel/hpu/gaudi/README.md b/SearchQnA/docker_compose/intel/hpu/gaudi/README.md index 0feecdffa8..a00992de9f 100644 --- a/SearchQnA/docker_compose/intel/hpu/gaudi/README.md +++ b/SearchQnA/docker_compose/intel/hpu/gaudi/README.md @@ -29,10 +29,10 @@ git clone https://github.com/opea-project/GenAIExamples.git cd GenAIExamples/SearchQnA ``` -Then checkout a released version, such as v1.2: +Then checkout a released version, such as v1.3: ```bash -git checkout v1.2 +git checkout v1.3 ``` ### Configure the Deployment Environment diff --git a/Translation/docker_compose/amd/gpu/rocm/README.md b/Translation/docker_compose/amd/gpu/rocm/README.md index 827df63f29..7e2dca4e25 100644 --- a/Translation/docker_compose/amd/gpu/rocm/README.md +++ b/Translation/docker_compose/amd/gpu/rocm/README.md @@ -27,10 
+27,10 @@ git clone https://github.com/opea-project/GenAIExamples.git cd GenAIExamples/Translation/docker_compose/amd/gpu/rocm/ ``` -Checkout a released version, such as v1.2: +Checkout a released version, such as v1.3: ``` -git checkout v1.2 +git checkout v1.3 ``` ### Generate a HuggingFace Access Token diff --git a/Translation/docker_compose/intel/cpu/xeon/README.md b/Translation/docker_compose/intel/cpu/xeon/README.md index 095ca54c38..c377bc923e 100644 --- a/Translation/docker_compose/intel/cpu/xeon/README.md +++ b/Translation/docker_compose/intel/cpu/xeon/README.md @@ -27,10 +27,10 @@ git clone https://github.com/opea-project/GenAIExamples.git cd GenAIExamples/Translation/docker_compose/intel/cpu/xeon/ ``` -Checkout a released version, such as v1.2: +Checkout a released version, such as v1.3: ``` -git checkout v1.2 +git checkout v1.3 ``` ### Generate a HuggingFace Access Token diff --git a/Translation/docker_compose/intel/hpu/gaudi/README.md b/Translation/docker_compose/intel/hpu/gaudi/README.md index 097cb42c81..663494c916 100644 --- a/Translation/docker_compose/intel/hpu/gaudi/README.md +++ b/Translation/docker_compose/intel/hpu/gaudi/README.md @@ -27,10 +27,10 @@ git clone https://github.com/opea-project/GenAIExamples.git cd GenAIExamples/Translation/docker_compose/intel/hpu/gaudi/ ``` -Checkout a released version, such as v1.2: +Checkout a released version, such as v1.3: ``` -git checkout v1.2 +git checkout v1.3 ``` ### Generate a HuggingFace Access Token From 162f5a8fd2aa7d5347b0d468bdce116d6ef91ac9 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Sun, 25 May 2025 20:42:32 +0800 Subject: [PATCH 072/217] Fix error paths in README.md. 
(#1983) Signed-off-by: ZePan110 --- CodeGen/docker_compose/intel/cpu/xeon/README.md | 5 +++-- CodeGen/docker_compose/intel/hpu/gaudi/README.md | 5 +++-- CodeTrans/docker_compose/intel/cpu/xeon/README.md | 5 +++-- CodeTrans/docker_compose/intel/hpu/gaudi/README.md | 5 +++-- DBQnA/docker_compose/intel/cpu/xeon/README.md | 1 + DocSum/docker_compose/intel/cpu/xeon/README.md | 7 +++---- DocSum/docker_compose/intel/hpu/gaudi/README.md | 7 +++---- SearchQnA/docker_compose/intel/cpu/xeon/README.md | 4 ++-- SearchQnA/docker_compose/intel/hpu/gaudi/README.md | 4 ++-- 9 files changed, 23 insertions(+), 20 deletions(-) diff --git a/CodeGen/docker_compose/intel/cpu/xeon/README.md b/CodeGen/docker_compose/intel/cpu/xeon/README.md index 57eda8f821..88f0a51c6c 100644 --- a/CodeGen/docker_compose/intel/cpu/xeon/README.md +++ b/CodeGen/docker_compose/intel/cpu/xeon/README.md @@ -28,7 +28,7 @@ This guide focuses on running the pre-configured CodeGen service using Docker Co - Clone the `GenAIExamples` repository: ```bash git clone https://github.com/opea-project/GenAIExamples.git - cd GenAIExamples/CodeGen/docker_compose/intel/cpu/xeon + cd GenAIExamples/CodeGen/docker_compose ``` ## Quick Start Deployment @@ -48,7 +48,8 @@ This uses the default vLLM-based deployment profile (`codegen-xeon-vllm`). # export http_proxy="your_http_proxy" # export https_proxy="your_https_proxy" # export no_proxy="localhost,127.0.0.1,${HOST_IP}" # Add other hosts if necessary - source ../../set_env.sh + source intel/set_env.sh + cd /intel/cpu/xeon ``` _Note: The compose file might read additional variables from set_env.sh. Ensure all required variables like ports (`LLM_SERVICE_PORT`, `MEGA_SERVICE_PORT`, etc.) 
are set if not using defaults from the compose file._ diff --git a/CodeGen/docker_compose/intel/hpu/gaudi/README.md b/CodeGen/docker_compose/intel/hpu/gaudi/README.md index e94ccbf30d..4af050f051 100644 --- a/CodeGen/docker_compose/intel/hpu/gaudi/README.md +++ b/CodeGen/docker_compose/intel/hpu/gaudi/README.md @@ -28,7 +28,7 @@ This guide focuses on running the pre-configured CodeGen service using Docker Co - Clone the `GenAIExamples` repository: ```bash git clone https://github.com/opea-project/GenAIExamples.git - cd GenAIExamples/CodeGen/docker_compose/intel/hpu/gaudi + cd GenAIExamples/CodeGen/docker_compose ``` ## Quick Start Deployment @@ -48,7 +48,8 @@ This uses the default vLLM-based deployment profile (`codegen-gaudi-vllm`). # export http_proxy="your_http_proxy" # export https_proxy="your_https_proxy" # export no_proxy="localhost,127.0.0.1,${HOST_IP}" # Add other hosts if necessary - source ../../set_env.sh + source intel/set_env.sh + cd /intel/hpu/gaudi ``` _Note: The compose file might read additional variables from set_env.sh. Ensure all required variables like ports (`LLM_SERVICE_PORT`, `MEGA_SERVICE_PORT`, etc.) 
are set if not using defaults from the compose file._ diff --git a/CodeTrans/docker_compose/intel/cpu/xeon/README.md b/CodeTrans/docker_compose/intel/cpu/xeon/README.md index a4061fd42c..b01492ff12 100755 --- a/CodeTrans/docker_compose/intel/cpu/xeon/README.md +++ b/CodeTrans/docker_compose/intel/cpu/xeon/README.md @@ -46,7 +46,8 @@ export http_proxy="Your_HTTP_Proxy" # http proxy if any export https_proxy="Your_HTTPs_Proxy" # https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed export NGINX_PORT=${your_nginx_port} # your usable port for nginx, 80 for example -source docker_compose/intel/set_env.sh +cd docker_compose/intel/ +source set_env.sh ``` Consult the section on [CodeTrans Service configuration](#codetrans-configuration) for information on how service specific configuration parameters affect deployments. @@ -56,7 +57,7 @@ Consult the section on [CodeTrans Service configuration](#codetrans-configuratio To deploy the CodeTrans services, execute the `docker compose up` command with the appropriate arguments. For a default deployment, execute the command below. It uses the 'compose.yaml' file. 
```bash -cd docker_compose/intel/cpu/xeon +cd cpu/xeon docker compose -f compose.yaml up -d ``` diff --git a/CodeTrans/docker_compose/intel/hpu/gaudi/README.md b/CodeTrans/docker_compose/intel/hpu/gaudi/README.md index 9d6bc8ae44..00551eb406 100755 --- a/CodeTrans/docker_compose/intel/hpu/gaudi/README.md +++ b/CodeTrans/docker_compose/intel/hpu/gaudi/README.md @@ -46,7 +46,8 @@ export http_proxy="Your_HTTP_Proxy" # http proxy if any export https_proxy="Your_HTTPs_Proxy" # https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed export NGINX_PORT=${your_nginx_port} # your usable port for nginx, 80 for example -source docker_compose/intel/set_env.sh +cd docker_compose/intel +source set_env.sh ``` Consult the section on [CodeTrans Service configuration](#codetrans-configuration) for information on how service specific configuration parameters affect deployments. @@ -56,7 +57,7 @@ Consult the section on [CodeTrans Service configuration](#codetrans-configuratio To deploy the CodeTrans services, execute the `docker compose up` command with the appropriate arguments. For a default deployment, execute the command below. It uses the 'compose.yaml' file. 
```bash -cd docker_compose/intel/hpu/gaudi +cd hpu/gaudi docker compose -f compose.yaml up -d ``` diff --git a/DBQnA/docker_compose/intel/cpu/xeon/README.md b/DBQnA/docker_compose/intel/cpu/xeon/README.md index 7b02568f56..c227e1fe46 100644 --- a/DBQnA/docker_compose/intel/cpu/xeon/README.md +++ b/DBQnA/docker_compose/intel/cpu/xeon/README.md @@ -73,6 +73,7 @@ or edit the file set_env.sh to set those environment variables, ```bash +cd GenAIExamples/DBQnA/docker_compose/intel/cpu/xeon/ source set_env.sh ``` diff --git a/DocSum/docker_compose/intel/cpu/xeon/README.md b/DocSum/docker_compose/intel/cpu/xeon/README.md index f62a62c1e8..23af6bf96b 100644 --- a/DocSum/docker_compose/intel/cpu/xeon/README.md +++ b/DocSum/docker_compose/intel/cpu/xeon/README.md @@ -27,9 +27,8 @@ Clone the GenAIExample repository and access the ChatQnA Intel Xeon platform Doc ```bash git clone https://github.com/opea-project/GenAIExamples.git -cd GenAIExamples/DocSum/docker_compose/intel -source set_env.sh -cd cpu/xeon/ +cd GenAIExamples/DocSum/docker_compose +source intel/set_env.sh ``` NOTE: by default vLLM does "warmup" at start, to optimize its performance for the specified model and the underlying platform, which can take long time. For development (and e.g. autoscaling) it can be skipped with `export VLLM_SKIP_WARMUP=true`. @@ -49,7 +48,7 @@ Some HuggingFace resources, such as some models, are only accessible if you have To deploy the DocSum services, execute the `docker compose up` command with the appropriate arguments. 
For a default deployment, execute: ```bash -cd cpu/xeon/ +cd intel/cpu/xeon/ docker compose up -d ``` diff --git a/DocSum/docker_compose/intel/hpu/gaudi/README.md b/DocSum/docker_compose/intel/hpu/gaudi/README.md index 98a0b132ac..c0a2180441 100644 --- a/DocSum/docker_compose/intel/hpu/gaudi/README.md +++ b/DocSum/docker_compose/intel/hpu/gaudi/README.md @@ -29,9 +29,8 @@ Clone the GenAIExample repository and access the DocSum Intel® Gaudi® platform ```bash git clone https://github.com/opea-project/GenAIExamples.git -cd GenAIExamples/DocSum/docker_compose/intel -source set_env.sh -cd hpu/gaudi/ +cd GenAIExamples/DocSum/docker_compose +source intel/set_env.sh ``` NOTE: by default vLLM does "warmup" at start, to optimize its performance for the specified model and the underlying platform, which can take long time. For development (and e.g. autoscaling) it can be skipped with `export VLLM_SKIP_WARMUP=true`. @@ -51,7 +50,7 @@ Some HuggingFace resources, such as some models, are only accessible if you have To deploy the DocSum services, execute the `docker compose up` command with the appropriate arguments. For a default deployment, execute: ```bash -cd hpu/gaudi/ +cd intel/hpu/gaudi/ docker compose up -d ``` diff --git a/SearchQnA/docker_compose/intel/cpu/xeon/README.md b/SearchQnA/docker_compose/intel/cpu/xeon/README.md index 6f81ff4153..742a6ae1cd 100644 --- a/SearchQnA/docker_compose/intel/cpu/xeon/README.md +++ b/SearchQnA/docker_compose/intel/cpu/xeon/README.md @@ -26,7 +26,7 @@ Clone the GenAIExample repository and access the SearchQnA Intel® Xeon® platfo ```bash git clone https://github.com/opea-project/GenAIExamples.git -cd GenAIExamples/SearchQnA +cd GenAIExamples/SearchQnA/docker_compose/intel ``` Then checkout a released version, such as v1.3: @@ -58,7 +58,7 @@ Consult the section on [SearchQnA Service configuration](#SearchQnA-configuratio To deploy the SearchQnA services, execute the `docker compose up` command with the appropriate arguments. 
For a default deployment, execute the command below. It uses the 'compose.yaml' file. ```bash -cd docker_compose/intel/cpu/xeon +cd cpu/xeon docker compose -f compose.yaml up -d ``` diff --git a/SearchQnA/docker_compose/intel/hpu/gaudi/README.md b/SearchQnA/docker_compose/intel/hpu/gaudi/README.md index a00992de9f..611b4a3c44 100644 --- a/SearchQnA/docker_compose/intel/hpu/gaudi/README.md +++ b/SearchQnA/docker_compose/intel/hpu/gaudi/README.md @@ -26,7 +26,7 @@ Clone the GenAIExample repository and access the searchqna Intel® Gaudi® platf ```bash git clone https://github.com/opea-project/GenAIExamples.git -cd GenAIExamples/SearchQnA +cd GenAIExamples/SearchQnA/docker_compose/intel ``` Then checkout a released version, such as v1.3: @@ -58,7 +58,7 @@ Consult the section on [SearchQnA Service configuration](#SearchQnA-configuratio To deploy the SearchQnA services, execute the `docker compose up` command with the appropriate arguments. For a default deployment, execute the command below. It uses the 'compose.yaml' file. 
```bash -cd docker_compose/intel/hpu/gaudi +cd hpu/gaudi docker compose -f compose.yaml up -d ``` From 52eef7eca157c03430fdda3771e25a948d51bf86 Mon Sep 17 00:00:00 2001 From: xiguiw <111278656+xiguiw@users.noreply.github.com> Date: Tue, 27 May 2025 20:25:51 +0800 Subject: [PATCH 073/217] CodeGen update input prompt template (#1997) Signed-off-by: Wang, Xigui --- CodeGen/codegen.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/CodeGen/codegen.py b/CodeGen/codegen.py index 63a0482777..cec8b97286 100644 --- a/CodeGen/codegen.py +++ b/CodeGen/codegen.py @@ -29,6 +29,7 @@ REDIS_RETRIEVER_PORT = int(os.getenv("REDIS_RETRIEVER_PORT", 7000)) TEI_EMBEDDING_HOST_IP = os.getenv("TEI_EMBEDDING_HOST_IP", "0.0.0.0") EMBEDDER_PORT = int(os.getenv("EMBEDDER_PORT", 6000)) +LLM_MODEL_ID = os.getenv("LLM_MODEL_ID", "Qwen/Qwen2.5-Coder-7B-Instruct") OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", None) grader_prompt = """You are a grader assessing relevance of a retrieved document to a user question. 
\n @@ -67,11 +68,22 @@ def align_inputs(self, inputs, cur_node, runtime_graph, llm_parameters_dict, **k inputs["input"] = inputs["query"] # Check if the current service type is RETRIEVER - if self.services[cur_node].service_type == ServiceType.RETRIEVER: + elif self.services[cur_node].service_type == ServiceType.RETRIEVER: # Extract the embedding from the inputs embedding = inputs["data"][0]["embedding"] # Align the inputs for the retriever service inputs = {"index_name": llm_parameters_dict["index_name"], "text": self.input_query, "embedding": embedding} + elif self.services[cur_node].service_type == ServiceType.LLM: + # convert TGI/vLLM to unified OpenAI /v1/chat/completions format + next_inputs = {} + next_inputs["model"] = LLM_MODEL_ID + next_inputs["messages"] = [{"role": "user", "content": inputs["query"]}] + next_inputs["max_tokens"] = llm_parameters_dict["max_tokens"] + next_inputs["top_p"] = llm_parameters_dict["top_p"] + next_inputs["stream"] = inputs["stream"] + next_inputs["frequency_penalty"] = inputs["frequency_penalty"] + next_inputs["temperature"] = inputs["temperature"] + inputs = next_inputs return inputs From 91611a5af28345b99b62ed783417982d33764786 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Thu, 29 May 2025 11:01:34 +0800 Subject: [PATCH 074/217] Add tests for different input formats (#2006) Signed-off-by: ZePan110 --- CodeGen/tests/test_compose_on_gaudi.sh | 7 +++++++ CodeGen/tests/test_compose_on_xeon.sh | 7 +++++++ 2 files changed, 14 insertions(+) diff --git a/CodeGen/tests/test_compose_on_gaudi.sh b/CodeGen/tests/test_compose_on_gaudi.sh index 87acfbaa5a..38354233d9 100644 --- a/CodeGen/tests/test_compose_on_gaudi.sh +++ b/CodeGen/tests/test_compose_on_gaudi.sh @@ -143,6 +143,13 @@ function validate_megaservice() { "codegen-gaudi-backend-server" \ '{ "index_name": "test_redis", "agents_flag": "True", "messages": "def print_hello_world():", "max_tokens": 256}' + validate_services \ + "${ip_address}:7778/v1/codegen" \ + "class" \ + 
"mega-codegen" \ + "codegen-xeon-backend-server" \ + '{"model": "Qwen/Qwen2.5-Coder-7B-Instruct", "messages": [{"role": "user", "content": "Implement a basic Python class"}], "max_tokens":32}' + } function validate_frontend() { diff --git a/CodeGen/tests/test_compose_on_xeon.sh b/CodeGen/tests/test_compose_on_xeon.sh index a50e5f0a7e..4d7267d615 100644 --- a/CodeGen/tests/test_compose_on_xeon.sh +++ b/CodeGen/tests/test_compose_on_xeon.sh @@ -146,6 +146,13 @@ function validate_megaservice() { "codegen-xeon-backend-server" \ '{ "index_name": "test_redis", "agents_flag": "True", "messages": "def print_hello_world():", "max_tokens": 256}' + validate_services \ + "${ip_address}:7778/v1/codegen" \ + "class" \ + "mega-codegen" \ + "codegen-xeon-backend-server" \ + '{"model": "Qwen/Qwen2.5-Coder-7B-Instruct", "messages": [{"role": "user", "content": "Implement a basic Python class"}], "max_tokens":32}' + } function validate_frontend() { From 49b18857ff885ff5d8d08418cc8edf3e9ec70b0d Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Thu, 29 May 2025 11:02:09 +0800 Subject: [PATCH 075/217] Fix security issues in workflows (#1977) Signed-off-by: ZePan110 --- .github/workflows/_build_comps_base_image.yml | 7 +++- .github/workflows/_build_image.yml | 17 +++++++- .github/workflows/_example-workflow.yml | 17 +++++++- .github/workflows/_get-image-list.yml | 3 +- .github/workflows/_gmc-e2e.yml | 3 +- .github/workflows/_gmc-workflow.yml | 3 +- .github/workflows/dockerhub-description.yml | 2 + .github/workflows/manual-docker-clean.yml | 2 + .github/workflows/manual-example-workflow.yml | 19 ++++++++- .github/workflows/manual-freeze-tag.yml | 3 +- .github/workflows/manual-image-build.yml | 16 ++++++++ .../workflows/manual-reset-local-registry.yml | 16 ++++++++ .github/workflows/mix-trellix.yml | 3 +- .../nightly-docker-build-publish.yml | 40 +++++++++++++++++++ .github/workflows/pr-chart-e2e.yml | 3 +- .../workflows/pr-check-duplicated-image.yml | 3 +- .github/workflows/pr-code-scan.yml | 
4 +- .github/workflows/pr-docker-compose-e2e.yml | 3 ++ ...pr-dockerfile-path-and-build-yaml-scan.yml | 3 +- .github/workflows/pr-link-path-scan.yml | 3 ++ .github/workflows/push-image-build.yml | 17 ++++++++ .../workflows/push-images-path-detection.yml | 4 +- .../workflows/push-infra-issue-creation.yml | 4 ++ .github/workflows/weekly-example-test.yml | 24 +++++++++++ 24 files changed, 205 insertions(+), 14 deletions(-) diff --git a/.github/workflows/_build_comps_base_image.yml b/.github/workflows/_build_comps_base_image.yml index 2f3cb00312..04c8a55440 100644 --- a/.github/workflows/_build_comps_base_image.yml +++ b/.github/workflows/_build_comps_base_image.yml @@ -2,7 +2,12 @@ # SPDX-License-Identifier: Apache-2.0 name: Build Comps Base Image -permissions: read-all + +permissions: + attestations: read + models: read + security-events: read + on: workflow_call: inputs: diff --git a/.github/workflows/_build_image.yml b/.github/workflows/_build_image.yml index 79cab22216..a62686b3c5 100644 --- a/.github/workflows/_build_image.yml +++ b/.github/workflows/_build_image.yml @@ -2,7 +2,22 @@ # SPDX-License-Identifier: Apache-2.0 name: Build Images -permissions: read-all +permissions: + actions: read + contents: read + checks: read + deployments: read + discussions: read + issues: read + packages: read + pages: read + pull-requests: read + repository-projects: read + statuses: read + security-events: read + id-token: write + attestations: read + models: read on: workflow_call: inputs: diff --git a/.github/workflows/_example-workflow.yml b/.github/workflows/_example-workflow.yml index bc54f6e63b..35b1f16cd1 100644 --- a/.github/workflows/_example-workflow.yml +++ b/.github/workflows/_example-workflow.yml @@ -2,7 +2,22 @@ # SPDX-License-Identifier: Apache-2.0 name: Example jobs -permissions: read-all +permissions: + actions: read + contents: read + checks: read + deployments: read + discussions: read + issues: read + packages: read + pages: read + pull-requests: read + 
repository-projects: read + statuses: read + security-events: read + id-token: write + attestations: read + models: read on: workflow_call: inputs: diff --git a/.github/workflows/_get-image-list.yml b/.github/workflows/_get-image-list.yml index 9abd893ecc..7a790d764a 100644 --- a/.github/workflows/_get-image-list.yml +++ b/.github/workflows/_get-image-list.yml @@ -2,7 +2,8 @@ # SPDX-License-Identifier: Apache-2.0 name: Get Image List -permissions: read-all +permissions: + contents: read on: workflow_call: inputs: diff --git a/.github/workflows/_gmc-e2e.yml b/.github/workflows/_gmc-e2e.yml index 331eea0c81..ba50e8b955 100644 --- a/.github/workflows/_gmc-e2e.yml +++ b/.github/workflows/_gmc-e2e.yml @@ -3,7 +3,8 @@ # This workflow will only test GMC pipeline and will not install GMC any more name: Single GMC E2e Test For CD Workflow Call - +permissions: + contents: read on: workflow_call: inputs: diff --git a/.github/workflows/_gmc-workflow.yml b/.github/workflows/_gmc-workflow.yml index 77c01177a5..32ff08266d 100644 --- a/.github/workflows/_gmc-workflow.yml +++ b/.github/workflows/_gmc-workflow.yml @@ -2,7 +2,8 @@ # SPDX-License-Identifier: Apache-2.0 name: Build and deploy GMC system on call and manual - +permissions: + contents: read on: workflow_dispatch: inputs: diff --git a/.github/workflows/dockerhub-description.yml b/.github/workflows/dockerhub-description.yml index 4dcfee1f36..296f464f47 100644 --- a/.github/workflows/dockerhub-description.yml +++ b/.github/workflows/dockerhub-description.yml @@ -2,6 +2,8 @@ # SPDX-License-Identifier: Apache-2.0 name: Update Docker Hub Description +permissions: + contents: read on: schedule: - cron: "0 0 * * 0" diff --git a/.github/workflows/manual-docker-clean.yml b/.github/workflows/manual-docker-clean.yml index 25cf228721..886cf27234 100644 --- a/.github/workflows/manual-docker-clean.yml +++ b/.github/workflows/manual-docker-clean.yml @@ -2,6 +2,8 @@ # SPDX-License-Identifier: Apache-2.0 name: Clean up container on manual 
event +permissions: + contents: read on: workflow_dispatch: inputs: diff --git a/.github/workflows/manual-example-workflow.yml b/.github/workflows/manual-example-workflow.yml index 919ccdeae7..338454d9aa 100644 --- a/.github/workflows/manual-example-workflow.yml +++ b/.github/workflows/manual-example-workflow.yml @@ -2,6 +2,24 @@ # SPDX-License-Identifier: Apache-2.0 name: Examples CD workflow on manual event + +permissions: + actions: read + contents: read + checks: read + deployments: read + discussions: read + issues: read + packages: read + pages: read + pull-requests: read + repository-projects: read + statuses: read + security-events: read + id-token: write + attestations: read + models: read + on: workflow_dispatch: inputs: @@ -51,7 +69,6 @@ on: required: false type: boolean -permissions: read-all jobs: get-test-matrix: runs-on: ubuntu-latest diff --git a/.github/workflows/manual-freeze-tag.yml b/.github/workflows/manual-freeze-tag.yml index 6dd55c9032..88c1bb6c85 100644 --- a/.github/workflows/manual-freeze-tag.yml +++ b/.github/workflows/manual-freeze-tag.yml @@ -2,7 +2,8 @@ # SPDX-License-Identifier: Apache-2.0 name: Freeze OPEA images release tag - +permissions: + contents: read on: workflow_dispatch: inputs: diff --git a/.github/workflows/manual-image-build.yml b/.github/workflows/manual-image-build.yml index 92da9c2231..fbfd2bef26 100644 --- a/.github/workflows/manual-image-build.yml +++ b/.github/workflows/manual-image-build.yml @@ -2,6 +2,22 @@ # SPDX-License-Identifier: Apache-2.0 name: Build specific images on manual event +permissions: + actions: read + contents: read + checks: read + deployments: read + discussions: read + issues: read + packages: read + pages: read + pull-requests: read + repository-projects: read + statuses: read + security-events: read + id-token: write + attestations: read + models: read on: workflow_dispatch: inputs: diff --git a/.github/workflows/manual-reset-local-registry.yml 
b/.github/workflows/manual-reset-local-registry.yml index de9cfd78e9..88c839ab82 100644 --- a/.github/workflows/manual-reset-local-registry.yml +++ b/.github/workflows/manual-reset-local-registry.yml @@ -2,6 +2,22 @@ # SPDX-License-Identifier: Apache-2.0 name: Clean up Local Registry on manual event +permissions: + actions: read + contents: read + checks: read + deployments: read + discussions: read + issues: read + packages: read + pages: read + pull-requests: read + repository-projects: read + statuses: read + security-events: read + id-token: write + attestations: read + models: read on: workflow_dispatch: inputs: diff --git a/.github/workflows/mix-trellix.yml b/.github/workflows/mix-trellix.yml index 8779f3b9ad..65f18e6dbc 100644 --- a/.github/workflows/mix-trellix.yml +++ b/.github/workflows/mix-trellix.yml @@ -2,7 +2,8 @@ # SPDX-License-Identifier: Apache-2.0 name: Trellix Command Line Scanner - +permissions: + contents: read on: workflow_dispatch: schedule: diff --git a/.github/workflows/nightly-docker-build-publish.yml b/.github/workflows/nightly-docker-build-publish.yml index adac4b6d14..1d776c8433 100644 --- a/.github/workflows/nightly-docker-build-publish.yml +++ b/.github/workflows/nightly-docker-build-publish.yml @@ -2,6 +2,8 @@ # SPDX-License-Identifier: Apache-2.0 name: Nightly build/publish latest docker images +permissions: + security-events: read on: schedule: @@ -33,12 +35,32 @@ jobs: echo "PUBLISH_TAGS=$PUBLISH_TAGS" >> $GITHUB_OUTPUT build-comps-base: + permissions: + attestations: read + models: read + security-events: read needs: [get-build-matrix] uses: ./.github/workflows/_build_comps_base_image.yml with: node: gaudi build-images: + permissions: + actions: read + contents: read + checks: read + deployments: read + discussions: read + issues: read + packages: read + pages: read + pull-requests: read + repository-projects: read + statuses: read + security-events: read + id-token: write + attestations: read + models: read needs: 
[get-build-matrix, build-comps-base] strategy: matrix: @@ -53,6 +75,22 @@ jobs: test-example: needs: [get-build-matrix] + permissions: + actions: read + contents: read + checks: read + deployments: read + discussions: read + issues: read + packages: read + pages: read + pull-requests: read + repository-projects: read + statuses: read + security-events: read + id-token: write + attestations: read + models: read if: ${{ needs.get-build-matrix.outputs.examples_json != '' }} strategy: matrix: @@ -69,6 +107,8 @@ jobs: get-image-list: needs: [get-build-matrix] + permissions: + contents: read uses: ./.github/workflows/_get-image-list.yml with: examples: ${{ needs.get-build-matrix.outputs.EXAMPLES }} diff --git a/.github/workflows/pr-chart-e2e.yml b/.github/workflows/pr-chart-e2e.yml index 876960e7d9..3990e5fce9 100644 --- a/.github/workflows/pr-chart-e2e.yml +++ b/.github/workflows/pr-chart-e2e.yml @@ -2,7 +2,8 @@ # SPDX-License-Identifier: Apache-2.0 name: E2E Test with Helm Charts - +permissions: + contents: read on: pull_request_target: branches: [main] diff --git a/.github/workflows/pr-check-duplicated-image.yml b/.github/workflows/pr-check-duplicated-image.yml index 0cdba415a2..2922b8f4fe 100644 --- a/.github/workflows/pr-check-duplicated-image.yml +++ b/.github/workflows/pr-check-duplicated-image.yml @@ -2,7 +2,8 @@ # SPDX-License-Identifier: Apache-2.0 name: Check Duplicated Images - +permissions: + contents: read on: pull_request: branches: [main] diff --git a/.github/workflows/pr-code-scan.yml b/.github/workflows/pr-code-scan.yml index 7accb94ea2..10d3ea5fa4 100644 --- a/.github/workflows/pr-code-scan.yml +++ b/.github/workflows/pr-code-scan.yml @@ -2,7 +2,9 @@ # SPDX-License-Identifier: Apache-2.0 name: Code Scan - +permissions: + contents: read + security-events: write on: pull_request: branches: [main] diff --git a/.github/workflows/pr-docker-compose-e2e.yml b/.github/workflows/pr-docker-compose-e2e.yml index a7604f29af..d7aba56bb5 100644 --- 
a/.github/workflows/pr-docker-compose-e2e.yml +++ b/.github/workflows/pr-docker-compose-e2e.yml @@ -3,6 +3,9 @@ name: E2E test with docker compose +permissions: + contents: read + on: pull_request_target: branches: ["main", "*rc"] diff --git a/.github/workflows/pr-dockerfile-path-and-build-yaml-scan.yml b/.github/workflows/pr-dockerfile-path-and-build-yaml-scan.yml index 3b8be26137..2775c2ae13 100644 --- a/.github/workflows/pr-dockerfile-path-and-build-yaml-scan.yml +++ b/.github/workflows/pr-dockerfile-path-and-build-yaml-scan.yml @@ -2,7 +2,8 @@ # SPDX-License-Identifier: Apache-2.0 name: Compose file and dockerfile path checking - +permissions: + contents: read on: pull_request: branches: [main] diff --git a/.github/workflows/pr-link-path-scan.yml b/.github/workflows/pr-link-path-scan.yml index 1f389a7c69..d165b82ea1 100644 --- a/.github/workflows/pr-link-path-scan.yml +++ b/.github/workflows/pr-link-path-scan.yml @@ -3,6 +3,9 @@ name: Check hyperlinks and relative path validity +permissions: + contents: read + on: pull_request: branches: [main] diff --git a/.github/workflows/push-image-build.yml b/.github/workflows/push-image-build.yml index 0cbbb970db..9f551a67bc 100644 --- a/.github/workflows/push-image-build.yml +++ b/.github/workflows/push-image-build.yml @@ -3,6 +3,23 @@ # Test name: Build latest images on push event +permissions: + actions: read + contents: read + checks: read + deployments: read + discussions: read + issues: read + packages: read + pages: read + pull-requests: read + repository-projects: read + statuses: read + security-events: read + id-token: write + attestations: read + models: read + on: push: branches: [ 'main' ] diff --git a/.github/workflows/push-images-path-detection.yml b/.github/workflows/push-images-path-detection.yml index 299ee4d180..9edfad2678 100644 --- a/.github/workflows/push-images-path-detection.yml +++ b/.github/workflows/push-images-path-detection.yml @@ -3,10 +3,12 @@ name: Check the validity of links in 
docker_images_list. +permissions: + contents: read + on: push: branches: [main] - types: [opened, reopened, ready_for_review, synchronize] jobs: check-dockerfile-paths: diff --git a/.github/workflows/push-infra-issue-creation.yml b/.github/workflows/push-infra-issue-creation.yml index 132f64d1a8..2dd2de23c0 100644 --- a/.github/workflows/push-infra-issue-creation.yml +++ b/.github/workflows/push-infra-issue-creation.yml @@ -8,6 +8,10 @@ on: - "**/docker_compose/**/compose*.yaml" name: Create an issue to GenAIInfra on push + +permissions: + contents: read + jobs: job1: name: Create issue diff --git a/.github/workflows/weekly-example-test.yml b/.github/workflows/weekly-example-test.yml index 4b8391a1dd..832cc11681 100644 --- a/.github/workflows/weekly-example-test.yml +++ b/.github/workflows/weekly-example-test.yml @@ -3,6 +3,10 @@ name: Weekly test all examples on multiple HWs +permissions: + contents: read + id-token: write + on: schedule: - cron: "30 2 * * 6" # UTC time @@ -31,6 +35,10 @@ jobs: build-comps-base: needs: [get-test-matrix] + permissions: + attestations: read + models: read + security-events: read strategy: matrix: node: ${{ fromJson(needs.get-test-matrix.outputs.nodes) }} @@ -39,6 +47,22 @@ jobs: node: ${{ matrix.node }} run-examples: + permissions: + contents: read + id-token: write + actions: read + attestations: read + checks: read + deployments: read + discussions: read + issues: read + models: read + packages: read + pages: read + pull-requests: read + repository-projects: read + statuses: read + security-events: read needs: [get-test-matrix, build-comps-base] strategy: matrix: From fdc55ee96a1a7d33b39e1bbaeabd19b1b9f9fdef Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Thu, 29 May 2025 11:27:06 +0800 Subject: [PATCH 076/217] Integrate MultimodalQnA set_env to ut scripts. (#1965) Integrate MultimodalQnA set_env to ut scripts. Add README.md for UT scripts. 
Signed-off-by: ZePan110 --- .../docker_compose/amd/gpu/rocm/compose.yaml | 2 +- .../amd/gpu/rocm/compose_vllm.yaml | 2 +- .../docker_compose/amd/gpu/rocm/set_env.sh | 8 +-- .../amd/gpu/rocm/set_env_vllm.sh | 14 ++--- .../intel/cpu/xeon/compose.yaml | 2 +- .../intel/cpu/xeon/compose_milvus.yaml | 2 +- .../docker_compose/intel/cpu/xeon/set_env.sh | 54 ------------------- .../intel/hpu/gaudi/compose.yaml | 2 +- .../intel/hpu/gaudi/compose_milvus.yaml | 2 +- .../intel/{hpu/gaudi => }/set_env.sh | 4 +- MultimodalQnA/tests/README.md | 45 ++++++++++++++++ .../tests/test_compose_milvus_on_xeon.sh | 29 +--------- MultimodalQnA/tests/test_compose_on_gaudi.sh | 38 +------------ MultimodalQnA/tests/test_compose_on_rocm.sh | 32 +---------- MultimodalQnA/tests/test_compose_on_xeon.sh | 37 +------------ .../tests/test_compose_vllm_on_rocm.sh | 31 +---------- 16 files changed, 74 insertions(+), 230 deletions(-) delete mode 100755 MultimodalQnA/docker_compose/intel/cpu/xeon/set_env.sh rename MultimodalQnA/docker_compose/intel/{hpu/gaudi => }/set_env.sh (96%) create mode 100644 MultimodalQnA/tests/README.md diff --git a/MultimodalQnA/docker_compose/amd/gpu/rocm/compose.yaml b/MultimodalQnA/docker_compose/amd/gpu/rocm/compose.yaml index 1691cbb33f..2688b1ed68 100644 --- a/MultimodalQnA/docker_compose/amd/gpu/rocm/compose.yaml +++ b/MultimodalQnA/docker_compose/amd/gpu/rocm/compose.yaml @@ -166,7 +166,7 @@ services: MM_EMBEDDING_PORT_MICROSERVICE: ${MM_EMBEDDING_PORT_MICROSERVICE} MM_RETRIEVER_SERVICE_HOST_IP: ${MM_RETRIEVER_SERVICE_HOST_IP} LVM_SERVICE_HOST_IP: ${LVM_SERVICE_HOST_IP} - WHISPER_SERVER_PORT: ${WHISPER_PORT} + WHISPER_SERVER_PORT: ${WHISPER_SERVER_PORT} WHISPER_SERVER_ENDPOINT: ${WHISPER_SERVER_ENDPOINT} ipc: host restart: always diff --git a/MultimodalQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml b/MultimodalQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml index 40166110ab..5575e7cdc8 100644 --- a/MultimodalQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml +++ 
b/MultimodalQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml @@ -170,7 +170,7 @@ services: MM_EMBEDDING_PORT_MICROSERVICE: ${MM_EMBEDDING_PORT_MICROSERVICE} MM_RETRIEVER_SERVICE_HOST_IP: ${MM_RETRIEVER_SERVICE_HOST_IP} LVM_SERVICE_HOST_IP: ${LVM_SERVICE_HOST_IP} - WHISPER_SERVER_PORT: ${WHISPER_PORT} + WHISPER_SERVER_PORT: ${WHISPER_SERVER_PORT} WHISPER_SERVER_ENDPOINT: ${WHISPER_SERVER_ENDPOINT} ipc: host restart: always diff --git a/MultimodalQnA/docker_compose/amd/gpu/rocm/set_env.sh b/MultimodalQnA/docker_compose/amd/gpu/rocm/set_env.sh index 5c7516e7a4..31635d5768 100644 --- a/MultimodalQnA/docker_compose/amd/gpu/rocm/set_env.sh +++ b/MultimodalQnA/docker_compose/amd/gpu/rocm/set_env.sh @@ -3,8 +3,8 @@ # Copyright (C) 2024 Advanced Micro Devices, Inc. # SPDX-License-Identifier: Apache-2.0 -export HOST_IP=${your_host_ip_address} -export MULTIMODAL_HUGGINGFACEHUB_API_TOKEN=${your_huggingfacehub_token} +export HOST_IP=${ip_address} +export MULTIMODAL_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export MULTIMODAL_TGI_SERVICE_PORT="8399" export no_proxy=${your_no_proxy} export http_proxy=${your_http_proxy} @@ -31,5 +31,5 @@ export DATAPREP_GEN_TRANSCRIPT_SERVICE_ENDPOINT="http://${HOST_IP}:6007/v1/datap export DATAPREP_GEN_CAPTION_SERVICE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/generate_captions" export DATAPREP_GET_FILE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/get" export DATAPREP_DELETE_FILE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/delete" -export WHISPER_PORT="7066" -export WHISPER_SERVER_ENDPOINT="http://${host_ip}:${WHISPER_PORT}/v1/asr" +export WHISPER_SERVER_PORT=7066 +export WHISPER_SERVER_ENDPOINT="http://${host_ip}:${WHISPER_SERVER_PORT}/v1/asr" diff --git a/MultimodalQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/MultimodalQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh index 623d0c5272..6bf17d9b0b 100644 --- a/MultimodalQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/MultimodalQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -3,9 +3,9 
@@ # Copyright (C) 2024 Advanced Micro Devices, Inc. # SPDX-License-Identifier: Apache-2.0 -export HOST_IP=${your_host_ip_address} -export MULTIMODAL_HUGGINGFACEHUB_API_TOKEN=${your_huggingfacehub_token} -export MULTIMODAL_TGI_SERVICE_PORT="8399" +export HOST_IP=${ip_address} +export MULTIMODAL_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} +export MULTIMODAL_VLLM_SERVICE_PORT="8399" export no_proxy=${your_no_proxy} export http_proxy=${your_http_proxy} export https_proxy=${your_http_proxy} @@ -17,9 +17,9 @@ export REDIS_URL="redis://${HOST_IP}:6379" export REDIS_HOST=${HOST_IP} export INDEX_NAME="mm-rag-redis" export VLLM_SERVER_PORT=8081 -export LVM_ENDPOINT="http://${HOST_IP}:${VLLM_SERVER_PORT}" +export LVM_ENDPOINT="http://${HOST_IP}:8399" export EMBEDDING_MODEL_ID="BridgeTower/bridgetower-large-itm-mlm-itc" -export LVM_MODEL_ID="Xkev/Llama-3.2V-11B-cot" +export MULTIMODAL_LLM_MODEL_ID="Xkev/Llama-3.2V-11B-cot" export WHISPER_MODEL="base" export MM_EMBEDDING_SERVICE_HOST_IP=${HOST_IP} export MM_RETRIEVER_SERVICE_HOST_IP=${HOST_IP} @@ -31,5 +31,5 @@ export DATAPREP_GEN_TRANSCRIPT_SERVICE_ENDPOINT="http://${HOST_IP}:6007/v1/datap export DATAPREP_GEN_CAPTION_SERVICE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/generate_captions" export DATAPREP_GET_FILE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/get" export DATAPREP_DELETE_FILE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/delete" -export WHISPER_PORT="7066" -export WHISPER_SERVER_ENDPOINT="http://${host_ip}:${WHISPER_PORT}/v1/asr" +export WHISPER_SERVER_PORT=7066 +export WHISPER_SERVER_ENDPOINT="http://${HOST_IP}:${WHISPER_SERVER_PORT}/v1/asr" diff --git a/MultimodalQnA/docker_compose/intel/cpu/xeon/compose.yaml b/MultimodalQnA/docker_compose/intel/cpu/xeon/compose.yaml index bed2374fbd..2f2318de07 100644 --- a/MultimodalQnA/docker_compose/intel/cpu/xeon/compose.yaml +++ b/MultimodalQnA/docker_compose/intel/cpu/xeon/compose.yaml @@ -39,7 +39,7 @@ services: - redis-vector-db - lvm-llava ports: - - 
"${DATAPREP_MMR_PORT}:5000" + - "${DATAPREP_MMR_PORT:-6007}:5000" environment: no_proxy: ${no_proxy} http_proxy: ${http_proxy} diff --git a/MultimodalQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml b/MultimodalQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml index 77a2e0bb01..250d2633a5 100644 --- a/MultimodalQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml +++ b/MultimodalQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml @@ -81,7 +81,7 @@ services: - milvus-standalone - lvm-llava ports: - - "${DATAPREP_MMR_PORT}:5000" + - "${DATAPREP_MMR_PORT:-6007}:5000" environment: no_proxy: ${no_proxy} http_proxy: ${http_proxy} diff --git a/MultimodalQnA/docker_compose/intel/cpu/xeon/set_env.sh b/MultimodalQnA/docker_compose/intel/cpu/xeon/set_env.sh deleted file mode 100755 index 4cb7b5ba92..0000000000 --- a/MultimodalQnA/docker_compose/intel/cpu/xeon/set_env.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env bash - -# Copyright (C) 2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 -pushd "../../../../../" > /dev/null -source .set_env.sh -popd > /dev/null - -export host_ip=$(hostname -I | awk '{print $1}') - -export MM_EMBEDDING_SERVICE_HOST_IP=${host_ip} -export MM_RETRIEVER_SERVICE_HOST_IP=${host_ip} -export LVM_SERVICE_HOST_IP=${host_ip} -export MEGA_SERVICE_HOST_IP=${host_ip} - -export TTS_PORT=7055 -export TTS_ENDPOINT="http://${host_ip}:${TTS_PORT}/v1/tts" - -export WHISPER_PORT=7066 -export WHISPER_SERVER_ENDPOINT="http://${host_ip}:${WHISPER_PORT}/v1/asr" -export WHISPER_MODEL="base" -export MAX_IMAGES=1 - -export REDIS_DB_PORT=6379 -export REDIS_INSIGHTS_PORT=8001 -export REDIS_URL="redis://${host_ip}:${REDIS_DB_PORT}" -export REDIS_HOST=${host_ip} -export INDEX_NAME="mm-rag-redis" - -export DATAPREP_MMR_PORT=6007 -export DATAPREP_INGEST_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/ingest" -export DATAPREP_GEN_TRANSCRIPT_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/generate_transcripts" 
-export DATAPREP_GEN_CAPTION_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/generate_captions" -export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/get" -export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/delete" - -export EMM_BRIDGETOWER_PORT=6006 -export EMBEDDING_MODEL_ID="BridgeTower/bridgetower-large-itm-mlm-itc" -export MMEI_EMBEDDING_ENDPOINT="http://${host_ip}:$EMM_BRIDGETOWER_PORT" -export MM_EMBEDDING_PORT_MICROSERVICE=6000 -export BRIDGE_TOWER_EMBEDDING=true - -export REDIS_RETRIEVER_PORT=7000 - -export LVM_PORT=9399 -export LLAVA_SERVER_PORT=8399 -export LVM_MODEL_ID="llava-hf/llava-1.5-7b-hf" -export LVM_ENDPOINT="http://${host_ip}:${LLAVA_SERVER_PORT}" - -export MEGA_SERVICE_PORT=8888 -export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:${MEGA_SERVICE_PORT}/v1/multimodalqna" - -export UI_PORT=5173 -export UI_TIMEOUT=240 diff --git a/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose.yaml b/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose.yaml index c5e1b29bda..c3dcc9f8cc 100644 --- a/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose.yaml @@ -41,7 +41,7 @@ services: - redis-vector-db - lvm ports: - - "${DATAPREP_MMR_PORT}:5000" + - "${DATAPREP_MMR_PORT:-6007}:5000" environment: no_proxy: ${no_proxy} http_proxy: ${http_proxy} diff --git a/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose_milvus.yaml b/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose_milvus.yaml index 98df452697..165760003c 100644 --- a/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose_milvus.yaml +++ b/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose_milvus.yaml @@ -79,7 +79,7 @@ services: - "milvus-standalone" - "lvm" ports: - - "${DATAPREP_MMR_PORT}:5000" + - "${DATAPREP_MMR_PORT:-6007}:5000" environment: no_proxy: ${no_proxy} http_proxy: ${http_proxy} diff --git 
a/MultimodalQnA/docker_compose/intel/hpu/gaudi/set_env.sh b/MultimodalQnA/docker_compose/intel/set_env.sh similarity index 96% rename from MultimodalQnA/docker_compose/intel/hpu/gaudi/set_env.sh rename to MultimodalQnA/docker_compose/intel/set_env.sh index c92076253f..8d31674a29 100755 --- a/MultimodalQnA/docker_compose/intel/hpu/gaudi/set_env.sh +++ b/MultimodalQnA/docker_compose/intel/set_env.sh @@ -2,12 +2,12 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -pushd "../../../../../" > /dev/null +pushd "../../../" > /dev/null source .set_env.sh popd > /dev/null export host_ip=$(hostname -I | awk '{print $1}') - +export HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export MM_EMBEDDING_SERVICE_HOST_IP=${host_ip} export MM_RETRIEVER_SERVICE_HOST_IP=${host_ip} export LVM_SERVICE_HOST_IP=${host_ip} diff --git a/MultimodalQnA/tests/README.md b/MultimodalQnA/tests/README.md new file mode 100644 index 0000000000..279576500f --- /dev/null +++ b/MultimodalQnA/tests/README.md @@ -0,0 +1,45 @@ +# MultimodalQnA E2E test scripts + +## Set the required environment variable + +```bash +export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +``` + +## Run test + +On Intel Xeon with vLLM: + +```bash +bash test_compose_on_xeon.sh +``` + +On Intel Xeon with TGI: + +```bash +bash test_compose_tgi_on_xeon.sh +``` + +On Intel Gaudi with vLLM: + +```bash +bash test_compose_on_gaudi.sh +``` + +On Intel Gaudi with TGI: + +```bash +bash test_compose_tgi_on_gaudi.sh +``` + +On AMD ROCm with TGI: + +```bash +bash test_compose_on_rocm.sh +``` + +On AMD ROCm with vLLM: + +```bash +bash test_compose_vllm_on_rocm.sh +``` diff --git a/MultimodalQnA/tests/test_compose_milvus_on_xeon.sh b/MultimodalQnA/tests/test_compose_milvus_on_xeon.sh index 60ea474f33..c82e0a7c62 100644 --- a/MultimodalQnA/tests/test_compose_milvus_on_xeon.sh +++ b/MultimodalQnA/tests/test_compose_milvus_on_xeon.sh @@ -65,37 +65,12 @@ function build_docker_images() { } function setup_env() { - export 
host_ip=${ip_address} - export MM_EMBEDDING_SERVICE_HOST_IP=${host_ip} - export MM_RETRIEVER_SERVICE_HOST_IP=${host_ip} - export LVM_SERVICE_HOST_IP=${host_ip} - export MEGA_SERVICE_HOST_IP=${host_ip} - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export WHISPER_PORT=7066 - export MAX_IMAGES=1 - export WHISPER_MODEL="base" - export WHISPER_SERVER_ENDPOINT="http://${host_ip}:${WHISPER_PORT}/v1/asr" export COLLECTION_NAME="LangChainCollection" export MILVUS_HOST=${host_ip} - export DATAPREP_MMR_PORT=6007 - export DATAPREP_INGEST_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/ingest" - export DATAPREP_GEN_TRANSCRIPT_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/generate_transcripts" - export DATAPREP_GEN_CAPTION_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/generate_captions" - export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/get" - export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/delete" - export EMM_BRIDGETOWER_PORT=6006 - export BRIDGE_TOWER_EMBEDDING=true - export EMBEDDING_MODEL_ID="BridgeTower/bridgetower-large-itm-mlm-itc" - export MMEI_EMBEDDING_ENDPOINT="http://${host_ip}:$EMM_BRIDGETOWER_PORT" - export MM_EMBEDDING_PORT_MICROSERVICE=6000 export MILVUS_RETRIEVER_PORT=7000 - export LVM_PORT=9399 - export LLAVA_SERVER_PORT=8399 export LVM_MODEL_ID="llava-hf/llava-1.5-7b-hf" - export LVM_ENDPOINT="http://${host_ip}:$LLAVA_SERVER_PORT" - export MEGA_SERVICE_PORT=8888 - export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:$MEGA_SERVICE_PORT/v1/multimodalqna" - export UI_PORT=5173 + cd $WORKPATH/docker_compose/intel + source set_env.sh } diff --git a/MultimodalQnA/tests/test_compose_on_gaudi.sh b/MultimodalQnA/tests/test_compose_on_gaudi.sh index 774347e435..a945de0033 100644 --- a/MultimodalQnA/tests/test_compose_on_gaudi.sh +++ b/MultimodalQnA/tests/test_compose_on_gaudi.sh @@ -69,42 +69,8 @@ function 
build_docker_images() { } function setup_env() { - export host_ip=${ip_address} - export MM_EMBEDDING_SERVICE_HOST_IP=${host_ip} - export MM_RETRIEVER_SERVICE_HOST_IP=${host_ip} - export LVM_SERVICE_HOST_IP=${host_ip} - export MEGA_SERVICE_HOST_IP=${host_ip} - export REDIS_DB_PORT=6379 - export REDIS_INSIGHTS_PORT=8001 - export REDIS_URL="redis://${host_ip}:${REDIS_DB_PORT}" - export REDIS_HOST=${host_ip} - export INDEX_NAME="mm-rag-redis" - export WHISPER_PORT=7066 - export MAX_IMAGES=1 - export WHISPER_MODEL="base" - export WHISPER_SERVER_ENDPOINT="http://${host_ip}:${WHISPER_PORT}/v1/asr" - export TTS_PORT=7055 - export TTS_ENDPOINT="http://${host_ip}:${TTS_PORT}/v1/tts" - export DATAPREP_MMR_PORT=6007 - export DATAPREP_INGEST_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/ingest" - export DATAPREP_GEN_TRANSCRIPT_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/generate_transcripts" - export DATAPREP_GEN_CAPTION_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/generate_captions" - export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/get" - export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/delete" - export EMM_BRIDGETOWER_PORT=6006 - export BRIDGE_TOWER_EMBEDDING=true - export EMBEDDING_MODEL_ID="BridgeTower/bridgetower-large-itm-mlm-itc" - export MMEI_EMBEDDING_ENDPOINT="http://${host_ip}:$EMM_BRIDGETOWER_PORT" - export MM_EMBEDDING_PORT_MICROSERVICE=6000 - export REDIS_RETRIEVER_PORT=7000 - export LVM_PORT=9399 - export LLAVA_SERVER_PORT=8399 - export TGI_GAUDI_PORT="${LLAVA_SERVER_PORT}:80" - export LVM_MODEL_ID="llava-hf/llava-v1.6-vicuna-13b-hf" - export LVM_ENDPOINT="http://${host_ip}:${LLAVA_SERVER_PORT}" - export MEGA_SERVICE_PORT=8888 - export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:${MEGA_SERVICE_PORT}/v1/multimodalqna" - export UI_PORT=5173 + cd $WORKPATH/docker_compose/intel + source set_env.sh } function 
start_services() { diff --git a/MultimodalQnA/tests/test_compose_on_rocm.sh b/MultimodalQnA/tests/test_compose_on_rocm.sh index efa363ed3c..208f7c78a4 100644 --- a/MultimodalQnA/tests/test_compose_on_rocm.sh +++ b/MultimodalQnA/tests/test_compose_on_rocm.sh @@ -42,38 +42,10 @@ function build_docker_images() { } function setup_env() { - export HOST_IP=${ip_address} export host_ip=${ip_address} - export MULTIMODAL_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export MULTIMODAL_TGI_SERVICE_PORT="8399" - export no_proxy=${your_no_proxy} - export http_proxy=${your_http_proxy} - export https_proxy=${your_http_proxy} - export BRIDGE_TOWER_EMBEDDING=true - export EMBEDDER_PORT=6006 - export MMEI_EMBEDDING_ENDPOINT="http://${HOST_IP}:$EMBEDDER_PORT" - export MM_EMBEDDING_PORT_MICROSERVICE=6000 - export WHISPER_SERVER_PORT=7066 - export WHISPER_SERVER_ENDPOINT="http://${HOST_IP}:${WHISPER_SERVER_PORT}/v1/asr" - export REDIS_URL="redis://${HOST_IP}:6379" - export REDIS_HOST=${HOST_IP} - export INDEX_NAME="mm-rag-redis" - export LLAVA_SERVER_PORT=8399 - export LVM_ENDPOINT="http://${HOST_IP}:8399" - export EMBEDDING_MODEL_ID="BridgeTower/bridgetower-large-itm-mlm-itc" - export LVM_MODEL_ID="Xkev/Llama-3.2V-11B-cot" - export WHISPER_MODEL="base" - export MM_EMBEDDING_SERVICE_HOST_IP=${HOST_IP} - export MM_RETRIEVER_SERVICE_HOST_IP=${HOST_IP} - export LVM_SERVICE_HOST_IP=${HOST_IP} - export MEGA_SERVICE_HOST_IP=${HOST_IP} - export BACKEND_SERVICE_ENDPOINT="http://${HOST_IP}:8888/v1/multimodalqna" - export DATAPREP_INGEST_SERVICE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/ingest" - export DATAPREP_GEN_TRANSCRIPT_SERVICE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/generate_transcripts" - export DATAPREP_GEN_CAPTION_SERVICE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/generate_captions" - export DATAPREP_GET_FILE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/get" - export DATAPREP_DELETE_FILE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/delete" export 
MODEL_CACHE=${model_cache:-"/var/opea/multimodalqna-service/data"} + cd $WORKPATH/docker_compose/amd/gpu/rocm + source set_env.sh } function start_services() { diff --git a/MultimodalQnA/tests/test_compose_on_xeon.sh b/MultimodalQnA/tests/test_compose_on_xeon.sh index 1a62915abe..10f015aa7b 100644 --- a/MultimodalQnA/tests/test_compose_on_xeon.sh +++ b/MultimodalQnA/tests/test_compose_on_xeon.sh @@ -66,41 +66,8 @@ function build_docker_images() { } function setup_env() { - export host_ip=${ip_address} - export MM_EMBEDDING_SERVICE_HOST_IP=${host_ip} - export MM_RETRIEVER_SERVICE_HOST_IP=${host_ip} - export LVM_SERVICE_HOST_IP=${host_ip} - export MEGA_SERVICE_HOST_IP=${host_ip} - export WHISPER_PORT=7066 - export MAX_IMAGES=1 - export WHISPER_MODEL="base" - export WHISPER_SERVER_ENDPOINT="http://${host_ip}:${WHISPER_PORT}/v1/asr" - export TTS_PORT=7055 - export TTS_ENDPOINT="http://${host_ip}:${TTS_PORT}/v1/tts" - export REDIS_DB_PORT=6379 - export REDIS_INSIGHTS_PORT=8001 - export REDIS_URL="redis://${host_ip}:${REDIS_DB_PORT}" - export REDIS_HOST=${host_ip} - export INDEX_NAME="mm-rag-redis" - export DATAPREP_MMR_PORT=6007 - export DATAPREP_INGEST_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/ingest" - export DATAPREP_GEN_TRANSCRIPT_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/generate_transcripts" - export DATAPREP_GEN_CAPTION_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/generate_captions" - export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/get" - export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/delete" - export EMM_BRIDGETOWER_PORT=6006 - export BRIDGE_TOWER_EMBEDDING=true - export EMBEDDING_MODEL_ID="BridgeTower/bridgetower-large-itm-mlm-itc" - export MMEI_EMBEDDING_ENDPOINT="http://${host_ip}:$EMM_BRIDGETOWER_PORT" - export MM_EMBEDDING_PORT_MICROSERVICE=6000 - export REDIS_RETRIEVER_PORT=7000 - export LVM_PORT=9399 
- export LLAVA_SERVER_PORT=8399 - export LVM_MODEL_ID="llava-hf/llava-1.5-7b-hf" - export LVM_ENDPOINT="http://${host_ip}:$LLAVA_SERVER_PORT" - export MEGA_SERVICE_PORT=8888 - export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:$MEGA_SERVICE_PORT/v1/multimodalqna" - export UI_PORT=5173 + cd $WORKPATH/docker_compose/intel + source set_env.sh } diff --git a/MultimodalQnA/tests/test_compose_vllm_on_rocm.sh b/MultimodalQnA/tests/test_compose_vllm_on_rocm.sh index 65fb87d6c9..77d76dc49a 100644 --- a/MultimodalQnA/tests/test_compose_vllm_on_rocm.sh +++ b/MultimodalQnA/tests/test_compose_vllm_on_rocm.sh @@ -42,36 +42,9 @@ function build_docker_images() { function setup_env() { export HOST_IP=${ip_address} - export host_ip=${ip_address} - export MULTIMODAL_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export MULTIMODAL_VLLM_SERVICE_PORT="8399" - export no_proxy=${your_no_proxy} - export http_proxy=${your_http_proxy} - export https_proxy=${your_http_proxy} - export BRIDGE_TOWER_EMBEDDING=true - export EMBEDDER_PORT=6006 - export MMEI_EMBEDDING_ENDPOINT="http://${HOST_IP}:$EMBEDDER_PORT" - export MM_EMBEDDING_PORT_MICROSERVICE=6000 - export WHISPER_SERVER_PORT=7066 - export WHISPER_SERVER_ENDPOINT="http://${HOST_IP}:${WHISPER_SERVER_PORT}/v1/asr" - export REDIS_URL="redis://${HOST_IP}:6379" - export REDIS_HOST=${HOST_IP} - export INDEX_NAME="mm-rag-redis" - export LVM_ENDPOINT="http://${HOST_IP}:8399" - export EMBEDDING_MODEL_ID="BridgeTower/bridgetower-large-itm-mlm-itc" - export MULTIMODAL_LLM_MODEL_ID="Xkev/Llama-3.2V-11B-cot" - export WHISPER_MODEL="base" - export MM_EMBEDDING_SERVICE_HOST_IP=${HOST_IP} - export MM_RETRIEVER_SERVICE_HOST_IP=${HOST_IP} - export LVM_SERVICE_HOST_IP=${HOST_IP} - export MEGA_SERVICE_HOST_IP=${HOST_IP} - export BACKEND_SERVICE_ENDPOINT="http://${HOST_IP}:8888/v1/multimodalqna" - export DATAPREP_INGEST_SERVICE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/ingest" - export 
DATAPREP_GEN_TRANSCRIPT_SERVICE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/generate_transcripts" - export DATAPREP_GEN_CAPTION_SERVICE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/generate_captions" - export DATAPREP_GET_FILE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/get" - export DATAPREP_DELETE_FILE_ENDPOINT="http://${HOST_IP}:6007/v1/dataprep/delete" export MODEL_CACHE=${model_cache:-"/var/opea/multimodalqna-service/data"} + cd $WORKPATH/docker_compose/amd/gpu/rocm + source set_env_vllm.sh } function start_services() { From 9e38a567fde1d9cd45148350a06f29da4541aeb4 Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Fri, 30 May 2025 11:26:57 +0800 Subject: [PATCH 077/217] Optimize benchmark scripts (#1949) Signed-off-by: chensuyue --- ChatQnA/benchmark_chatqna.yaml | 2 +- deploy.py | 2 +- deploy_and_benchmark.py | 94 ++++++++++++++++++++++------------ 3 files changed, 64 insertions(+), 34 deletions(-) diff --git a/ChatQnA/benchmark_chatqna.yaml b/ChatQnA/benchmark_chatqna.yaml index ae74aa9b92..e528bb9d7a 100644 --- a/ChatQnA/benchmark_chatqna.yaml +++ b/ChatQnA/benchmark_chatqna.yaml @@ -37,7 +37,7 @@ deploy: llm: engine: vllm # or tgi - model_id: "meta-llama/Meta-Llama-3-8B-Instruct" # mandatory + model_id: "meta-llama/Llama-3.1-8B-Instruct" # mandatory replicaCount: with_teirerank: [7, 15, 31, 63] # When teirerank.enabled is True without_teirerank: [8, 16, 32, 64] # When teirerank.enabled is False diff --git a/deploy.py b/deploy.py index e74700ca53..184b82e187 100644 --- a/deploy.py +++ b/deploy.py @@ -192,7 +192,7 @@ def configure_rerank(values, with_rerank, deploy_config, example_type, node_sele values["teirerank"]["nodeSelector"] = {key: value for key, value in node_selector.items()} else: if example_type == "chatqna": - values["image"] = {"repository": "opea/chatqna-without-rerank"} + values["CHATQNA_TYPE"] = "CHATQNA_NO_RERANK" if "teirerank" not in values: values["teirerank"] = {"enabled": False} elif "enabled" not in values["teirerank"]: diff --git 
a/deploy_and_benchmark.py b/deploy_and_benchmark.py index bb729c7b48..495a554525 100644 --- a/deploy_and_benchmark.py +++ b/deploy_and_benchmark.py @@ -143,13 +143,14 @@ def pull_helm_chart(chart_pull_url, version, chart_name): return untar_dir -def main(yaml_file, target_node=None, test_mode="oob"): +def main(yaml_file, target_node=None, test_mode="oob", clean_up=True): """Main function to process deployment configuration. Args: yaml_file: Path to the YAML configuration file target_node: Optional target number of nodes to deploy. If not specified, will process all nodes. test_mode: Test mode, either "oob" (out of box) or "tune". Defaults to "oob". + clean_up: Whether to clean up after the test. Defaults to True. """ if test_mode not in ["oob", "tune"]: print("Error: test_mode must be either 'oob' or 'tune'") @@ -185,6 +186,11 @@ def main(yaml_file, target_node=None, test_mode="oob"): if not chart_dir: return + # Set HF_TOKEN + HF_TOKEN = deploy_config.get("HUGGINGFACEHUB_API_TOKEN", "") + os.environ["HF_TOKEN"] = HF_TOKEN + os.environ["HUGGINGFACEHUB_API_TOKEN"] = HF_TOKEN + for node in nodes_to_process: try: print(f"\nProcessing configuration for {node} nodes...") @@ -278,6 +284,9 @@ def main(yaml_file, target_node=None, test_mode="oob"): chart_dir, ] result = subprocess.run(cmd, check=True, capture_output=True, text=True) + print("Show deploy logs...") + print(result.stdout) + print("End of show deploy logs.") match = re.search(r"values_file_path: (\S+)", result.stdout) if match: @@ -306,6 +315,9 @@ def main(yaml_file, target_node=None, test_mode="oob"): "--update-service", ] result = subprocess.run(cmd, check=True, capture_output=True, text=True) + print("Show deploy logs...") + print(result.stdout) + print("End of show deploy logs.") if result.returncode != 0: print(f"Update failed for {node} nodes configuration with {param_name} {batch_param}") break # Skip remaining {param_name} for this node @@ -372,36 +384,48 @@ def main(yaml_file, target_node=None, 
test_mode="oob"): os.remove(temp_config_file) finally: - # Uninstall the deployment - print(f"\nUninstalling deployment for {node} nodes...") - cmd = [ - python_cmd, - "deploy.py", - "--chart-name", - chart_name, - "--namespace", - namespace, - "--uninstall", - ] - try: - result = subprocess.run(cmd, check=True) - if result.returncode != 0: - print(f"Failed to uninstall deployment for {node} nodes") - except Exception as e: - print(f"Error while uninstalling deployment for {node} nodes: {str(e)}") - - # Delete labels for current node configuration - print(f"Deleting labels for {node} nodes...") - cmd = [python_cmd, "deploy.py", "--chart-name", chart_name, "--num-nodes", str(node), "--delete-label"] - if current_node_names: - cmd.extend(["--node-names"] + current_node_names) - - try: - result = subprocess.run(cmd, check=True) - if result.returncode != 0: - print(f"Failed to delete labels for {node} nodes") - except Exception as e: - print(f"Error while deleting labels for {node} nodes: {str(e)}") + if clean_up: + # Uninstall the deployment + print(f"\nUninstalling deployment for {node} nodes...") + cmd = [ + python_cmd, + "deploy.py", + "--chart-name", + chart_name, + "--namespace", + namespace, + "--uninstall", + ] + try: + result = subprocess.run(cmd, check=True) + if result.returncode != 0: + print(f"Failed to uninstall deployment for {node} nodes") + except Exception as e: + print(f"Error while uninstalling deployment for {node} nodes: {str(e)}") + + # Delete labels for current node configuration + print(f"Deleting labels for {node} nodes...") + cmd = [ + python_cmd, + "deploy.py", + "--chart-name", + chart_name, + "--num-nodes", + str(node), + "--delete-label", + ] + if current_node_names: + cmd.extend(["--node-names"] + current_node_names) + + try: + result = subprocess.run(cmd, check=True) + if result.returncode != 0: + print(f"Failed to delete labels for {node} nodes") + except Exception as e: + print(f"Error while deleting labels for {node} nodes: 
{str(e)}") + else: + print("Skipping cleanup for local debug. Manual cleanup may be required.") + exit(0) except Exception as e: print(f"Error processing configuration for {node} nodes: {str(e)}") @@ -419,6 +443,12 @@ def main(yaml_file, target_node=None, test_mode="oob"): parser.add_argument("yaml_file", help="Path to the YAML configuration file") parser.add_argument("--target-node", type=int, help="Optional: Target number of nodes to deploy.", default=None) parser.add_argument("--test-mode", type=str, help="Test mode, either 'oob' (out of box) or 'tune'.", default="oob") + parser.add_argument( + "--no-clean-up", + action="store_false", + dest="clean_up", + help="Clean up after test, which can be closed for local debug.", + ) args = parser.parse_args() - main(args.yaml_file, args.target_node, args.test_mode) + main(args.yaml_file, args.target_node, args.test_mode, args.clean_up) From 3c6edf27f085663f685271ab533a293055af1de1 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Fri, 30 May 2025 14:54:07 +0800 Subject: [PATCH 078/217] Fix permissions error. 
(#2008) Signed-off-by: ZePan110 --- .github/workflows/_get-test-matrix.yml | 12 ++++++++++- .github/workflows/_run-docker-compose.yml | 12 ++++++++++- .github/workflows/pr-docker-compose-e2e.yml | 22 +++++++++++++++++++++ 3 files changed, 44 insertions(+), 2 deletions(-) diff --git a/.github/workflows/_get-test-matrix.yml b/.github/workflows/_get-test-matrix.yml index 4d2fefabdd..25f39607da 100644 --- a/.github/workflows/_get-test-matrix.yml +++ b/.github/workflows/_get-test-matrix.yml @@ -3,7 +3,17 @@ # Support push and pull_request events name: Get Test Matrix -permissions: read-all +permissions: + actions: read + contents: read + checks: read + deployments: read + issues: read + packages: read + pages: read + pull-requests: read + statuses: read + security-events: read on: workflow_call: inputs: diff --git a/.github/workflows/_run-docker-compose.yml b/.github/workflows/_run-docker-compose.yml index fa9b560c09..7af15e11be 100644 --- a/.github/workflows/_run-docker-compose.yml +++ b/.github/workflows/_run-docker-compose.yml @@ -2,7 +2,17 @@ # SPDX-License-Identifier: Apache-2.0 name: Image Build -permissions: read-all +permissions: + actions: read + contents: read + checks: write + deployments: write + issues: write + packages: write + pages: write + pull-requests: write + statuses: write + security-events: read on: workflow_call: inputs: diff --git a/.github/workflows/pr-docker-compose-e2e.yml b/.github/workflows/pr-docker-compose-e2e.yml index d7aba56bb5..d502ea0094 100644 --- a/.github/workflows/pr-docker-compose-e2e.yml +++ b/.github/workflows/pr-docker-compose-e2e.yml @@ -28,12 +28,34 @@ concurrency: jobs: get-test-matrix: + permissions: + actions: read + contents: read + checks: read + deployments: read + issues: read + packages: read + pages: read + pull-requests: read + statuses: read + security-events: read if: ${{ !github.event.pull_request.draft }} uses: ./.github/workflows/_get-test-matrix.yml with: diff_excluded_files: 
'\.github|\.md|\.txt|kubernetes|gmc|assets|benchmark' example-test: + permissions: + actions: read + contents: read + checks: write + deployments: write + issues: write + packages: write + pages: write + pull-requests: write + statuses: write + security-events: read needs: [get-test-matrix] if: ${{ needs.get-test-matrix.outputs.run_matrix != '' }} strategy: From 0d3f92086673bac39581314c8ed0c65d5847f998 Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Tue, 3 Jun 2025 10:15:25 +0800 Subject: [PATCH 079/217] Build comps-base:ci for AgentQnA test (#2010) Signed-off-by: chensuyue --- .github/workflows/pr-docker-compose-e2e.yml | 2 +- AgentQnA/tests/step1_build_images.sh | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr-docker-compose-e2e.yml b/.github/workflows/pr-docker-compose-e2e.yml index d502ea0094..29a2b40300 100644 --- a/.github/workflows/pr-docker-compose-e2e.yml +++ b/.github/workflows/pr-docker-compose-e2e.yml @@ -15,7 +15,7 @@ on: - "**.py" - "**/docker_compose/**" - "**/docker_image_build/**" - - "**/tests/test_compose**" + - "**/tests/**" - "**/ui/**" - "!**.md" - "!**.txt" diff --git a/AgentQnA/tests/step1_build_images.sh b/AgentQnA/tests/step1_build_images.sh index 8edd7b623a..58b5c8d6e8 100644 --- a/AgentQnA/tests/step1_build_images.sh +++ b/AgentQnA/tests/step1_build_images.sh @@ -13,6 +13,10 @@ function get_genai_comps() { if [ ! -d "GenAIComps" ] ; then git clone --depth 1 --branch ${opea_branch:-"main"} https://github.com/opea-project/GenAIComps.git fi + pushd GenAIComps + echo "GenAIComps test commit is $(git rev-parse HEAD)" + docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . 
+ popd && sleep 1s } function build_docker_images_for_retrieval_tool(){ From 8d6579b67a34a80631bdf913d434b769a633b71f Mon Sep 17 00:00:00 2001 From: "chen, suyue" Date: Tue, 3 Jun 2025 10:19:02 +0800 Subject: [PATCH 080/217] Stop CI test on rocm due to lack of test machine (#2017) Signed-off-by: chensuyue --- .github/workflows/scripts/get_test_matrix.sh | 5 +++++ .github/workflows/weekly-example-test.yml | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/scripts/get_test_matrix.sh b/.github/workflows/scripts/get_test_matrix.sh index 5ad6992104..1b5b807ec1 100644 --- a/.github/workflows/scripts/get_test_matrix.sh +++ b/.github/workflows/scripts/get_test_matrix.sh @@ -40,6 +40,11 @@ for example in ${examples}; do done fi for hw in ${run_hardware}; do + # TODO: remove this condition when ROCm hardware is available + if [[ "${hw}" == "rocm" ]]; then + echo "Skip test on ROCm hardware for 2 weeks due to lack of test machine..." + continue + fi run_matrix="${run_matrix}{\"example\":\"${example}\",\"hardware\":\"${hw}\"}," done done diff --git a/.github/workflows/weekly-example-test.yml b/.github/workflows/weekly-example-test.yml index 832cc11681..c3b9c000b6 100644 --- a/.github/workflows/weekly-example-test.yml +++ b/.github/workflows/weekly-example-test.yml @@ -14,7 +14,7 @@ on: env: EXAMPLES: ${{ vars.NIGHTLY_RELEASE_EXAMPLES }} - NODES: "gaudi,xeon,rocm,arc" + NODES: "gaudi,xeon,arc" jobs: get-test-matrix: From 144b56a424e8a1d80a90be3a3d74ab1c223418a9 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Tue, 3 Jun 2025 11:21:10 +0800 Subject: [PATCH 081/217] Fix workflow permission issues. 
(#2018) Signed-off-by: ZePan110 --- .github/workflows/_example-workflow.yml | 29 +++++++++++++++ .github/workflows/manual-example-workflow.yml | 16 +++++++++ .github/workflows/manual-image-build.yml | 16 +++++++++ .../workflows/manual-reset-local-registry.yml | 16 +++++++++ .../nightly-docker-build-publish.yml | 36 +++++++++++++------ .github/workflows/push-image-build.yml | 16 +++++++++ .github/workflows/weekly-example-test.yml | 14 ++++---- 7 files changed, 125 insertions(+), 18 deletions(-) diff --git a/.github/workflows/_example-workflow.yml b/.github/workflows/_example-workflow.yml index 35b1f16cd1..6906fd4910 100644 --- a/.github/workflows/_example-workflow.yml +++ b/.github/workflows/_example-workflow.yml @@ -69,6 +69,22 @@ jobs: # Image Build #################################################################################################### build-images: + permissions: + actions: read + contents: read + checks: read + deployments: read + discussions: read + issues: read + packages: read + pages: read + pull-requests: read + repository-projects: read + statuses: read + security-events: read + id-token: write + attestations: read + models: read uses: ./.github/workflows/_build_image.yml with: node: ${{ inputs.node }} @@ -83,6 +99,17 @@ jobs: # Docker Compose Test #################################################################################################### test-example-compose: + permissions: + actions: read + contents: read + checks: write + deployments: write + issues: write + packages: write + pages: write + pull-requests: write + statuses: write + security-events: read needs: [build-images] if: ${{ inputs.test_compose }} uses: ./.github/workflows/_run-docker-compose.yml @@ -99,6 +126,8 @@ jobs: # helmchart Test #################################################################################################### test-helmchart: + permissions: + contents: read if: ${{ fromJSON(inputs.test_helmchart) }} uses: ./.github/workflows/_helm-e2e.yml with: 
diff --git a/.github/workflows/manual-example-workflow.yml b/.github/workflows/manual-example-workflow.yml index 338454d9aa..1826512c68 100644 --- a/.github/workflows/manual-example-workflow.yml +++ b/.github/workflows/manual-example-workflow.yml @@ -99,6 +99,22 @@ jobs: opea_branch: ${{ inputs.opea_branch }} run-examples: + permissions: + actions: read + attestations: read + discussions: read + models: read + repository-projects: read + id-token: write + contents: read + checks: write + deployments: write + issues: write + packages: write + pages: write + pull-requests: write + statuses: write + security-events: read needs: [get-test-matrix, build-comps-base] strategy: matrix: diff --git a/.github/workflows/manual-image-build.yml b/.github/workflows/manual-image-build.yml index fbfd2bef26..a9cbbe0575 100644 --- a/.github/workflows/manual-image-build.yml +++ b/.github/workflows/manual-image-build.yml @@ -66,6 +66,22 @@ jobs: echo "nodes=$nodes_json" >> $GITHUB_OUTPUT image-build: + permissions: + actions: read + attestations: read + discussions: read + models: read + repository-projects: read + id-token: write + contents: read + checks: write + deployments: write + issues: write + packages: write + pages: write + pull-requests: write + statuses: write + security-events: read needs: get-test-matrix if: ${{ needs.get-test-matrix.outputs.nodes != '' }} strategy: diff --git a/.github/workflows/manual-reset-local-registry.yml b/.github/workflows/manual-reset-local-registry.yml index 88c839ab82..7ee79ab035 100644 --- a/.github/workflows/manual-reset-local-registry.yml +++ b/.github/workflows/manual-reset-local-registry.yml @@ -63,6 +63,22 @@ jobs: docker ps | grep registry build: + permissions: + actions: read + attestations: read + discussions: read + models: read + repository-projects: read + id-token: write + contents: read + checks: write + deployments: write + issues: write + packages: write + pages: write + pull-requests: write + statuses: write + security-events: 
read needs: [get-build-matrix, clean-up] if: ${{ needs.get-image-list.outputs.matrix != '' }} strategy: diff --git a/.github/workflows/nightly-docker-build-publish.yml b/.github/workflows/nightly-docker-build-publish.yml index 1d776c8433..729568f373 100644 --- a/.github/workflows/nightly-docker-build-publish.yml +++ b/.github/workflows/nightly-docker-build-publish.yml @@ -3,7 +3,21 @@ name: Nightly build/publish latest docker images permissions: + actions: read + contents: read + checks: read + deployments: read + discussions: read + issues: read + packages: read + pages: read + pull-requests: read + repository-projects: read + statuses: read security-events: read + id-token: write + attestations: read + models: read on: schedule: @@ -77,20 +91,20 @@ jobs: needs: [get-build-matrix] permissions: actions: read - contents: read - checks: read - deployments: read + attestations: read discussions: read - issues: read - packages: read - pages: read - pull-requests: read + models: read repository-projects: read - statuses: read - security-events: read id-token: write - attestations: read - models: read + contents: read + checks: write + deployments: write + issues: write + packages: write + pages: write + pull-requests: write + statuses: write + security-events: read if: ${{ needs.get-build-matrix.outputs.examples_json != '' }} strategy: matrix: diff --git a/.github/workflows/push-image-build.yml b/.github/workflows/push-image-build.yml index 9f551a67bc..7c474ea23f 100644 --- a/.github/workflows/push-image-build.yml +++ b/.github/workflows/push-image-build.yml @@ -40,6 +40,22 @@ jobs: test_mode: "docker_image_build" image-build: + permissions: + actions: read + attestations: read + discussions: read + models: read + repository-projects: read + id-token: write + contents: read + checks: write + deployments: write + issues: write + packages: write + pages: write + pull-requests: write + statuses: write + security-events: read needs: job1 if: ${{ 
needs.job1.outputs.run_matrix != '{"include":[]}' }} strategy: diff --git a/.github/workflows/weekly-example-test.yml b/.github/workflows/weekly-example-test.yml index c3b9c000b6..7dcb609160 100644 --- a/.github/workflows/weekly-example-test.yml +++ b/.github/workflows/weekly-example-test.yml @@ -52,16 +52,16 @@ jobs: id-token: write actions: read attestations: read - checks: read - deployments: read + checks: write + deployments: write discussions: read - issues: read + issues: write models: read - packages: read - pages: read - pull-requests: read + packages: write + pages: write + pull-requests: write repository-projects: read - statuses: read + statuses: write security-events: read needs: [get-test-matrix, build-comps-base] strategy: From bc91ee429c893d26a7fbed8a0aed09a0f7adc3cc Mon Sep 17 00:00:00 2001 From: Mustafa <109312699+MSCetin37@users.noreply.github.com> Date: Mon, 2 Jun 2025 23:06:32 -0700 Subject: [PATCH 082/217] Refine the README, folder/file hierarchy and test file for FinanceAgent (#1996) Signed-off-by: Mustafa Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- FinanceAgent/README.md | 180 +++----------- .../docker_compose/intel/hpu/gaudi/README.md | 205 ++++++++++++++++ .../intel/hpu/gaudi/compose.yaml | 230 ++++++++++++------ .../intel/hpu/gaudi/dataprep_compose.yaml | 82 ------- .../intel/hpu/gaudi/launch_agents.sh | 36 --- .../intel/hpu/gaudi/launch_dataprep.sh | 15 -- .../intel/hpu/gaudi/launch_vllm.sh | 7 - .../intel/hpu/gaudi/vllm_compose.yaml | 35 --- FinanceAgent/docker_compose/intel/set_env.sh | 89 +++++++ FinanceAgent/tests/test_compose_on_gaudi.sh | 140 ++++++----- 10 files changed, 559 insertions(+), 460 deletions(-) create mode 100644 FinanceAgent/docker_compose/intel/hpu/gaudi/README.md delete mode 100644 FinanceAgent/docker_compose/intel/hpu/gaudi/dataprep_compose.yaml delete mode 100644 FinanceAgent/docker_compose/intel/hpu/gaudi/launch_agents.sh delete mode 100644 
FinanceAgent/docker_compose/intel/hpu/gaudi/launch_dataprep.sh delete mode 100644 FinanceAgent/docker_compose/intel/hpu/gaudi/launch_vllm.sh delete mode 100644 FinanceAgent/docker_compose/intel/hpu/gaudi/vllm_compose.yaml create mode 100644 FinanceAgent/docker_compose/intel/set_env.sh diff --git a/FinanceAgent/README.md b/FinanceAgent/README.md index 64ce01cc0a..640f7113d0 100644 --- a/FinanceAgent/README.md +++ b/FinanceAgent/README.md @@ -1,6 +1,26 @@ -# Finance Agent +# Finance Agent Example -## 1. Overview +## Table of Contents + +- [Overview](#overview) +- [Problem Motivation](#problem-motivation) +- [Architecture](#architecture) + - [High-Level Diagram](#high-level-diagram) + - [OPEA Microservices Diagram for Data Handling](#opea-microservices-diagram-for-data-handling) +- [Deployment Options](#deployment-options) +- [Contribution](#contribution) + +## Overview + +The Finance Agent exemplifies a hierarchical multi-agent system designed to streamline financial document processing and analysis for users. It offers three core functionalities: summarizing lengthy financial documents, answering queries related to these documents, and conducting research to generate investment reports on public companies. + +Navigating and analyzing extensive financial documents can be both challenging and time-consuming. Users often need concise summaries, answers to specific queries, or comprehensive investment reports. The Finance Agent effectively addresses these needs by automating document summarization, query answering, and research tasks, thereby enhancing productivity and decision-making efficiency. + +Users interact with the system through a graphical user interface (UI), where a supervisor agent manages requests by delegating tasks to worker agents or the summarization microservice. The system also supports document uploads via the UI for processing. + +## Architecture + +### High-Level Diagram The architecture of this Finance Agent example is shown in the figure below. 
The agent is a hierarchical multi-agent system and has 3 main functions: @@ -12,6 +32,8 @@ The user interacts with the supervisor agent through the graphical UI. The super ![Finance Agent Architecture](assets/finance_agent_arch.png) +### OPEA Microservices Diagram for Data Handling + The architectural diagram of the `dataprep` microservice is shown below. We use [docling](https://github.com/docling-project/docling) to extract text from PDFs and URLs into markdown format. Both the full document content and tables are extracted. We then use an LLM to extract metadata from the document, including the company name, year, quarter, document type, and document title. The full document markdown then gets chunked, and LLM is used to summarize each chunk, and the summaries are embedded and saved to a vector database. Each table is also summarized by LLM and the summaries are embedded and saved to the vector database. The chunks and tables are also saved into a KV store. The pipeline is designed as such to improve retrieval accuracy of the `search_knowledge_base` tool used by the Question Answering worker agent. ![dataprep architecture](assets/fin_agent_dataprep.png) @@ -30,154 +52,16 @@ The Question Answering worker agent uses `search_knowledge_base` tool to get rel ![finqa search tool arch](assets/finqa_tool.png) -## 2. Getting started - -### 2.1 Download repos - -```bash -mkdir /path/to/your/workspace/ -export WORKDIR=/path/to/your/workspace/ -cd $WORKDIR -git clone https://github.com/opea-project/GenAIExamples.git -``` - -### 2.2 Set up env vars - -```bash -export ip_address="External_Public_IP" -export no_proxy=${your_no_proxy},${ip_address} -export HF_CACHE_DIR=/path/to/your/model/cache/ -export HF_TOKEN= -export FINNHUB_API_KEY= # go to https://finnhub.io/ to get your free api key -export FINANCIAL_DATASETS_API_KEY= # go to https://docs.financialdatasets.ai/ to get your free api key -``` - -### 2.3 [Optional] Build docker images - -Only needed when docker pull failed. 
- -```bash -cd $WORKDIR/GenAIExamples/FinanceAgent/docker_image_build -# get GenAIComps repo -git clone https://github.com/opea-project/GenAIComps.git -# build the images -docker compose -f build.yaml build --no-cache -``` - -If deploy on Gaudi, also need to build vllm image. - -```bash -cd $WORKDIR -git clone https://github.com/HabanaAI/vllm-fork.git -# get the latest release tag of vllm gaudi -cd vllm-fork -VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)") -echo "Check out vLLM tag ${VLLM_VER}" -git checkout ${VLLM_VER} -docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:latest --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -``` - -## 3. Deploy with docker compose - -### 3.1 Launch vllm endpoint - -Below is the command to launch a vllm endpoint on Gaudi that serves `meta-llama/Llama-3.3-70B-Instruct` model on 4 Gaudi cards. - -```bash -cd $WORKDIR/GenAIExamples/FinanceAgent/docker_compose/intel/hpu/gaudi -bash launch_vllm.sh -``` - -### 3.2 Prepare knowledge base - -The commands below will upload some example files into the knowledge base. You can also upload files through UI. - -First, launch the redis databases and the dataprep microservice. - -```bash -# inside $WORKDIR/GenAIExamples/FinanceAgent/docker_compose/intel/hpu/gaudi/ -bash launch_dataprep.sh -``` - -Validate datat ingest data and retrieval from database: - -```bash -python $WORKDIR/GenAIExamples/FinanceAgent/tests/test_redis_finance.py --port 6007 --test_option ingest -python $WORKDIR/GenAIExamples/FinanceAgent/tests/test_redis_finance.py --port 6007 --test_option get -``` - -### 3.3 Launch the multi-agent system - -The command below will launch 3 agent microservices, 1 docsum microservice, 1 UI microservice. 
- -```bash -# inside $WORKDIR/GenAIExamples/FinanceAgent/docker_compose/intel/hpu/gaudi/ -bash launch_agents.sh -``` - -### 3.4 Validate agents - -FinQA Agent: - -```bash -export agent_port="9095" -prompt="What is Gap's revenue in 2024?" -python3 $WORKDIR/GenAIExamples/FinanceAgent/tests/test.py --prompt "$prompt" --agent_role "worker" --ext_port $agent_port -``` - -Research Agent: - -```bash -export agent_port="9096" -prompt="generate NVDA financial research report" -python3 $WORKDIR/GenAIExamples/FinanceAgent/tests/test.py --prompt "$prompt" --agent_role "worker" --ext_port $agent_port --tool_choice "get_current_date" --tool_choice "get_share_performance" -``` - -Supervisor Agent single turns: - -```bash -export agent_port="9090" -python3 $WORKDIR/GenAIExamples/FinanceAgent/tests/test.py --agent_role "supervisor" --ext_port $agent_port --stream -``` - -Supervisor Agent multi turn: - -```bash -python3 $WORKDIR/GenAIExamples/FinanceAgent/tests/test.py --agent_role "supervisor" --ext_port $agent_port --multi-turn --stream - -``` - -## How to interact with the agent system with UI - -The UI microservice is launched in the previous step with the other microservices. -To see the UI, open a web browser to `http://${ip_address}:5175` to access the UI. Note the `ip_address` here is the host IP of the UI microservice. - -1. Create Admin Account with a random value - -2. Enter the endpoints in the `Connections` settings - - First, click on the user icon in the upper right corner to open `Settings`. Click on `Admin Settings`. Click on `Connections`. - - Then, enter the supervisor agent endpoint in the `OpenAI API` section: `http://${ip_address}:9090/v1`. Enter the API key as "empty". Add an arbitrary model id in `Model IDs`, for example, "opea_agent". The `ip_address` here should be the host ip of the agent microservice. - - Then, enter the dataprep endpoint in the `Icloud File API` section. 
You first need to enable `Icloud File API` by clicking on the button on the right to turn it into green and then enter the endpoint url, for example, `http://${ip_address}:6007/v1`. The `ip_address` here should be the host ip of the dataprep microservice. - - You should see screen like the screenshot below when the settings are done. - -![opea-agent-setting](assets/ui_connections_settings.png) - -3. Upload documents with UI - - Click on the `Workplace` icon in the top left corner. Click `Knowledge`. Click on the "+" sign to the right of `Icloud Knowledge`. You can paste an url in the left hand side of the pop-up window, or upload a local file by click on the cloud icon on the right hand side of the pop-up window. Then click on the `Upload Confirm` button. Wait till the processing is done and the pop-up window will be closed on its own when the data ingestion is done. See the screenshot below. - - Note: the data ingestion may take a few minutes depending on the length of the document. Please wait patiently and do not close the pop-up window. +## Deployment Options -![upload-doc-ui](assets/upload_doc_ui.png) +This Finance Agent example can be deployed manually on Docker Compose. -4. Test agent with UI +| Hardware | Deployment Mode | Guide Link | +| :----------------------------- | :------------------- | :----------------------------------------------------------------------- | +| Intel® Gaudi® AI Accelerator | Single Node (Docker) | [Gaudi Docker Compose Guide](./docker_compose/intel/hpu/gaudi/README.md) | - After the settings are done and documents are ingested, you can start to ask questions to the agent. Click on the `New Chat` icon in the top left corner, and type in your questions in the text box in the middle of the UI. +_Note: Building custom microservice images can be done using the resources in [GenAIComps](https://github.com/opea-project/GenAIComps)._ - The UI will stream the agent's response tokens. 
You need to expand the `Thinking` tab to see the agent's reasoning process. After the agent made tool calls, you would also see the tool output after the tool returns output to the agent. Note: it may take a while to get the tool output back if the tool execution takes time. +## Contribution -![opea-agent-test](assets/opea-agent-test.png) +We welcome contributions to the OPEA project. Please refer to the [contribution guidelines](https://github.com/opea-project/docs/blob/main/community/CONTRIBUTING.md) for more information. diff --git a/FinanceAgent/docker_compose/intel/hpu/gaudi/README.md b/FinanceAgent/docker_compose/intel/hpu/gaudi/README.md new file mode 100644 index 0000000000..79f0a9dec9 --- /dev/null +++ b/FinanceAgent/docker_compose/intel/hpu/gaudi/README.md @@ -0,0 +1,205 @@ +# Deploy Finance Agent on Intel® Gaudi® AI Accelerator with Docker Compose + +This README provides instructions for deploying the Finance Agent application using Docker Compose on systems equipped with Intel® Gaudi® AI Accelerators. + +## Table of Contents + +- [Overview](#overview) +- [Prerequisites](#prerequisites) +- [Start Deployment](#start-deployment) +- [Validate Services](#validate-services) +- [Accessing the User Interface (UI)](#accessing-the-user-interface-ui) + +## Overview + +This guide focuses on running the pre-configured Finance Agent service using Docker Compose on Intel® Gaudi® AI Accelerators. It leverages containers optimized for Gaudi for the LLM serving component, along with CPU-based containers for other microservices like embedding, retrieval, data preparation and the UI. + +## Prerequisites + +- Docker and Docker Compose installed. +- Intel® Gaudi® AI Accelerator(s) with the necessary drivers and software stack installed on the host system. (Refer to Intel Gaudi Documentation). +- Git installed (for cloning repository). +- Hugging Face Hub API Token (for downloading models). +- Access to the internet (or a private model cache). +- Finnhub API Key. 
Go to https://finnhub.io/ to get your free api key
+- Financial Datasets API Key. Go to https://docs.financialdatasets.ai/ to get your free api key
+
+Clone the GenAIExamples repository:
+
+```shell
+mkdir /path/to/your/workspace/
+export WORKDIR=/path/to/your/workspace/
+cd $WORKDIR
+git clone https://github.com/opea-project/GenAIExamples.git
+cd GenAIExamples/FinanceAgent/docker_compose/intel/hpu/gaudi
+```
+
+## Start Deployment
+
+This uses the default vLLM-based deployment profile (vllm-gaudi-server).
+
+### Configure Environment
+
+Set required environment variables in your shell:
+
+```shell
+# Path to your model cache
+export HF_CACHE_DIR="./data"
+# Some models from Hugging Face require approval beforehand. Ensure you have the necessary permissions to access them.
+export HF_TOKEN="your_huggingface_token"
+export FINNHUB_API_KEY="your-finnhub-api-key"
+export FINANCIAL_DATASETS_API_KEY="your-financial-datasets-api-key"
+
+# Optional: Configure HOST_IP if needed
+# Replace with your host's external IP address (do not use localhost or 127.0.0.1).
+# export HOST_IP=$(hostname -I | awk '{print $1}')
+
+# Optional: Configure proxy if needed
+# export HTTP_PROXY="${http_proxy}"
+# export HTTPS_PROXY="${https_proxy}"
+# export NO_PROXY="${NO_PROXY},${HOST_IP}"
+
+source ../../set_env.sh
+```
+
+Note: The compose file might read additional variables from set_env.sh. Ensure all required variables like ports (LLM_SERVICE_PORT, TEI_EMBEDDER_PORT, etc.) are set if not using defaults from the compose file. 
For instance, edit the set_env.sh to change the LLM model: + +### Start Services + +#### Deploy with Docker Compose + +Below is the command to launch services + +- vllm-gaudi-server +- tei-embedding-serving +- redis-vector-db +- redis-kv-store +- dataprep-redis-server-finance +- finqa-agent-endpoint +- research-agent-endpoint +- docsum-vllm-gaudi +- supervisor-agent-endpoint +- agent-ui + +```shell +docker compose -f compose.yaml up -d +``` + +#### [Optional] Build docker images + +This is only needed if the Docker image is unavailable or the pull operation fails. + +```bash +cd $WORKDIR/GenAIExamples/FinanceAgent/docker_image_build +# get GenAIComps repo +git clone https://github.com/opea-project/GenAIComps.git +# build the images +docker compose -f build.yaml build --no-cache +``` + +If deploy on Gaudi, also need to build vllm image. + +```bash +cd $WORKDIR +git clone https://github.com/HabanaAI/vllm-fork.git +# get the latest release tag of vllm gaudi +cd vllm-fork +VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)") +echo "Check out vLLM tag ${VLLM_VER}" +git checkout ${VLLM_VER} +docker build --no-cache -f Dockerfile.hpu -t opea/vllm-gaudi:latest --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy +``` + +## Validate Services + +Wait several minutes for models to download and services to initialize (Gaudi initialization can take time). Check container logs (docker compose logs -f , especially vllm-gaudi-server). + +```bash +docker logs --tail 2000 -f vllm-gaudi-server +``` + +> Below is the expected output of the `vllm-gaudi-server` service. + +``` + INFO: Started server process [1] + INFO: Waiting for application startup. + INFO: Application startup complete. 
+ INFO: Uvicorn running on http://0.0.0.0:8000 (Press CTRL+C to quit) + INFO: : - "GET /health HTTP/1.1" 200 OK + +``` + +### Validate Data Services + +Ingest data and retrieval from database + +```bash +python $WORKDIR/GenAIExamples/FinanceAgent/tests/test_redis_finance.py --port 6007 --test_option ingest +python $WORKDIR/GenAIExamples/FinanceAgent/tests/test_redis_finance.py --port 6007 --test_option get +``` + +### Validate Agents + +FinQA Agent: + +```bash +export agent_port="9095" +prompt="What is Gap's revenue in 2024?" +python3 $WORKDIR/GenAIExamples/FinanceAgent/tests/test.py --prompt "$prompt" --agent_role "worker" --ext_port $agent_port +``` + +Research Agent: + +```bash +export agent_port="9096" +prompt="generate NVDA financial research report" +python3 $WORKDIR/GenAIExamples/FinanceAgent/tests/test.py --prompt "$prompt" --agent_role "worker" --ext_port $agent_port --tool_choice "get_current_date" --tool_choice "get_share_performance" +``` + +Supervisor Agent single turns: + +```bash +export agent_port="9090" +python3 $WORKDIR/GenAIExamples/FinanceAgent/tests/test.py --agent_role "supervisor" --ext_port $agent_port --stream +``` + +Supervisor Agent multi turn: + +```bash +python3 $WORKDIR/GenAIExamples/FinanceAgent/tests/test.py --agent_role "supervisor" --ext_port $agent_port --multi-turn --stream +``` + +## Accessing the User Interface (UI) + +The UI microservice is launched in the previous step with the other microservices. +To see the UI, open a web browser to `http://${HOST_IP}:5175` to access the UI. Note the `HOST_IP` here is the host IP of the UI microservice. + +1. Create Admin Account with a random value + +2. Enter the endpoints in the `Connections` settings + + First, click on the user icon in the upper right corner to open `Settings`. Click on `Admin Settings`. Click on `Connections`. + + Then, enter the supervisor agent endpoint in the `OpenAI API` section: `http://${HOST_IP}:9090/v1`. Enter the API key as "empty". 
Add an arbitrary model id in `Model IDs`, for example, "opea_agent". The `HOST_IP` here should be the host ip of the agent microservice. + + Then, enter the dataprep endpoint in the `Icloud File API` section. You first need to enable `Icloud File API` by clicking on the button on the right to turn it into green and then enter the endpoint url, for example, `http://${HOST_IP}:6007/v1`. The `HOST_IP` here should be the host ip of the dataprep microservice. + + You should see screen like the screenshot below when the settings are done. + +![opea-agent-setting](../../../../assets/ui_connections_settings.png) + +3. Upload documents with UI + + Click on the `Workplace` icon in the top left corner. Click `Knowledge`. Click on the "+" sign to the right of `iCloud Knowledge`. You can paste an url in the left hand side of the pop-up window, or upload a local file by click on the cloud icon on the right hand side of the pop-up window. Then click on the `Upload Confirm` button. Wait till the processing is done and the pop-up window will be closed on its own when the data ingestion is done. See the screenshot below. + Then, enter the dataprep endpoint in the `iCloud File API` section. You first need to enable `iCloud File API` by clicking on the button on the right to turn it into green and then enter the endpoint url, for example, `http://${HOST_IP}:6007/v1`. The `HOST_IP` here should be the host ip of the dataprep microservice. + Note: the data ingestion may take a few minutes depending on the length of the document. Please wait patiently and do not close the pop-up window. + +![upload-doc-ui](../../../../assets/upload_doc_ui.png) + +4. Test agent with UI + + After the settings are done and documents are ingested, you can start to ask questions to the agent. Click on the `New Chat` icon in the top left corner, and type in your questions in the text box in the middle of the UI. + + The UI will stream the agent's response tokens. 
You need to expand the `Thinking` tab to see the agent's reasoning process. After the agent made tool calls, you would also see the tool output after the tool returns output to the agent. Note: it may take a while to get the tool output back if the tool execution takes time. + +![opea-agent-test](../../../../assets/opea-agent-test.png) diff --git a/FinanceAgent/docker_compose/intel/hpu/gaudi/compose.yaml b/FinanceAgent/docker_compose/intel/hpu/gaudi/compose.yaml index 997aade843..e788c5899a 100644 --- a/FinanceAgent/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/FinanceAgent/docker_compose/intel/hpu/gaudi/compose.yaml @@ -1,37 +1,146 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 + +x-common-environment: + &common-env + no_proxy: ${NO_PROXY} + http_proxy: ${HTTP_PROXY} + https_proxy: ${HTTPS_PROXY} + +x-common-agent-environment: + &common-agent-env + <<: *common-env + HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + llm_endpoint_url: ${LLM_ENDPOINT} + model: ${LLM_MODEL_ID} + REDIS_URL_VECTOR: ${REDIS_URL_VECTOR} + REDIS_URL_KV: ${REDIS_URL_KV} + TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} + ip_address: ${HOST_IP} + strategy: react_llama + require_human_feedback: false + services: + + vllm-service: + image: ${REGISTRY:-opea}/vllm-gaudi:${TAG:-latest} + container_name: vllm-gaudi-server + ports: + - "8086:8000" + volumes: + - ${HF_CACHE_DIR:-./data}:/data + environment: + <<: *common-env + HF_TOKEN: ${HF_TOKEN} + HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} + HF_HOME: ./data + HABANA_VISIBLE_DEVICES: all + OMPI_MCA_btl_vader_single_copy_mechanism: none + LLM_MODEL_ID: ${LLM_MODEL_ID} + VLLM_TORCH_PROFILER_DIR: "/mnt" + VLLM_SKIP_WARMUP: true + PT_HPU_ENABLE_LAZY_COLLECTIVES: true + healthcheck: + test: ["CMD-SHELL", "curl -f http://$HOST_IP:8086/health || exit 1"] + interval: 10s + timeout: 10s + retries: 100 + runtime: habana + cap_add: + - SYS_NICE + ipc: host + command: --model ${LLM_MODEL_ID} --tensor-parallel-size 
${NUM_CARDS} --host 0.0.0.0 --port 8000 --max-seq-len-to-capture $MAX_LEN + + tei-embedding-serving: + image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 + container_name: tei-embedding-serving + entrypoint: /bin/sh -c "apt-get update && apt-get install -y curl && text-embeddings-router --json-output --model-id ${EMBEDDING_MODEL_ID} --auto-truncate" + ports: + - "${TEI_EMBEDDER_PORT:-10221}:80" + volumes: + - ${HF_CACHE_DIR:-./data}:/data + shm_size: 1g + environment: + <<: *common-env + HF_TOKEN: ${HF_TOKEN} + host_ip: ${HOST_IP} + healthcheck: + test: ["CMD", "curl", "-f", "http://${HOST_IP}:${TEI_EMBEDDER_PORT}/health"] + interval: 10s + timeout: 6s + retries: 48 + + redis-vector-db: + image: redis/redis-stack:7.2.0-v9 + container_name: redis-vector-db + ports: + - "${REDIS_PORT1:-6379}:6379" + - "${REDIS_PORT2:-8001}:8001" + environment: + <<: *common-env + healthcheck: + test: ["CMD", "redis-cli", "ping"] + timeout: 10s + retries: 3 + start_period: 10s + + redis-kv-store: + image: redis/redis-stack:7.2.0-v9 + container_name: redis-kv-store + ports: + - "${REDIS_PORT3:-6380}:6379" + - "${REDIS_PORT4:-8002}:8001" + environment: + <<: *common-env + healthcheck: + test: ["CMD", "redis-cli", "ping"] + timeout: 10s + retries: 3 + start_period: 10s + + dataprep-redis-finance: + image: ${REGISTRY:-opea}/dataprep:${TAG:-latest} + container_name: dataprep-redis-server-finance + depends_on: + redis-vector-db: + condition: service_healthy + redis-kv-store: + condition: service_healthy + tei-embedding-serving: + condition: service_healthy + ports: + - "${DATAPREP_PORT:-6007}:5000" + environment: + <<: *common-env + DATAPREP_COMPONENT_NAME: ${DATAPREP_COMPONENT_NAME} + REDIS_URL_VECTOR: ${REDIS_URL_VECTOR} + REDIS_URL_KV: ${REDIS_URL_KV} + TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} + LLM_ENDPOINT: ${LLM_ENDPOINT} + LLM_MODEL: ${LLM_MODEL_ID} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} + LOGFLAG: true + worker-finqa-agent: image: 
opea/agent:latest container_name: finqa-agent-endpoint volumes: - ${TOOLSET_PATH}:/home/user/tools/ - ${PROMPT_PATH}:/home/user/prompts/ + ipc: host ports: - "9095:9095" - ipc: host environment: - ip_address: ${ip_address} - strategy: react_llama + <<: *common-agent-env with_memory: false - recursion_limit: ${recursion_limit_worker} - llm_engine: vllm - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - llm_endpoint_url: ${LLM_ENDPOINT_URL} - model: ${LLM_MODEL_ID} + recursion_limit: ${RECURSION_LIMIT_WORKER} temperature: ${TEMPERATURE} max_new_tokens: ${MAX_TOKENS} stream: false tools: /home/user/tools/finqa_agent_tools.yaml custom_prompt: /home/user/prompts/finqa_prompt.py - require_human_feedback: false - no_proxy: ${no_proxy} - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} - REDIS_URL_VECTOR: $REDIS_URL_VECTOR - REDIS_URL_KV: $REDIS_URL_KV - TEI_EMBEDDING_ENDPOINT: $TEI_EMBEDDING_ENDPOINT port: 9095 worker-research-agent: @@ -40,67 +149,20 @@ services: volumes: - ${TOOLSET_PATH}:/home/user/tools/ - ${PROMPT_PATH}:/home/user/prompts/ + ipc: host ports: - "9096:9096" - ipc: host environment: - ip_address: ${ip_address} - strategy: react_llama + <<: *common-agent-env with_memory: false - recursion_limit: 25 - llm_engine: vllm - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - llm_endpoint_url: ${LLM_ENDPOINT_URL} - model: ${LLM_MODEL_ID} + recursion_limit: ${RECURSION_LIMIT_WORKER} stream: false tools: /home/user/tools/research_agent_tools.yaml custom_prompt: /home/user/prompts/research_prompt.py - require_human_feedback: false - no_proxy: ${no_proxy} - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} FINNHUB_API_KEY: ${FINNHUB_API_KEY} FINANCIAL_DATASETS_API_KEY: ${FINANCIAL_DATASETS_API_KEY} port: 9096 - supervisor-react-agent: - image: opea/agent:latest - container_name: supervisor-agent-endpoint - depends_on: - - worker-finqa-agent - - worker-research-agent - volumes: - - ${TOOLSET_PATH}:/home/user/tools/ - - 
${PROMPT_PATH}:/home/user/prompts/ - ports: - - "9090:9090" - ipc: host - environment: - ip_address: ${ip_address} - strategy: react_llama - with_memory: true - recursion_limit: ${recursion_limit_supervisor} - llm_engine: vllm - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - llm_endpoint_url: ${LLM_ENDPOINT_URL} - model: ${LLM_MODEL_ID} - temperature: ${TEMPERATURE} - max_new_tokens: ${MAX_TOKENS} - stream: true - tools: /home/user/tools/supervisor_agent_tools.yaml - custom_prompt: /home/user/prompts/supervisor_prompt.py - require_human_feedback: false - no_proxy: ${no_proxy} - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} - WORKER_FINQA_AGENT_URL: $WORKER_FINQA_AGENT_URL - WORKER_RESEARCH_AGENT_URL: $WORKER_RESEARCH_AGENT_URL - DOCSUM_ENDPOINT: $DOCSUM_ENDPOINT - REDIS_URL_VECTOR: $REDIS_URL_VECTOR - REDIS_URL_KV: $REDIS_URL_KV - TEI_EMBEDDING_ENDPOINT: $TEI_EMBEDDING_ENDPOINT - port: 9090 - docsum-vllm-gaudi: image: opea/llm-docsum:latest container_name: docsum-vllm-gaudi @@ -108,26 +170,48 @@ services: - ${DOCSUM_PORT:-9000}:9000 ipc: host environment: - no_proxy: ${no_proxy} - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} + <<: *common-env LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} HF_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG:-False} MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS} - DocSum_COMPONENT_NAME: ${DocSum_COMPONENT_NAME:-OpeaDocSumvLLM} + DocSum_COMPONENT_NAME: ${DOCSUM_COMPONENT_NAME:-OpeaDocSumvLLM} restart: unless-stopped + supervisor-react-agent: + image: opea/agent:latest + container_name: supervisor-agent-endpoint + volumes: + - ${TOOLSET_PATH}:/home/user/tools/ + - ${PROMPT_PATH}:/home/user/prompts/ + ipc: host + depends_on: + - worker-finqa-agent + - worker-research-agent + ports: + - "9090:9090" + environment: + <<: *common-agent-env + with_memory: "true" + recursion_limit: ${RECURSION_LIMIT_SUPERVISOR} + temperature: ${TEMPERATURE} + max_new_tokens: ${MAX_TOKENS} + stream: 
"true" + tools: /home/user/tools/supervisor_agent_tools.yaml + custom_prompt: /home/user/prompts/supervisor_prompt.py + WORKER_FINQA_AGENT_URL: ${WORKER_FINQA_AGENT_URL} + WORKER_RESEARCH_AGENT_URL: ${WORKER_RESEARCH_AGENT_URL} + DOCSUM_ENDPOINT: ${DOCSUM_ENDPOINT} + port: 9090 + agent-ui: image: opea/agent-ui:latest container_name: agent-ui environment: - host_ip: ${host_ip} - no_proxy: ${no_proxy} - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} + <<: *common-env + host_ip: ${HOST_IP} ports: - "5175:8080" ipc: host diff --git a/FinanceAgent/docker_compose/intel/hpu/gaudi/dataprep_compose.yaml b/FinanceAgent/docker_compose/intel/hpu/gaudi/dataprep_compose.yaml deleted file mode 100644 index 5e4333c7d2..0000000000 --- a/FinanceAgent/docker_compose/intel/hpu/gaudi/dataprep_compose.yaml +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright (C) 2025 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -services: - tei-embedding-serving: - image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 - container_name: tei-embedding-serving - entrypoint: /bin/sh -c "apt-get update && apt-get install -y curl && text-embeddings-router --json-output --model-id ${EMBEDDING_MODEL_ID} --auto-truncate" - ports: - - "${TEI_EMBEDDER_PORT:-10221}:80" - volumes: - - "./data:/data" - shm_size: 1g - environment: - no_proxy: ${no_proxy} - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} - host_ip: ${host_ip} - HF_TOKEN: ${HF_TOKEN} - healthcheck: - test: ["CMD", "curl", "-f", "http://${host_ip}:${TEI_EMBEDDER_PORT}/health"] - interval: 10s - timeout: 6s - retries: 48 - - redis-vector-db: - image: redis/redis-stack:7.2.0-v9 - container_name: redis-vector-db - ports: - - "${REDIS_PORT1:-6379}:6379" - - "${REDIS_PORT2:-8001}:8001" - environment: - - no_proxy=${no_proxy} - - http_proxy=${http_proxy} - - https_proxy=${https_proxy} - healthcheck: - test: ["CMD", "redis-cli", "ping"] - timeout: 10s - retries: 3 - start_period: 10s - - redis-kv-store: - image: 
redis/redis-stack:7.2.0-v9 - container_name: redis-kv-store - ports: - - "${REDIS_PORT3:-6380}:6379" - - "${REDIS_PORT4:-8002}:8001" - environment: - - no_proxy=${no_proxy} - - http_proxy=${http_proxy} - - https_proxy=${https_proxy} - healthcheck: - test: ["CMD", "redis-cli", "ping"] - timeout: 10s - retries: 3 - start_period: 10s - - dataprep-redis-finance: - image: ${REGISTRY:-opea}/dataprep:${TAG:-latest} - container_name: dataprep-redis-server-finance - depends_on: - redis-vector-db: - condition: service_healthy - redis-kv-store: - condition: service_healthy - tei-embedding-serving: - condition: service_healthy - ports: - - "${DATAPREP_PORT:-6007}:5000" - environment: - no_proxy: ${no_proxy} - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} - DATAPREP_COMPONENT_NAME: ${DATAPREP_COMPONENT_NAME} - REDIS_URL_VECTOR: ${REDIS_URL_VECTOR} - REDIS_URL_KV: ${REDIS_URL_KV} - TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - LLM_ENDPOINT: ${LLM_ENDPOINT} - LLM_MODEL: ${LLM_MODEL} - HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} - HF_TOKEN: ${HF_TOKEN} - LOGFLAG: true diff --git a/FinanceAgent/docker_compose/intel/hpu/gaudi/launch_agents.sh b/FinanceAgent/docker_compose/intel/hpu/gaudi/launch_agents.sh deleted file mode 100644 index 55dcbb7d3d..0000000000 --- a/FinanceAgent/docker_compose/intel/hpu/gaudi/launch_agents.sh +++ /dev/null @@ -1,36 +0,0 @@ - -# Copyright (C) 2025 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -export ip_address=$(hostname -I | awk '{print $1}') -export HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} -export TOOLSET_PATH=$WORKDIR/GenAIExamples/FinanceAgent/tools/ -echo "TOOLSET_PATH=${TOOLSET_PATH}" -export PROMPT_PATH=$WORKDIR/GenAIExamples/FinanceAgent/prompts/ -echo "PROMPT_PATH=${PROMPT_PATH}" -export recursion_limit_worker=12 -export recursion_limit_supervisor=10 - -vllm_port=8086 -export LLM_MODEL_ID="meta-llama/Llama-3.3-70B-Instruct" -export LLM_ENDPOINT_URL="http://${ip_address}:${vllm_port}" -export TEMPERATURE=0.5 -export 
MAX_TOKENS=4096 - -export WORKER_FINQA_AGENT_URL="http://${ip_address}:9095/v1/chat/completions" -export WORKER_RESEARCH_AGENT_URL="http://${ip_address}:9096/v1/chat/completions" - -export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" -export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:10221" -export REDIS_URL_VECTOR="redis://${ip_address}:6379" -export REDIS_URL_KV="redis://${ip_address}:6380" - -export MAX_INPUT_TOKENS=2048 -export MAX_TOTAL_TOKENS=4096 -export DocSum_COMPONENT_NAME="OpeaDocSumvLLM" -export DOCSUM_ENDPOINT="http://${ip_address}:9000/v1/docsum" - -export FINNHUB_API_KEY=${FINNHUB_API_KEY} -export FINANCIAL_DATASETS_API_KEY=${FINANCIAL_DATASETS_API_KEY} - -docker compose -f compose.yaml up -d diff --git a/FinanceAgent/docker_compose/intel/hpu/gaudi/launch_dataprep.sh b/FinanceAgent/docker_compose/intel/hpu/gaudi/launch_dataprep.sh deleted file mode 100644 index 9bb006c191..0000000000 --- a/FinanceAgent/docker_compose/intel/hpu/gaudi/launch_dataprep.sh +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (C) 2025 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -export host_ip=${ip_address} -export DATAPREP_PORT="6007" -export TEI_EMBEDDER_PORT="10221" -export REDIS_URL_VECTOR="redis://${ip_address}:6379" -export REDIS_URL_KV="redis://${ip_address}:6380" -export LLM_MODEL=$model -export LLM_ENDPOINT="http://${ip_address}:${vllm_port}" -export DATAPREP_COMPONENT_NAME="OPEA_DATAPREP_REDIS_FINANCE" -export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" -export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:${TEI_EMBEDDER_PORT}" - -docker compose -f dataprep_compose.yaml up -d diff --git a/FinanceAgent/docker_compose/intel/hpu/gaudi/launch_vllm.sh b/FinanceAgent/docker_compose/intel/hpu/gaudi/launch_vllm.sh deleted file mode 100644 index 5d8d58641b..0000000000 --- a/FinanceAgent/docker_compose/intel/hpu/gaudi/launch_vllm.sh +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (C) 2025 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -export 
LLM_MODEL_ID="meta-llama/Llama-3.3-70B-Instruct" -export MAX_LEN=16384 - -docker compose -f vllm_compose.yaml up -d diff --git a/FinanceAgent/docker_compose/intel/hpu/gaudi/vllm_compose.yaml b/FinanceAgent/docker_compose/intel/hpu/gaudi/vllm_compose.yaml deleted file mode 100644 index 8ca62e1e46..0000000000 --- a/FinanceAgent/docker_compose/intel/hpu/gaudi/vllm_compose.yaml +++ /dev/null @@ -1,35 +0,0 @@ - -# Copyright (C) 2025 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -services: - vllm-service: - image: ${REGISTRY:-opea}/vllm-gaudi:${TAG:-latest} - container_name: vllm-gaudi-server - ports: - - "8086:8000" - volumes: - - ${HF_CACHE_DIR}:/data - environment: - no_proxy: ${no_proxy} - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} - HF_TOKEN: ${HF_TOKEN} - HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} - HF_HOME: /data - HABANA_VISIBLE_DEVICES: all - OMPI_MCA_btl_vader_single_copy_mechanism: none - LLM_MODEL_ID: ${LLM_MODEL_ID} - VLLM_TORCH_PROFILER_DIR: "/mnt" - VLLM_SKIP_WARMUP: true - PT_HPU_ENABLE_LAZY_COLLECTIVES: true - healthcheck: - test: ["CMD-SHELL", "curl -f http://$host_ip:8086/health || exit 1"] - interval: 10s - timeout: 10s - retries: 100 - runtime: habana - cap_add: - - SYS_NICE - ipc: host - command: --model $LLM_MODEL_ID --tensor-parallel-size 4 --host 0.0.0.0 --port 8000 --max-seq-len-to-capture $MAX_LEN diff --git a/FinanceAgent/docker_compose/intel/set_env.sh b/FinanceAgent/docker_compose/intel/set_env.sh new file mode 100644 index 0000000000..16893f3ab5 --- /dev/null +++ b/FinanceAgent/docker_compose/intel/set_env.sh @@ -0,0 +1,89 @@ +#!/usr/bin/env bash + +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +# Navigate to the parent directory and source the environment +pushd "../../" > /dev/null +source .set_env.sh +popd > /dev/null + +# Function to check if a variable is set +check_var() { + local var_name="$1" + local var_value="${!var_name}" + if [ -z "${var_value}" ]; then + echo "Error: 
${var_name} is not set. Please set ${var_name}." + return 1 # Return an error code but do not exit the script + fi +} + +# Check critical variables +check_var "HF_TOKEN" +check_var "HOST_IP" + +# VLLM configuration +export VLLM_PORT="${VLLM_PORT:-8086}" +export VLLM_VOLUME="${VLLM_VOLUME:-/data2/huggingface}" +export VLLM_IMAGE="${VLLM_IMAGE:-opea/vllm-gaudi:latest}" +export LLM_MODEL_ID="${LLM_MODEL_ID:-meta-llama/Llama-3.3-70B-Instruct}" +export LLM_ENDPOINT="http://${HOST_IP}:${VLLM_PORT}" +export MAX_LEN="${MAX_LEN:-16384}" +export NUM_CARDS="${NUM_CARDS:-4}" +export HF_CACHE_DIR="${HF_CACHE_DIR:-"./data"}" + +# Data preparation and embedding configuration +export DATAPREP_PORT="${DATAPREP_PORT:-6007}" +export TEI_EMBEDDER_PORT="${TEI_EMBEDDER_PORT:-10221}" +export REDIS_URL_VECTOR="redis://${HOST_IP}:6379" +export REDIS_URL_KV="redis://${HOST_IP}:6380" +export DATAPREP_COMPONENT_NAME="${DATAPREP_COMPONENT_NAME:-OPEA_DATAPREP_REDIS_FINANCE}" +export EMBEDDING_MODEL_ID="${EMBEDDING_MODEL_ID:-BAAI/bge-base-en-v1.5}" +export TEI_EMBEDDING_ENDPOINT="http://${HOST_IP}:${TEI_EMBEDDER_PORT}" + +# Hugging Face API token +export HUGGINGFACEHUB_API_TOKEN="${HF_TOKEN}" + +# Recursion limits +export RECURSION_LIMIT_WORKER="${RECURSION_LIMIT_WORKER:-12}" +export RECURSION_LIMIT_SUPERVISOR="${RECURSION_LIMIT_SUPERVISOR:-10}" + +# LLM configuration +export TEMPERATURE="${TEMPERATURE:-0.5}" +export MAX_TOKENS="${MAX_TOKENS:-4096}" +export MAX_INPUT_TOKENS="${MAX_INPUT_TOKENS:-2048}" +export MAX_TOTAL_TOKENS="${MAX_TOTAL_TOKENS:-4096}" + +# Worker URLs +export WORKER_FINQA_AGENT_URL="http://${HOST_IP}:9095/v1/chat/completions" +export WORKER_RESEARCH_AGENT_URL="http://${HOST_IP}:9096/v1/chat/completions" + +# DocSum configuration +export DOCSUM_COMPONENT_NAME="${DOCSUM_COMPONENT_NAME:-"OpeaDocSumvLLM"}" +export DOCSUM_ENDPOINT="http://${HOST_IP}:9000/v1/docsum" + +# API keys +check_var "FINNHUB_API_KEY" +check_var "FINANCIAL_DATASETS_API_KEY" +export 
FINNHUB_API_KEY="${FINNHUB_API_KEY}" +export FINANCIAL_DATASETS_API_KEY="${FINANCIAL_DATASETS_API_KEY}" + + +# Toolset and prompt paths +if check_var "WORKDIR"; then + export TOOLSET_PATH=$WORKDIR/GenAIExamples/FinanceAgent/tools/ + export PROMPT_PATH=$WORKDIR/GenAIExamples/FinanceAgent/prompts/ + + echo "TOOLSET_PATH=${TOOLSET_PATH}" + echo "PROMPT_PATH=${PROMPT_PATH}" + + # Array of directories to check + REQUIRED_DIRS=("${TOOLSET_PATH}" "${PROMPT_PATH}") + + for dir in "${REQUIRED_DIRS[@]}"; do + if [ ! -d "${dir}" ]; then + echo "Error: Required directory does not exist: ${dir}" + exit 1 + fi + done +fi diff --git a/FinanceAgent/tests/test_compose_on_gaudi.sh b/FinanceAgent/tests/test_compose_on_gaudi.sh index 0f42813978..d534ffa122 100644 --- a/FinanceAgent/tests/test_compose_on_gaudi.sh +++ b/FinanceAgent/tests/test_compose_on_gaudi.sh @@ -6,33 +6,69 @@ set -xe export WORKPATH=$(dirname "$PWD") export WORKDIR=$WORKPATH/../../ echo "WORKDIR=${WORKDIR}" -export ip_address=$(hostname -I | awk '{print $1}') +export IP_ADDRESS=$(hostname -I | awk '{print $1}') +export HOST_IP=${IP_ADDRESS} LOG_PATH=$WORKPATH -#### env vars for LLM endpoint ############# -model=meta-llama/Llama-3.3-70B-Instruct -vllm_image=opea/vllm-gaudi:latest -vllm_port=8086 -vllm_image=$vllm_image -HF_CACHE_DIR=${model_cache:-"/data2/huggingface"} -vllm_volume=${HF_CACHE_DIR} -####################################### +# Proxy settings +export NO_PROXY="${NO_PROXY},${HOST_IP}" +export HTTP_PROXY="${http_proxy}" +export HTTPS_PROXY="${https_proxy}" + +export no_proxy="${no_proxy},${HOST_IP}" +export http_proxy="${http_proxy}" +export https_proxy="${https_proxy}" + +# VLLM configuration +MODEL=meta-llama/Llama-3.3-70B-Instruct +export VLLM_PORT="${VLLM_PORT:-8086}" + +# export HF_CACHE_DIR="${HF_CACHE_DIR:-"./data"}" +export HF_CACHE_DIR=${model_cache:-"./data2/huggingface"} +export VLLM_VOLUME="${HF_CACHE_DIR:-"./data2/huggingface"}" +export VLLM_IMAGE="${VLLM_IMAGE:-opea/vllm-gaudi:latest}" 
+export LLM_MODEL_ID="${LLM_MODEL_ID:-meta-llama/Llama-3.3-70B-Instruct}" +export LLM_MODEL=$LLM_MODEL_ID +export LLM_ENDPOINT="http://${IP_ADDRESS}:${VLLM_PORT}" +export MAX_LEN="${MAX_LEN:-16384}" +export NUM_CARDS="${NUM_CARDS:-4}" + +# Recursion limits +export RECURSION_LIMIT_WORKER="${RECURSION_LIMIT_WORKER:-12}" +export RECURSION_LIMIT_SUPERVISOR="${RECURSION_LIMIT_SUPERVISOR:-10}" + +# Hugging Face API token +export HUGGINGFACEHUB_API_TOKEN="${HF_TOKEN}" + +# LLM configuration +export TEMPERATURE="${TEMPERATURE:-0.5}" +export MAX_TOKENS="${MAX_TOKENS:-4096}" +export MAX_INPUT_TOKENS="${MAX_INPUT_TOKENS:-2048}" +export MAX_TOTAL_TOKENS="${MAX_TOTAL_TOKENS:-4096}" + +# Worker URLs +export WORKER_FINQA_AGENT_URL="http://${IP_ADDRESS}:9095/v1/chat/completions" +export WORKER_RESEARCH_AGENT_URL="http://${IP_ADDRESS}:9096/v1/chat/completions" + +# DocSum configuration +export DOCSUM_COMPONENT_NAME="${DOCSUM_COMPONENT_NAME:-"OpeaDocSumvLLM"}" +export DOCSUM_ENDPOINT="http://${IP_ADDRESS}:9000/v1/docsum" + +# Toolset and prompt paths +export TOOLSET_PATH=$WORKDIR/GenAIExamples/FinanceAgent/tools/ +export PROMPT_PATH=$WORKDIR/GenAIExamples/FinanceAgent/prompts/ #### env vars for dataprep ############# -export host_ip=${ip_address} export DATAPREP_PORT="6007" export TEI_EMBEDDER_PORT="10221" -export REDIS_URL_VECTOR="redis://${ip_address}:6379" -export REDIS_URL_KV="redis://${ip_address}:6380" -export LLM_MODEL=$model -export LLM_ENDPOINT="http://${ip_address}:${vllm_port}" +export REDIS_URL_VECTOR="redis://${IP_ADDRESS}:6379" +export REDIS_URL_KV="redis://${IP_ADDRESS}:6380" + export DATAPREP_COMPONENT_NAME="OPEA_DATAPREP_REDIS_FINANCE" export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" -export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:${TEI_EMBEDDER_PORT}" +export TEI_EMBEDDING_ENDPOINT="http://${IP_ADDRESS}:${TEI_EMBEDDER_PORT}" ####################################### - - function get_genai_comps() { if [ ! 
-d "GenAIComps" ] ; then git clone --depth 1 --branch ${opea_branch:-"main"} https://github.com/opea-project/GenAIComps.git @@ -48,7 +84,7 @@ function build_dataprep_agent_images() { function build_agent_image_local(){ cd $WORKDIR/GenAIComps/ - docker build -t opea/agent:latest -f comps/agent/src/Dockerfile . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy + docker build -t opea/agent:latest -f comps/agent/src/Dockerfile . --build-arg https_proxy=$HTTPS_PROXY --build-arg http_proxy=$HTTP_PROXY } function build_vllm_docker_image() { @@ -62,24 +98,25 @@ function build_vllm_docker_image() { VLLM_FORK_VER=v0.6.6.post1+Gaudi-1.20.0 git checkout ${VLLM_FORK_VER} &> /dev/null - docker build --no-cache -f Dockerfile.hpu -t $vllm_image --shm-size=128g . --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy + docker build --no-cache -f Dockerfile.hpu -t $VLLM_IMAGE --shm-size=128g . --build-arg https_proxy=$HTTPS_PROXY --build-arg http_proxy=$HTTP_PROXY if [ $? -ne 0 ]; then - echo "$vllm_image failed" + echo "$VLLM_IMAGE failed" exit 1 else - echo "$vllm_image successful" + echo "$VLLM_IMAGE successful" fi } +function stop_llm(){ + cid=$(docker ps -aq --filter "name=vllm-gaudi-server") + echo "Stopping container $cid" + if [[ ! 
-z "$cid" ]]; then docker rm $cid -f && sleep 1s; fi + +} + +function start_all_services(){ + docker compose -f $WORKPATH/docker_compose/intel/hpu/gaudi/compose.yaml up -d -function start_vllm_service_70B() { - echo "token is ${HF_TOKEN}" - echo "start vllm gaudi service" - echo "**************model is $model**************" - docker run -d --runtime=habana --rm --name "vllm-gaudi-server" -e HABANA_VISIBLE_DEVICES=all -p $vllm_port:8000 -v $vllm_volume:/data -e HF_TOKEN=$HF_TOKEN -e HUGGING_FACE_HUB_TOKEN=$HF_TOKEN -e HF_HOME=/data -e OMPI_MCA_btl_vader_single_copy_mechanism=none -e PT_HPU_ENABLE_LAZY_COLLECTIVES=true -e http_proxy=$http_proxy -e https_proxy=$https_proxy -e no_proxy=$no_proxy -e VLLM_SKIP_WARMUP=true --cap-add=sys_nice --ipc=host $vllm_image --model ${model} --max-seq-len-to-capture 16384 --tensor-parallel-size 4 - sleep 10s - echo "Waiting vllm gaudi ready" - n=0 until [[ "$n" -ge 200 ]] || [[ $ready == true ]]; do docker logs vllm-gaudi-server &> ${LOG_PATH}/vllm-gaudi-service.log n=$((n+1)) @@ -96,19 +133,6 @@ function start_vllm_service_70B() { echo "Service started successfully" } - -function stop_llm(){ - cid=$(docker ps -aq --filter "name=vllm-gaudi-server") - echo "Stopping container $cid" - if [[ ! -z "$cid" ]]; then docker rm $cid -f && sleep 1s; fi - -} - -function start_dataprep(){ - docker compose -f $WORKPATH/docker_compose/intel/hpu/gaudi/dataprep_compose.yaml up -d - sleep 1m -} - function validate() { local CONTENT="$1" local EXPECTED_RESULT="$2" @@ -155,16 +179,8 @@ function stop_dataprep() { } -function start_agents() { - echo "Starting Agent services" - cd $WORKDIR/GenAIExamples/FinanceAgent/docker_compose/intel/hpu/gaudi/ - bash launch_agents.sh - sleep 2m -} - - function validate_agent_service() { - # # test worker finqa agent + # test worker finqa agent echo "======================Testing worker finqa agent======================" export agent_port="9095" prompt="What is Gap's revenue in 2024?" 
@@ -178,7 +194,7 @@ function validate_agent_service() { exit 1 fi - # # test worker research agent + # test worker research agent echo "======================Testing worker research agent======================" export agent_port="9096" prompt="Johnson & Johnson" @@ -215,7 +231,6 @@ function validate_agent_service() { docker logs supervisor-agent-endpoint exit 1 fi - } function stop_agent_docker() { @@ -228,7 +243,6 @@ function stop_agent_docker() { done } - echo "workpath: $WORKPATH" echo "=================== Stop containers ====================" stop_llm @@ -238,24 +252,22 @@ stop_dataprep cd $WORKPATH/tests echo "=================== #1 Building docker images====================" -build_vllm_docker_image +# build_vllm_docker_image build_dataprep_agent_images -#### for local test -# build_agent_image_local -# echo "=================== #1 Building docker images completed====================" +# ## for local test +# # build_agent_image_local +echo "=================== #1 Building docker images completed====================" -echo "=================== #2 Start vllm endpoint====================" -start_vllm_service_70B -echo "=================== #2 vllm endpoint started====================" +echo "=================== #2 Start services ====================" +start_all_services +echo "=================== #2 Endpoints for services started====================" -echo "=================== #3 Start dataprep and ingest data ====================" -start_dataprep +echo "=================== #3 Validate ingest_validate_dataprep ====================" ingest_validate_dataprep echo "=================== #3 Data ingestion and validation completed====================" echo "=================== #4 Start agents ====================" -start_agents validate_agent_service echo "=================== #4 Agent test passed ====================" From d81c9dbc159c3e0d2d2f58ca8f8a7307c405700d Mon Sep 17 00:00:00 2001 From: Yi Yao Date: Tue, 3 Jun 2025 14:59:06 +0800 Subject: [PATCH 083/217] Add code 
owners. (#2022) Signed-off-by: Yi Yao Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .github/CODEOWNERS | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e57bd74544..5033ca6483 100755 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,17 +2,17 @@ * liang1.lv@intel.com feng.tian@intel.com suyue.chen@intel.com kaokao.lv@intel.com minmin.hou@intel.com rita.brugarolas.brufau@intel.com /.github/ suyue.chen@intel.com ze.pan@intel.com -/AgentQnA/ abolfazl.shahbazi@intel.com kaokao.lv@intel.com minmin.hou@intel.com +/AgentQnA/ abolfazl.shahbazi@intel.com kaokao.lv@intel.com minmin.hou@intel.com xinyu.ye@intel.com /AudioQnA/ sihan.chen@intel.com wenjiao.yue@intel.com -/AvatarChatbot/ chun.tao@intel.com kaokao.lv@intel.com +/AvatarChatbot/ chun.tao@intel.com kaokao.lv@intel.com xinyu.ye@intel.com /ChatQnA/ liang1.lv@intel.com letong.han@intel.com -/CodeGen/ liang1.lv@intel.com -/CodeTrans/ sihan.chen@intel.com -/DBQnA/ supriya.krishnamurthi@intel.com liang1.lv@intel.com -/DocIndexRetriever/ abolfazl.shahbazi@intel.com kaokao.lv@intel.com chendi.xue@intel.com +/CodeGen/ liang1.lv@intel.com qing.yao@intel.com +/CodeTrans/ sihan.chen@intel.com letong.han@intel.com +/DBQnA/ supriya.krishnamurthi@intel.com liang1.lv@intel.com ze.pan@intel.com +/DocIndexRetriever/ abolfazl.shahbazi@intel.com kaokao.lv@intel.com chendi.xue@intel.com ze.pan@intel.com /DocSum/ letong.han@intel.com /EdgeCraftRAG/ yongbo.zhu@intel.com mingyuan.qi@intel.com -/FinanceAgent/ abolfazl.shahbazi@intel.com kaokao.lv@intel.com minmin.hou@intel.com rita.brugarolas.brufau@intel.com +/FinanceAgent/ abolfazl.shahbazi@intel.com kaokao.lv@intel.com minmin.hou@intel.com rita.brugarolas.brufau@intel.com xinyu.ye@intel.com /GraphRAG/ rita.brugarolas.brufau@intel.com abolfazl.shahbazi@intel.com /InstructionTuning/ xinyu.ye@intel.com kaokao.lv@intel.com /MultimodalQnA/ melanie.h.buehler@intel.com 
tiep.le@intel.com @@ -20,7 +20,7 @@ /RerankFinetuning/ xinyu.ye@intel.com kaokao.lv@intel.com /SearchQnA/ sihan.chen@intel.com letong.han@intel.com /Text2Image/ wenjiao.yue@intel.com xinyu.ye@intel.com -/Translation/ liang1.lv@intel.com sihan.chen@intel.com +/Translation/ liang1.lv@intel.com sihan.chen@intel.com qing.yao@intel.com /VideoQnA/ huiling.bao@intel.com -/VisualQnA/ liang1.lv@intel.com sihan.chen@intel.com -/WorkflowExecAgent/ joshua.jian.ern.liew@intel.com kaokao.lv@intel.com \ No newline at end of file +/VisualQnA/ liang1.lv@intel.com sihan.chen@intel.com wenjiao.yue@intel.com +/WorkflowExecAgent/ joshua.jian.ern.liew@intel.com kaokao.lv@intel.com wenjiao.yue@intel.com \ No newline at end of file From 241971b6add5faa08d6a350a69e70ee2d9b2b1ad Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Tue, 3 Jun 2025 15:18:08 +0800 Subject: [PATCH 084/217] Fix MultimodalQnA UT issues (#2011) Signed-off-by: ZePan110 --- MultimodalQnA/tests/test_compose_milvus_on_xeon.sh | 4 ++-- MultimodalQnA/tests/test_compose_on_xeon.sh | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/MultimodalQnA/tests/test_compose_milvus_on_xeon.sh b/MultimodalQnA/tests/test_compose_milvus_on_xeon.sh index c82e0a7c62..d46b38d06f 100644 --- a/MultimodalQnA/tests/test_compose_milvus_on_xeon.sh +++ b/MultimodalQnA/tests/test_compose_milvus_on_xeon.sh @@ -66,11 +66,11 @@ function build_docker_images() { function setup_env() { export COLLECTION_NAME="LangChainCollection" - export MILVUS_HOST=${host_ip} + export MILVUS_HOST=${ip_address} export MILVUS_RETRIEVER_PORT=7000 - export LVM_MODEL_ID="llava-hf/llava-1.5-7b-hf" cd $WORKPATH/docker_compose/intel source set_env.sh + export LVM_MODEL_ID="llava-hf/llava-1.5-7b-hf" } diff --git a/MultimodalQnA/tests/test_compose_on_xeon.sh b/MultimodalQnA/tests/test_compose_on_xeon.sh index 10f015aa7b..2c87016d16 100644 --- a/MultimodalQnA/tests/test_compose_on_xeon.sh +++ b/MultimodalQnA/tests/test_compose_on_xeon.sh @@ -66,8 +66,10 @@ function 
build_docker_images() { } function setup_env() { + export host_ip=${ip_address} cd $WORKPATH/docker_compose/intel source set_env.sh + export LVM_MODEL_ID="llava-hf/llava-1.5-7b-hf" } From c33d0ef4cdbadb99f418ead33c7aab6fd3b3a492 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Thu, 5 Jun 2025 14:36:22 +0800 Subject: [PATCH 085/217] update secrets token name for AgentQnA. (#2023) Signed-off-by: ZePan110 --- .github/workflows/_gmc-e2e.yml | 2 +- .github/workflows/_helm-e2e.yml | 6 +++--- .github/workflows/_run-docker-compose.yml | 4 ++-- AgentQnA/README.md | 2 +- AgentQnA/docker_compose/amd/gpu/rocm/README.md | 2 +- AgentQnA/docker_compose/amd/gpu/rocm/compose.yaml | 10 +++++----- AgentQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml | 10 +++++----- .../amd/gpu/rocm/launch_agent_service_tgi_rocm.sh | 7 ++++--- .../amd/gpu/rocm/launch_agent_service_vllm_rocm.sh | 7 ++++--- .../amd/gpu/rocm/stop_agent_service_tgi_rocm.sh | 2 +- .../amd/gpu/rocm/stop_agent_service_vllm_rocm.sh | 8 ++++---- AgentQnA/docker_compose/intel/cpu/xeon/set_env.sh | 3 ++- AgentQnA/docker_compose/intel/hpu/gaudi/compose.yaml | 8 ++++---- .../grafana/dashboards/download_opea_dashboard.sh | 1 + AgentQnA/docker_compose/intel/hpu/gaudi/set_env.sh | 4 ++-- AgentQnA/docker_compose/intel/hpu/gaudi/tgi_gaudi.yaml | 2 +- AgentQnA/retrieval_tool/launch_retrieval_tool.sh | 3 ++- AgentQnA/retrieval_tool/run_ingest_data.sh | 1 + AgentQnA/tests/sql_agent_test/run_data_split.sh | 1 + AgentQnA/tests/step2_start_retrieval_tool_rocm_vllm.sh | 2 +- .../tests/step4_launch_and_validate_agent_gaudi.sh | 3 +-- AgentQnA/tests/test_compose_on_gaudi.sh | 2 +- AgentQnA/tests/test_compose_on_rocm.sh | 2 +- AgentQnA/tests/test_compose_vllm_on_rocm.sh | 2 +- ChatQnA/benchmark/accuracy_faqgen/launch_tgi.sh | 1 + ChatQnA/benchmark/accuracy_faqgen/run_acc.sh | 1 + .../xeon/grafana/dashboards/download_opea_dashboard.sh | 1 + .../grafana/dashboards/download_opea_dashboard.sh | 1 + ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh | 
2 +- ChatQnA/entrypoint.sh | 1 + HybridRAG/tests/test_compose_on_gaudi.sh | 6 +++--- Text2Image/tests/test_compose_on_gaudi.sh | 1 + Text2Image/tests/test_compose_on_xeon.sh | 1 + 33 files changed, 61 insertions(+), 48 deletions(-) diff --git a/.github/workflows/_gmc-e2e.yml b/.github/workflows/_gmc-e2e.yml index ba50e8b955..debd59ea03 100644 --- a/.github/workflows/_gmc-e2e.yml +++ b/.github/workflows/_gmc-e2e.yml @@ -56,7 +56,7 @@ jobs: - name: Run tests id: run-test env: - HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }} + HF_TOKEN: ${{ secrets.HF_TOKEN }} GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }} GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }} run: | diff --git a/.github/workflows/_helm-e2e.yml b/.github/workflows/_helm-e2e.yml index 61a91ce583..fa7ca06761 100644 --- a/.github/workflows/_helm-e2e.yml +++ b/.github/workflows/_helm-e2e.yml @@ -165,8 +165,8 @@ jobs: env: GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }} GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }} - HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }} - HFTOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }} + HF_TOKEN: ${{ secrets.HF_TOKEN }} + HFTOKEN: ${{ secrets.HF_TOKEN }} value_file: ${{ matrix.value_file }} run: | set -xe @@ -190,7 +190,7 @@ jobs: --namespace $NAMESPACE \ $RELEASE_NAME \ oci://ghcr.io/opea-project/charts/${CHART_NAME} \ - --set global.HUGGINGFACEHUB_API_TOKEN=${HFTOKEN} \ + --set global.HF_TOKEN=${HFTOKEN} \ --set global.modelUseHostPath=/data2/hf_model \ --set GOOGLE_API_KEY=${{ env.GOOGLE_API_KEY}} \ --set GOOGLE_CSE_ID=${{ env.GOOGLE_CSE_ID}} \ diff --git a/.github/workflows/_run-docker-compose.yml b/.github/workflows/_run-docker-compose.yml index 7af15e11be..4301de9ebc 100644 --- a/.github/workflows/_run-docker-compose.yml +++ b/.github/workflows/_run-docker-compose.yml @@ -170,8 +170,8 @@ jobs: - name: Run test shell: bash env: - HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }} - HF_TOKEN: ${{ secrets.HUGGINGFACEHUB_API_TOKEN }} + 
HUGGINGFACEHUB_API_TOKEN: ${{ secrets.HF_TOKEN }} + HF_TOKEN: ${{ secrets.HF_TOKEN }} GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }} GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }} PINECONE_KEY: ${{ secrets.PINECONE_KEY }} diff --git a/AgentQnA/README.md b/AgentQnA/README.md index c78703d6fb..e7f01a3ee1 100644 --- a/AgentQnA/README.md +++ b/AgentQnA/README.md @@ -123,7 +123,7 @@ Set up a [HuggingFace](https://huggingface.co/) account and generate a [user acc Then set an environment variable with the token and another for a directory to download the models: ```bash -export HUGGINGFACEHUB_API_TOKEN= +export HF_TOKEN= export HF_CACHE_DIR= # to avoid redownloading models ``` diff --git a/AgentQnA/docker_compose/amd/gpu/rocm/README.md b/AgentQnA/docker_compose/amd/gpu/rocm/README.md index fe5253ed07..27854510e5 100644 --- a/AgentQnA/docker_compose/amd/gpu/rocm/README.md +++ b/AgentQnA/docker_compose/amd/gpu/rocm/README.md @@ -170,7 +170,7 @@ Use AMD GPU driver utilities to determine the correct `cardN` and `renderN` IDs ### Replace the string 'server_address' with your local server IP address export host_ip='server_address' ### Replace the string 'your_huggingfacehub_token' with your HuggingFacehub repository access token. -export HUGGINGFACEHUB_API_TOKEN='your_huggingfacehub_token' +export HF_TOKEN='your_huggingfacehub_token' ### Replace the string 'your_langchain_api_key' with your LANGCHAIN API KEY. 
export LANGCHAIN_API_KEY='your_langchain_api_key' export LANGCHAIN_TRACING_V2="" diff --git a/AgentQnA/docker_compose/amd/gpu/rocm/compose.yaml b/AgentQnA/docker_compose/amd/gpu/rocm/compose.yaml index 4eab372dec..722019c6e0 100644 --- a/AgentQnA/docker_compose/amd/gpu/rocm/compose.yaml +++ b/AgentQnA/docker_compose/amd/gpu/rocm/compose.yaml @@ -13,8 +13,8 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} TGI_LLM_ENDPOINT: "http://${ip_address}:${TGI_SERVICE_PORT}" - HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} shm_size: 32g devices: - /dev/kfd:/dev/kfd @@ -42,7 +42,7 @@ services: with_memory: false recursion_limit: ${recursion_limit_worker} llm_engine: tgi - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} llm_endpoint_url: ${LLM_ENDPOINT_URL} model: ${LLM_MODEL_ID} temperature: ${temperature} @@ -76,7 +76,7 @@ services: use_hints: false recursion_limit: ${recursion_limit_worker} llm_engine: vllm - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} llm_endpoint_url: ${LLM_ENDPOINT_URL} model: ${LLM_MODEL_ID} temperature: ${temperature} @@ -104,7 +104,7 @@ services: with_memory: true recursion_limit: ${recursion_limit_supervisor} llm_engine: tgi - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} llm_endpoint_url: ${LLM_ENDPOINT_URL} model: ${LLM_MODEL_ID} temperature: ${temperature} diff --git a/AgentQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml b/AgentQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml index 4b37d15d10..28e48fc569 100644 --- a/AgentQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml +++ b/AgentQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml @@ -10,8 +10,8 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - 
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 WILM_USE_TRITON_FLASH_ATTENTION: 0 @@ -46,7 +46,7 @@ services: with_memory: false recursion_limit: ${recursion_limit_worker} llm_engine: vllm - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} llm_endpoint_url: ${LLM_ENDPOINT_URL} model: ${LLM_MODEL_ID} temperature: ${temperature} @@ -80,7 +80,7 @@ services: use_hints: false recursion_limit: ${recursion_limit_worker} llm_engine: vllm - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} llm_endpoint_url: ${LLM_ENDPOINT_URL} model: ${LLM_MODEL_ID} temperature: ${temperature} @@ -108,7 +108,7 @@ services: with_memory: true recursion_limit: ${recursion_limit_supervisor} llm_engine: vllm - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} llm_endpoint_url: ${LLM_ENDPOINT_URL} model: ${LLM_MODEL_ID} temperature: ${temperature} diff --git a/AgentQnA/docker_compose/amd/gpu/rocm/launch_agent_service_tgi_rocm.sh b/AgentQnA/docker_compose/amd/gpu/rocm/launch_agent_service_tgi_rocm.sh index 209d9b372a..7774202208 100644 --- a/AgentQnA/docker_compose/amd/gpu/rocm/launch_agent_service_tgi_rocm.sh +++ b/AgentQnA/docker_compose/amd/gpu/rocm/launch_agent_service_tgi_rocm.sh @@ -1,9 +1,10 @@ +#!/bin/bash # Copyright (C) 2024 Advanced Micro Devices, Inc. 
# SPDX-License-Identifier: Apache-2.0 # Before start script: # export host_ip="your_host_ip_or_host_name" -# export HUGGINGFACEHUB_API_TOKEN="your_huggingface_api_token" +# export HF_TOKEN="your_huggingface_api_token" # export LANGCHAIN_API_KEY="your_langchain_api_key" # export LANGCHAIN_TRACING_V2="" @@ -19,7 +20,7 @@ export CRAG_SERVER_PORT="18114" export WORKPATH=$(dirname "$PWD") export WORKDIR=${WORKPATH}/../../../ -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" export HF_CACHE_DIR="./data" export MODEL_CACHE="./data" @@ -39,7 +40,7 @@ export CRAG_SERVER=http://${ip_address}:${CRAG_SERVER_PORT} export WORKER_AGENT_URL="http://${ip_address}:${WORKER_RAG_AGENT_PORT}/v1/chat/completions" export SQL_AGENT_URL="http://${ip_address}:${WORKER_SQL_AGENT_PORT}/v1/chat/completions" export HF_CACHE_DIR=${HF_CACHE_DIR} -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export no_proxy=${no_proxy} export http_proxy=${http_proxy} export https_proxy=${https_proxy} diff --git a/AgentQnA/docker_compose/amd/gpu/rocm/launch_agent_service_vllm_rocm.sh b/AgentQnA/docker_compose/amd/gpu/rocm/launch_agent_service_vllm_rocm.sh index 2d28ea3c34..1e7a7289bd 100644 --- a/AgentQnA/docker_compose/amd/gpu/rocm/launch_agent_service_vllm_rocm.sh +++ b/AgentQnA/docker_compose/amd/gpu/rocm/launch_agent_service_vllm_rocm.sh @@ -1,9 +1,10 @@ +#!/bin/bash # Copyright (C) 2024 Advanced Micro Devices, Inc. 
# SPDX-License-Identifier: Apache-2.0 # Before start script: # export host_ip="your_host_ip_or_host_name" -# export HUGGINGFACEHUB_API_TOKEN="your_huggingface_api_token" +# export HF_TOKEN="your_huggingface_api_token" # export LANGCHAIN_API_KEY="your_langchain_api_key" # export LANGCHAIN_TRACING_V2="" @@ -19,7 +20,7 @@ export CRAG_SERVER_PORT="18114" export WORKPATH=$(dirname "$PWD") export WORKDIR=${WORKPATH}/../../../ -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export VLLM_LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" export HF_CACHE_DIR="./data" export MODEL_CACHE="./data" @@ -40,7 +41,7 @@ export CRAG_SERVER=http://${ip_address}:${CRAG_SERVER_PORT} export WORKER_AGENT_URL="http://${ip_address}:${WORKER_RAG_AGENT_PORT}/v1/chat/completions" export SQL_AGENT_URL="http://${ip_address}:${WORKER_SQL_AGENT_PORT}/v1/chat/completions" export HF_CACHE_DIR=${HF_CACHE_DIR} -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export no_proxy=${no_proxy} export http_proxy=${http_proxy} export https_proxy=${https_proxy} diff --git a/AgentQnA/docker_compose/amd/gpu/rocm/stop_agent_service_tgi_rocm.sh b/AgentQnA/docker_compose/amd/gpu/rocm/stop_agent_service_tgi_rocm.sh index ab5b133c18..58c6c303cb 100644 --- a/AgentQnA/docker_compose/amd/gpu/rocm/stop_agent_service_tgi_rocm.sh +++ b/AgentQnA/docker_compose/amd/gpu/rocm/stop_agent_service_tgi_rocm.sh @@ -5,7 +5,7 @@ WORKPATH=$(dirname "$PWD")/.. 
export ip_address=${host_ip} -export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token} +export HF_TOKEN=${HF_TOKEN} export AGENTQNA_TGI_IMAGE=ghcr.io/huggingface/text-generation-inference:2.4.1-rocm export AGENTQNA_TGI_SERVICE_PORT="19001" diff --git a/AgentQnA/docker_compose/amd/gpu/rocm/stop_agent_service_vllm_rocm.sh b/AgentQnA/docker_compose/amd/gpu/rocm/stop_agent_service_vllm_rocm.sh index d448864e08..fa9d6508e6 100644 --- a/AgentQnA/docker_compose/amd/gpu/rocm/stop_agent_service_vllm_rocm.sh +++ b/AgentQnA/docker_compose/amd/gpu/rocm/stop_agent_service_vllm_rocm.sh @@ -1,10 +1,11 @@ +#!/bin/bash # Copyright (C) 2024 Advanced Micro Devices, Inc. # SPDX-License-Identifier: Apache-2.0 # Before start script: # export host_ip="your_host_ip_or_host_name" -# export HUGGINGFACEHUB_API_TOKEN="your_huggingface_api_token" +# export HF_TOKEN="your_huggingface_api_token" # export LANGCHAIN_API_KEY="your_langchain_api_key" # export LANGCHAIN_TRACING_V2="" @@ -20,8 +21,7 @@ export CRAG_SERVER_PORT="18114" export WORKPATH=$(dirname "$PWD") export WORKDIR=${WORKPATH}/../../../ -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export VLLM_LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" export HF_CACHE_DIR="./data" export MODEL_CACHE="./data" @@ -42,7 +42,7 @@ export CRAG_SERVER=http://${ip_address}:${CRAG_SERVER_PORT} export WORKER_AGENT_URL="http://${ip_address}:${WORKER_RAG_AGENT_PORT}/v1/chat/completions" export SQL_AGENT_URL="http://${ip_address}:${WORKER_SQL_AGENT_PORT}/v1/chat/completions" export HF_CACHE_DIR=${HF_CACHE_DIR} -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export no_proxy=${no_proxy} export http_proxy=${http_proxy} export https_proxy=${https_proxy} diff --git a/AgentQnA/docker_compose/intel/cpu/xeon/set_env.sh b/AgentQnA/docker_compose/intel/cpu/xeon/set_env.sh index ba452cf4bb..470cfb044e 100644 --- 
a/AgentQnA/docker_compose/intel/cpu/xeon/set_env.sh +++ b/AgentQnA/docker_compose/intel/cpu/xeon/set_env.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 @@ -33,7 +34,7 @@ fi # retriever export host_ip=$(hostname -I | awk '{print $1}') export HF_CACHE_DIR=${HF_CACHE_DIR} -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export no_proxy=${no_proxy} export http_proxy=${http_proxy} export https_proxy=${https_proxy} diff --git a/AgentQnA/docker_compose/intel/hpu/gaudi/compose.yaml b/AgentQnA/docker_compose/intel/hpu/gaudi/compose.yaml index a5240ad4b8..9945fa2b41 100644 --- a/AgentQnA/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/AgentQnA/docker_compose/intel/hpu/gaudi/compose.yaml @@ -16,7 +16,7 @@ services: with_memory: false recursion_limit: ${recursion_limit_worker} llm_engine: vllm - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} llm_endpoint_url: ${LLM_ENDPOINT_URL} model: ${LLM_MODEL_ID} temperature: ${temperature} @@ -50,7 +50,7 @@ services: use_hints: false recursion_limit: ${recursion_limit_worker} llm_engine: vllm - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} llm_endpoint_url: ${LLM_ENDPOINT_URL} model: ${LLM_MODEL_ID} temperature: ${temperature} @@ -79,7 +79,7 @@ services: with_memory: true recursion_limit: ${recursion_limit_supervisor} llm_engine: vllm - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} llm_endpoint_url: ${LLM_ENDPOINT_URL} model: ${LLM_MODEL_ID} temperature: ${temperature} @@ -122,7 +122,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HABANA_VISIBLE_DEVICES: all OMPI_MCA_btl_vader_single_copy_mechanism: none LLM_MODEL_ID: ${LLM_MODEL_ID} diff --git 
a/AgentQnA/docker_compose/intel/hpu/gaudi/grafana/dashboards/download_opea_dashboard.sh b/AgentQnA/docker_compose/intel/hpu/gaudi/grafana/dashboards/download_opea_dashboard.sh index df4012d2d3..a37e6c39d8 100644 --- a/AgentQnA/docker_compose/intel/hpu/gaudi/grafana/dashboards/download_opea_dashboard.sh +++ b/AgentQnA/docker_compose/intel/hpu/gaudi/grafana/dashboards/download_opea_dashboard.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2025 Intel Corporation # SPDX-License-Identifier: Apache-2.0 diff --git a/AgentQnA/docker_compose/intel/hpu/gaudi/set_env.sh b/AgentQnA/docker_compose/intel/hpu/gaudi/set_env.sh index cc8139c960..fb66c459e3 100644 --- a/AgentQnA/docker_compose/intel/hpu/gaudi/set_env.sh +++ b/AgentQnA/docker_compose/intel/hpu/gaudi/set_env.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 @@ -16,8 +17,7 @@ export ip_address=$(hostname -I | awk '{print $1}') # LLM related environment variables export HF_CACHE_DIR=${HF_CACHE_DIR} ls $HF_CACHE_DIR -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export LLM_MODEL_ID="meta-llama/Llama-3.3-70B-Instruct" export NUM_SHARDS=4 export LLM_ENDPOINT_URL="http://${ip_address}:8086" diff --git a/AgentQnA/docker_compose/intel/hpu/gaudi/tgi_gaudi.yaml b/AgentQnA/docker_compose/intel/hpu/gaudi/tgi_gaudi.yaml index 856642456a..86324b8652 100644 --- a/AgentQnA/docker_compose/intel/hpu/gaudi/tgi_gaudi.yaml +++ b/AgentQnA/docker_compose/intel/hpu/gaudi/tgi_gaudi.yaml @@ -13,7 +13,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 HABANA_VISIBLE_DEVICES: all diff --git a/AgentQnA/retrieval_tool/launch_retrieval_tool.sh b/AgentQnA/retrieval_tool/launch_retrieval_tool.sh index 
40ff1ff62a..b5d1a76686 100644 --- a/AgentQnA/retrieval_tool/launch_retrieval_tool.sh +++ b/AgentQnA/retrieval_tool/launch_retrieval_tool.sh @@ -1,9 +1,10 @@ +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 host_ip=$(hostname -I | awk '{print $1}') export HF_CACHE_DIR=${HF_CACHE_DIR} -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export no_proxy=${no_proxy} export http_proxy=${http_proxy} export https_proxy=${https_proxy} diff --git a/AgentQnA/retrieval_tool/run_ingest_data.sh b/AgentQnA/retrieval_tool/run_ingest_data.sh index 8dcd5a22fc..00a3c4e785 100644 --- a/AgentQnA/retrieval_tool/run_ingest_data.sh +++ b/AgentQnA/retrieval_tool/run_ingest_data.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 diff --git a/AgentQnA/tests/sql_agent_test/run_data_split.sh b/AgentQnA/tests/sql_agent_test/run_data_split.sh index 2fc2dfcb0e..07f1d71474 100644 --- a/AgentQnA/tests/sql_agent_test/run_data_split.sh +++ b/AgentQnA/tests/sql_agent_test/run_data_split.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 diff --git a/AgentQnA/tests/step2_start_retrieval_tool_rocm_vllm.sh b/AgentQnA/tests/step2_start_retrieval_tool_rocm_vllm.sh index 1c039ad6a0..6a15e79c37 100644 --- a/AgentQnA/tests/step2_start_retrieval_tool_rocm_vllm.sh +++ b/AgentQnA/tests/step2_start_retrieval_tool_rocm_vllm.sh @@ -20,7 +20,7 @@ function start_retrieval_tool() { cd $WORKPATH/../DocIndexRetriever/docker_compose/intel/cpu/xeon host_ip=$(hostname -I | awk '{print $1}') export HF_CACHE_DIR=${HF_CACHE_DIR} - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} + export HF_TOKEN=${HF_TOKEN} export no_proxy=${no_proxy} export http_proxy=${http_proxy} export https_proxy=${https_proxy} diff --git a/AgentQnA/tests/step4_launch_and_validate_agent_gaudi.sh 
b/AgentQnA/tests/step4_launch_and_validate_agent_gaudi.sh index 6e970ce2e8..b853094c50 100644 --- a/AgentQnA/tests/step4_launch_and_validate_agent_gaudi.sh +++ b/AgentQnA/tests/step4_launch_and_validate_agent_gaudi.sh @@ -11,8 +11,7 @@ export ip_address=$(hostname -I | awk '{print $1}') export host_ip=$ip_address echo "ip_address=${ip_address}" export TOOLSET_PATH=$WORKPATH/tools/ -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} model="meta-llama/Llama-3.3-70B-Instruct" #"meta-llama/Meta-Llama-3.1-70B-Instruct" export HF_CACHE_DIR=${model_cache:-"/data2/huggingface"} diff --git a/AgentQnA/tests/test_compose_on_gaudi.sh b/AgentQnA/tests/test_compose_on_gaudi.sh index f6e7e95997..2920455e2b 100644 --- a/AgentQnA/tests/test_compose_on_gaudi.sh +++ b/AgentQnA/tests/test_compose_on_gaudi.sh @@ -7,7 +7,7 @@ WORKPATH=$(dirname "$PWD") export WORKDIR=$WORKPATH/../../ echo "WORKDIR=${WORKDIR}" export ip_address=$(hostname -I | awk '{print $1}') -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export TOOLSET_PATH=$WORKDIR/GenAIExamples/AgentQnA/tools/ export no_proxy="$no_proxy,rag-agent-endpoint,sql-agent-endpoint,react-agent-endpoint,agent-ui,vllm-gaudi-server,jaeger,grafana,prometheus,127.0.0.1,localhost,0.0.0.0,$ip_address" IMAGE_REPO=${IMAGE_REPO:-"opea"} diff --git a/AgentQnA/tests/test_compose_on_rocm.sh b/AgentQnA/tests/test_compose_on_rocm.sh index 561ab0a967..1a466fff72 100644 --- a/AgentQnA/tests/test_compose_on_rocm.sh +++ b/AgentQnA/tests/test_compose_on_rocm.sh @@ -9,7 +9,7 @@ ls $WORKPATH export WORKDIR=$WORKPATH/../../ echo "WORKDIR=${WORKDIR}" export ip_address=$(hostname -I | awk '{print $1}') -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export TOOLSET_PATH=$WORKPATH/tools/ IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} diff --git a/AgentQnA/tests/test_compose_vllm_on_rocm.sh 
b/AgentQnA/tests/test_compose_vllm_on_rocm.sh index 711554f965..01630ee243 100644 --- a/AgentQnA/tests/test_compose_vllm_on_rocm.sh +++ b/AgentQnA/tests/test_compose_vllm_on_rocm.sh @@ -8,7 +8,7 @@ WORKPATH=$(dirname "$PWD") export WORKDIR=${WORKPATH}/../../ echo "WORKDIR=${WORKDIR}" export ip_address=$(hostname -I | awk '{print $1}') -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export TOOLSET_PATH=$WORKPATH/tools/ IMAGE_REPO=${IMAGE_REPO:-"opea"} IMAGE_TAG=${IMAGE_TAG:-"latest"} diff --git a/ChatQnA/benchmark/accuracy_faqgen/launch_tgi.sh b/ChatQnA/benchmark/accuracy_faqgen/launch_tgi.sh index 72082d1419..0af4431184 100644 --- a/ChatQnA/benchmark/accuracy_faqgen/launch_tgi.sh +++ b/ChatQnA/benchmark/accuracy_faqgen/launch_tgi.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 diff --git a/ChatQnA/benchmark/accuracy_faqgen/run_acc.sh b/ChatQnA/benchmark/accuracy_faqgen/run_acc.sh index 766b718ff8..d31cc74080 100644 --- a/ChatQnA/benchmark/accuracy_faqgen/run_acc.sh +++ b/ChatQnA/benchmark/accuracy_faqgen/run_acc.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/grafana/dashboards/download_opea_dashboard.sh b/ChatQnA/docker_compose/intel/cpu/xeon/grafana/dashboards/download_opea_dashboard.sh index c3739ec705..b98476de2a 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/grafana/dashboards/download_opea_dashboard.sh +++ b/ChatQnA/docker_compose/intel/cpu/xeon/grafana/dashboards/download_opea_dashboard.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2025 Intel Corporation # SPDX-License-Identifier: Apache-2.0 if ls *.json 1> /dev/null 2>&1; then diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/grafana/dashboards/download_opea_dashboard.sh b/ChatQnA/docker_compose/intel/hpu/gaudi/grafana/dashboards/download_opea_dashboard.sh index 03e0d1f3c2..598ed1427a 
100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/grafana/dashboards/download_opea_dashboard.sh +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/grafana/dashboards/download_opea_dashboard.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2025 Intel Corporation # SPDX-License-Identifier: Apache-2.0 rm *.json diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh b/ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh index fe847e6036..1d0409eccd 100755 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh @@ -1,4 +1,4 @@ -#/usr/bin/env bash +#!/usr/bin/env bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 diff --git a/ChatQnA/entrypoint.sh b/ChatQnA/entrypoint.sh index ae395cde0e..c45ae5f94b 100644 --- a/ChatQnA/entrypoint.sh +++ b/ChatQnA/entrypoint.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2025 Intel Corporation # SPDX-License-Identifier: Apache-2.0 diff --git a/HybridRAG/tests/test_compose_on_gaudi.sh b/HybridRAG/tests/test_compose_on_gaudi.sh index f8c2ccf203..9c512afc9a 100755 --- a/HybridRAG/tests/test_compose_on_gaudi.sh +++ b/HybridRAG/tests/test_compose_on_gaudi.sh @@ -99,7 +99,7 @@ function validate_service() { local HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X POST -d "$INPUT_DATA" -H 'Content-Type: application/json' "$URL") - if [ "DOCKER_NAME" -eq "text2cypher-gaudi-container" ]; then + if [ "$DOCKER_NAME" == "text2cypher-gaudi-container" ]; then docker ps docker logs text2cypher-gaudi-container fi @@ -114,7 +114,7 @@ function validate_service() { else echo "[ $SERVICE_NAME ] Content does not match the expected result: $CONTENT" docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log - if [ "DOCKER_NAME" -eq "hybridrag-xeon-backend-server" ]; then + if [ "$DOCKER_NAME" == "hybridrag-xeon-backend-server" ]; then docker ps docker logs text2cypher-gaudi-container fi @@ -123,7 +123,7 @@ function validate_service() { else echo "[ $SERVICE_NAME ] 
HTTP status is not 200. Received status was $HTTP_STATUS" docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log - if [ "DOCKER_NAME" -eq "hybridrag-xeon-backend-server" ]; then + if [ "$DOCKER_NAME" == "hybridrag-xeon-backend-server" ]; then docker ps docker logs text2cypher-gaudi-container fi diff --git a/Text2Image/tests/test_compose_on_gaudi.sh b/Text2Image/tests/test_compose_on_gaudi.sh index 2e4e70c84d..811782cd5b 100644 --- a/Text2Image/tests/test_compose_on_gaudi.sh +++ b/Text2Image/tests/test_compose_on_gaudi.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 diff --git a/Text2Image/tests/test_compose_on_xeon.sh b/Text2Image/tests/test_compose_on_xeon.sh index ac2f344482..650c5d47ed 100644 --- a/Text2Image/tests/test_compose_on_xeon.sh +++ b/Text2Image/tests/test_compose_on_xeon.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 From 6949dbc1d1ed871fdbe96958a326b2bf7f14fa78 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Thu, 5 Jun 2025 14:58:43 +0800 Subject: [PATCH 086/217] update secrets token name for AudioQnA. 
(#2024) Signed-off-by: ZePan110 --- AudioQnA/benchmark/accuracy/run_acc.sh | 2 +- AudioQnA/docker_compose/amd/gpu/rocm/README.md | 4 ++-- AudioQnA/docker_compose/amd/gpu/rocm/compose.yaml | 2 +- AudioQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml | 4 ++-- AudioQnA/docker_compose/amd/gpu/rocm/set_env.sh | 5 ++--- AudioQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh | 4 ++-- AudioQnA/docker_compose/intel/cpu/xeon/README.md | 2 +- AudioQnA/docker_compose/intel/cpu/xeon/README_vllm.md | 2 +- AudioQnA/docker_compose/intel/cpu/xeon/compose.yaml | 2 +- .../docker_compose/intel/cpu/xeon/compose_multilang.yaml | 2 +- AudioQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml | 2 +- AudioQnA/docker_compose/intel/cpu/xeon/set_env.sh | 2 +- AudioQnA/docker_compose/intel/hpu/gaudi/README.md | 2 +- AudioQnA/docker_compose/intel/hpu/gaudi/compose.yaml | 2 +- AudioQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml | 2 +- AudioQnA/docker_compose/intel/hpu/gaudi/set_env.sh | 2 +- AudioQnA/tests/README.md | 2 +- README-deploy-benchmark.md | 2 +- deploy.py | 4 ++-- deploy_and_benchmark.py | 2 +- 20 files changed, 25 insertions(+), 26 deletions(-) diff --git a/AudioQnA/benchmark/accuracy/run_acc.sh b/AudioQnA/benchmark/accuracy/run_acc.sh index af7fab3c41..e56835e1be 100644 --- a/AudioQnA/benchmark/accuracy/run_acc.sh +++ b/AudioQnA/benchmark/accuracy/run_acc.sh @@ -1,4 +1,4 @@ - +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 diff --git a/AudioQnA/docker_compose/amd/gpu/rocm/README.md b/AudioQnA/docker_compose/amd/gpu/rocm/README.md index d26e52553c..651974b575 100644 --- a/AudioQnA/docker_compose/amd/gpu/rocm/README.md +++ b/AudioQnA/docker_compose/amd/gpu/rocm/README.md @@ -72,7 +72,7 @@ For TGI inference usage: ```bash export host_ip="External_Public_IP" # ip address of the node -export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export HF_TOKEN="Your_HuggingFace_API_Token" export http_proxy="Your_HTTP_Proxy" # http proxy if any 
export https_proxy="Your_HTTPs_Proxy" # https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip,whisper-service,speecht5-service,vllm-service,tgi-service,audioqna-xeon-backend-server,audioqna-xeon-ui-server # additional no proxies if needed @@ -84,7 +84,7 @@ For vLLM inference usage ```bash export host_ip="External_Public_IP" # ip address of the node -export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export HF_TOKEN="Your_HuggingFace_API_Token" export http_proxy="Your_HTTP_Proxy" # http proxy if any export https_proxy="Your_HTTPs_Proxy" # https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip,whisper-service,speecht5-service,vllm-service,tgi-service,audioqna-xeon-backend-server,audioqna-xeon-ui-server # additional no proxies if needed diff --git a/AudioQnA/docker_compose/amd/gpu/rocm/compose.yaml b/AudioQnA/docker_compose/amd/gpu/rocm/compose.yaml index 2465fec1f5..5ef2298b10 100644 --- a/AudioQnA/docker_compose/amd/gpu/rocm/compose.yaml +++ b/AudioQnA/docker_compose/amd/gpu/rocm/compose.yaml @@ -40,7 +40,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 command: --model-id ${LLM_MODEL_ID} diff --git a/AudioQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml b/AudioQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml index da7defe67d..cda5b2529d 100644 --- a/AudioQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml +++ b/AudioQnA/docker_compose/amd/gpu/rocm/compose_vllm.yaml @@ -35,8 +35,8 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 WILM_USE_TRITON_FLASH_ATTENTION: 0 diff --git 
a/AudioQnA/docker_compose/amd/gpu/rocm/set_env.sh b/AudioQnA/docker_compose/amd/gpu/rocm/set_env.sh index d4a0bda6d1..b3106df68b 100644 --- a/AudioQnA/docker_compose/amd/gpu/rocm/set_env.sh +++ b/AudioQnA/docker_compose/amd/gpu/rocm/set_env.sh @@ -1,5 +1,4 @@ -#!/usr/bin/env bash set_env.sh - +#!/usr/bin/env bash # Copyright (C) 2024 Advanced Micro Devices, Inc. # SPDX-License-Identifier: Apache-2.0 @@ -7,7 +6,7 @@ # export host_ip= # export host_ip=$(hostname -I | awk '{print $1}') export host_ip=${ip_address} -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} # export LLM_MODEL_ID=Intel/neural-chat-7b-v3-3 diff --git a/AudioQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/AudioQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh index 9cd8934f49..c6c59342a4 100644 --- a/AudioQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/AudioQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -1,4 +1,4 @@ -#!/usr/bin/env bash set_env.sh +#!/usr/bin/env bash # Copyright (C) 2024 Advanced Micro Devices, Inc. 
# SPDX-License-Identifier: Apache-2.0 @@ -8,7 +8,7 @@ export host_ip=${ip_address} export external_host_ip=${ip_address} -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export HF_CACHE_DIR="./data" export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" export VLLM_SERVICE_PORT="8081" diff --git a/AudioQnA/docker_compose/intel/cpu/xeon/README.md b/AudioQnA/docker_compose/intel/cpu/xeon/README.md index 3994d34219..918e5c8a21 100644 --- a/AudioQnA/docker_compose/intel/cpu/xeon/README.md +++ b/AudioQnA/docker_compose/intel/cpu/xeon/README.md @@ -43,7 +43,7 @@ To set up environment variables for deploying AudioQnA services, set up some par ```bash export host_ip="External_Public_IP" # ip address of the node -export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export HF_TOKEN="Your_HuggingFace_API_Token" export http_proxy="Your_HTTP_Proxy" # http proxy if any export https_proxy="Your_HTTPs_Proxy" # https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip,whisper-service,speecht5-service,vllm-service,tgi-service,audioqna-xeon-backend-server,audioqna-xeon-ui-server # additional no proxies if needed diff --git a/AudioQnA/docker_compose/intel/cpu/xeon/README_vllm.md b/AudioQnA/docker_compose/intel/cpu/xeon/README_vllm.md index 8602259532..8b94343e32 100644 --- a/AudioQnA/docker_compose/intel/cpu/xeon/README_vllm.md +++ b/AudioQnA/docker_compose/intel/cpu/xeon/README_vllm.md @@ -68,7 +68,7 @@ The output of the command should contain images: ```bash ### Replace the string 'your_huggingfacehub_token' with your HuggingFacehub repository access token. 
-export HUGGINGFACEHUB_API_TOKEN='your_huggingfacehub_token' +export HF_TOKEN='your_huggingfacehub_token' ``` ### Setting variables in the file set_env_vllm.sh diff --git a/AudioQnA/docker_compose/intel/cpu/xeon/compose.yaml b/AudioQnA/docker_compose/intel/cpu/xeon/compose.yaml index 1fe5e6b2a6..1f3b5ec454 100644 --- a/AudioQnA/docker_compose/intel/cpu/xeon/compose.yaml +++ b/AudioQnA/docker_compose/intel/cpu/xeon/compose.yaml @@ -36,7 +36,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LLM_MODEL_ID: ${LLM_MODEL_ID} VLLM_TORCH_PROFILER_DIR: "/mnt" LLM_SERVER_PORT: ${LLM_SERVER_PORT} diff --git a/AudioQnA/docker_compose/intel/cpu/xeon/compose_multilang.yaml b/AudioQnA/docker_compose/intel/cpu/xeon/compose_multilang.yaml index 4499da33bf..528ba48c68 100644 --- a/AudioQnA/docker_compose/intel/cpu/xeon/compose_multilang.yaml +++ b/AudioQnA/docker_compose/intel/cpu/xeon/compose_multilang.yaml @@ -40,7 +40,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LLM_MODEL_ID: ${LLM_MODEL_ID} VLLM_TORCH_PROFILER_DIR: "/mnt" LLM_SERVER_PORT: ${LLM_SERVER_PORT} diff --git a/AudioQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml b/AudioQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml index d421f488fd..2dda4379ee 100644 --- a/AudioQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml +++ b/AudioQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml @@ -36,7 +36,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LLM_SERVER_PORT: ${LLM_SERVER_PORT} healthcheck: test: ["CMD-SHELL", "curl -f http://$host_ip:${LLM_SERVER_PORT}/health || exit 1"] diff --git a/AudioQnA/docker_compose/intel/cpu/xeon/set_env.sh b/AudioQnA/docker_compose/intel/cpu/xeon/set_env.sh index 
4a63ef65b3..3fb001855d 100644 --- a/AudioQnA/docker_compose/intel/cpu/xeon/set_env.sh +++ b/AudioQnA/docker_compose/intel/cpu/xeon/set_env.sh @@ -5,7 +5,7 @@ # export host_ip= export host_ip=$(hostname -I | awk '{print $1}') -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} # export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" diff --git a/AudioQnA/docker_compose/intel/hpu/gaudi/README.md b/AudioQnA/docker_compose/intel/hpu/gaudi/README.md index dbec6d11bd..ae2ede434e 100644 --- a/AudioQnA/docker_compose/intel/hpu/gaudi/README.md +++ b/AudioQnA/docker_compose/intel/hpu/gaudi/README.md @@ -43,7 +43,7 @@ To set up environment variables for deploying AudioQnA services, set up some par ```bash export host_ip="External_Public_IP" # ip address of the node -export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export HF_TOKEN="Your_HuggingFace_API_Token" export http_proxy="Your_HTTP_Proxy" # http proxy if any export https_proxy="Your_HTTPs_Proxy" # https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip,whisper-service,speecht5-service,vllm-service,tgi-service,audioqna-gaudi-backend-server,audioqna-gaudi-ui-server # additional no proxies if needed diff --git a/AudioQnA/docker_compose/intel/hpu/gaudi/compose.yaml b/AudioQnA/docker_compose/intel/hpu/gaudi/compose.yaml index c1e9db4505..dcd05ff160 100644 --- a/AudioQnA/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/AudioQnA/docker_compose/intel/hpu/gaudi/compose.yaml @@ -45,7 +45,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 HABANA_VISIBLE_DEVICES: all diff --git a/AudioQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml b/AudioQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml index f14bd8cb99..c1d02da25a 100644 --- a/AudioQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml +++ 
b/AudioQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml @@ -45,7 +45,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 HABANA_VISIBLE_DEVICES: all diff --git a/AudioQnA/docker_compose/intel/hpu/gaudi/set_env.sh b/AudioQnA/docker_compose/intel/hpu/gaudi/set_env.sh index 179a8c2a24..4b19d19c08 100644 --- a/AudioQnA/docker_compose/intel/hpu/gaudi/set_env.sh +++ b/AudioQnA/docker_compose/intel/hpu/gaudi/set_env.sh @@ -5,7 +5,7 @@ # export host_ip= export host_ip=$(hostname -I | awk '{print $1}') -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} # export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" diff --git a/AudioQnA/tests/README.md b/AudioQnA/tests/README.md index 390c182447..c55fa7b7b5 100644 --- a/AudioQnA/tests/README.md +++ b/AudioQnA/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test diff --git a/README-deploy-benchmark.md b/README-deploy-benchmark.md index 1b0f0ee530..cbdef4fb62 100644 --- a/README-deploy-benchmark.md +++ b/README-deploy-benchmark.md @@ -30,7 +30,7 @@ Before running the benchmarks, ensure you have: - (Optional) [Kubernetes set up guide on Intel Gaudi product](https://github.com/opea-project/GenAIInfra/blob/main/README.md#setup-kubernetes-cluster) 2. **Configuration YAML** - The configuration file (e.g., `./ChatQnA/benchmark_chatqna.yaml`) consists of two main sections: deployment and benchmarking. Required fields with `# mandatory` comment must be filled with valid values, such as `HUGGINGFACEHUB_API_TOKEN`. 
For all other fields, you can either customize them according to our needs or leave them empty ("") to use the default values from the [helm charts](https://github.com/opea-project/GenAIInfra/tree/main/helm-charts). + The configuration file (e.g., `./ChatQnA/benchmark_chatqna.yaml`) consists of two main sections: deployment and benchmarking. Required fields with `# mandatory` comment must be filled with valid values, such as `HF_TOKEN`. For all other fields, you can either customize them according to our needs or leave them empty ("") to use the default values from the [helm charts](https://github.com/opea-project/GenAIInfra/tree/main/helm-charts). **Default Models**: diff --git a/deploy.py b/deploy.py index 184b82e187..682af01c17 100644 --- a/deploy.py +++ b/deploy.py @@ -220,11 +220,11 @@ def generate_helm_values(example_type, deploy_config, chart_dir, action_type, no # Initialize base values values = { "global": { - "HUGGINGFACEHUB_API_TOKEN": deploy_config.get("HUGGINGFACEHUB_API_TOKEN", ""), + "HF_TOKEN": deploy_config.get("HF_TOKEN", ""), "modelUseHostPath": deploy_config.get("modelUseHostPath", ""), } } - os.environ["HF_TOKEN"] = deploy_config.get("HUGGINGFACEHUB_API_TOKEN", "") + os.environ["HF_TOKEN"] = deploy_config.get("HF_TOKEN", "") # Configure components values = configure_node_selectors(values, node_selector or {}, deploy_config) diff --git a/deploy_and_benchmark.py b/deploy_and_benchmark.py index 495a554525..e14be3c800 100644 --- a/deploy_and_benchmark.py +++ b/deploy_and_benchmark.py @@ -187,7 +187,7 @@ def main(yaml_file, target_node=None, test_mode="oob", clean_up=True): return # Set HF_TOKEN - HF_TOKEN = deploy_config.get("HUGGINGFACEHUB_API_TOKEN", "") + HF_TOKEN = deploy_config.get("HF_TOKEN", "") os.environ["HF_TOKEN"] = HF_TOKEN os.environ["HUGGINGFACEHUB_API_TOKEN"] = HF_TOKEN From 32b18cf4e02bb37e0e23251888d951f34aff1b51 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Thu, 5 Jun 2025 15:18:14 +0800 Subject: [PATCH 087/217] update secrets 
token name for AvatarChatbot and DBQnA. (#2030) Signed-off-by: ZePan110 --- AvatarChatbot/docker_compose/amd/gpu/rocm/README.md | 4 ++-- AvatarChatbot/docker_compose/amd/gpu/rocm/compose.yaml | 4 ++-- AvatarChatbot/docker_compose/amd/gpu/rocm/set_env.sh | 2 +- AvatarChatbot/docker_compose/intel/cpu/xeon/README.md | 4 ++-- AvatarChatbot/docker_compose/intel/cpu/xeon/compose.yaml | 2 +- AvatarChatbot/docker_compose/intel/cpu/xeon/set_env.sh | 2 +- AvatarChatbot/docker_compose/intel/hpu/gaudi/README.md | 4 ++-- AvatarChatbot/docker_compose/intel/hpu/gaudi/compose.yaml | 2 +- AvatarChatbot/docker_compose/intel/hpu/gaudi/set_env.sh | 2 +- AvatarChatbot/tests/README.md | 2 +- DBQnA/docker_compose/amd/gpu/rocm/README.md | 2 +- DBQnA/docker_compose/amd/gpu/rocm/set_env.sh | 2 +- DBQnA/docker_compose/intel/cpu/xeon/README.md | 8 ++++---- DBQnA/docker_compose/intel/cpu/xeon/compose.yaml | 4 ++-- DBQnA/docker_compose/intel/cpu/xeon/set_env.sh | 4 ++-- DBQnA/tests/README.md | 2 +- 16 files changed, 25 insertions(+), 25 deletions(-) diff --git a/AvatarChatbot/docker_compose/amd/gpu/rocm/README.md b/AvatarChatbot/docker_compose/amd/gpu/rocm/README.md index a94924ab16..036810d53a 100644 --- a/AvatarChatbot/docker_compose/amd/gpu/rocm/README.md +++ b/AvatarChatbot/docker_compose/amd/gpu/rocm/README.md @@ -68,7 +68,7 @@ Then run the command `docker images`, you will have following images ready: Before starting the services with `docker compose`, you have to recheck the following environment variables. 
```bash -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export host_ip=$(hostname -I | awk '{print $1}') export TGI_SERVICE_PORT=3006 @@ -203,7 +203,7 @@ In the current version v1.3, you need to set the avatar figure image/video and t cd GenAIExamples/AvatarChatbot/tests export IMAGE_REPO="opea" export IMAGE_TAG="latest" -export HUGGINGFACEHUB_API_TOKEN= +export HF_TOKEN= test_avatarchatbot_on_xeon.sh ``` diff --git a/AvatarChatbot/docker_compose/amd/gpu/rocm/compose.yaml b/AvatarChatbot/docker_compose/amd/gpu/rocm/compose.yaml index 884e1fcf79..651ff1a146 100644 --- a/AvatarChatbot/docker_compose/amd/gpu/rocm/compose.yaml +++ b/AvatarChatbot/docker_compose/amd/gpu/rocm/compose.yaml @@ -52,8 +52,8 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} shm_size: 1g devices: - /dev/kfd:/dev/kfd diff --git a/AvatarChatbot/docker_compose/amd/gpu/rocm/set_env.sh b/AvatarChatbot/docker_compose/amd/gpu/rocm/set_env.sh index 38d54c38f7..02e35b241f 100644 --- a/AvatarChatbot/docker_compose/amd/gpu/rocm/set_env.sh +++ b/AvatarChatbot/docker_compose/amd/gpu/rocm/set_env.sh @@ -3,7 +3,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export OPENAI_API_KEY=${OPENAI_API_KEY} export host_ip=$(hostname -I | awk '{print $1}') diff --git a/AvatarChatbot/docker_compose/intel/cpu/xeon/README.md b/AvatarChatbot/docker_compose/intel/cpu/xeon/README.md index b803392f80..e4c91f67cd 100644 --- a/AvatarChatbot/docker_compose/intel/cpu/xeon/README.md +++ b/AvatarChatbot/docker_compose/intel/cpu/xeon/README.md @@ -58,7 +58,7 @@ Then run the command `docker images`, you will have following images ready: Before starting the 
services with `docker compose`, you have to recheck the following environment variables. ```bash -export HUGGINGFACEHUB_API_TOKEN= +export HF_TOKEN= export host_ip=$(hostname -I | awk '{print $1}') export LLM_MODEL_ID=Intel/neural-chat-7b-v3-3 @@ -173,7 +173,7 @@ In the current version v1.3, you need to set the avatar figure image/video and t cd GenAIExamples/AvatarChatbot/tests export IMAGE_REPO="opea" export IMAGE_TAG="latest" -export HUGGINGFACEHUB_API_TOKEN= +export HF_TOKEN= test_avatarchatbot_on_xeon.sh ``` diff --git a/AvatarChatbot/docker_compose/intel/cpu/xeon/compose.yaml b/AvatarChatbot/docker_compose/intel/cpu/xeon/compose.yaml index f33449d020..6d9491f6c7 100644 --- a/AvatarChatbot/docker_compose/intel/cpu/xeon/compose.yaml +++ b/AvatarChatbot/docker_compose/intel/cpu/xeon/compose.yaml @@ -37,7 +37,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://${host_ip}:3006/health || exit 1"] interval: 10s diff --git a/AvatarChatbot/docker_compose/intel/cpu/xeon/set_env.sh b/AvatarChatbot/docker_compose/intel/cpu/xeon/set_env.sh index 49c7e4cdd7..c65d6b9446 100644 --- a/AvatarChatbot/docker_compose/intel/cpu/xeon/set_env.sh +++ b/AvatarChatbot/docker_compose/intel/cpu/xeon/set_env.sh @@ -6,7 +6,7 @@ pushd "../../../../../" > /dev/null source .set_env.sh popd > /dev/null -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export host_ip=$(hostname -I | awk '{print $1}') export LLM_MODEL_ID=Intel/neural-chat-7b-v3-3 export WAV2LIP_ENDPOINT=http://$host_ip:7860 diff --git a/AvatarChatbot/docker_compose/intel/hpu/gaudi/README.md b/AvatarChatbot/docker_compose/intel/hpu/gaudi/README.md index 105987ec18..2dfc814437 100644 --- a/AvatarChatbot/docker_compose/intel/hpu/gaudi/README.md +++ b/AvatarChatbot/docker_compose/intel/hpu/gaudi/README.md @@ -58,7 +58,7 @@ Then run the command 
`docker images`, you will have following images ready: Before starting the services with `docker compose`, you have to recheck the following environment variables. ```bash -export HUGGINGFACEHUB_API_TOKEN= +export HF_TOKEN= export host_ip=$(hostname -I | awk '{print $1}') export LLM_MODEL_ID=Intel/neural-chat-7b-v3-3 @@ -183,7 +183,7 @@ In the current version v1.3, you need to set the avatar figure image/video and t cd GenAIExamples/AvatarChatbot/tests export IMAGE_REPO="opea" export IMAGE_TAG="latest" -export HUGGINGFACEHUB_API_TOKEN= +export HF_TOKEN= test_avatarchatbot_on_gaudi.sh ``` diff --git a/AvatarChatbot/docker_compose/intel/hpu/gaudi/compose.yaml b/AvatarChatbot/docker_compose/intel/hpu/gaudi/compose.yaml index aba9bb910c..036736a574 100644 --- a/AvatarChatbot/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/AvatarChatbot/docker_compose/intel/hpu/gaudi/compose.yaml @@ -48,7 +48,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 HABANA_VISIBLE_DEVICES: all diff --git a/AvatarChatbot/docker_compose/intel/hpu/gaudi/set_env.sh b/AvatarChatbot/docker_compose/intel/hpu/gaudi/set_env.sh index a55f4b4f58..a14f168d39 100644 --- a/AvatarChatbot/docker_compose/intel/hpu/gaudi/set_env.sh +++ b/AvatarChatbot/docker_compose/intel/hpu/gaudi/set_env.sh @@ -6,7 +6,7 @@ pushd "../../../../../" > /dev/null source .set_env.sh popd > /dev/null -export HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN +export HF_TOKEN=$HF_TOKEN export host_ip=$(hostname -I | awk '{print $1}') export LLM_MODEL_ID=Intel/neural-chat-7b-v3-3 diff --git a/AvatarChatbot/tests/README.md b/AvatarChatbot/tests/README.md index 411afc28b7..bc50211ec0 100644 --- a/AvatarChatbot/tests/README.md +++ b/AvatarChatbot/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export 
HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test diff --git a/DBQnA/docker_compose/amd/gpu/rocm/README.md b/DBQnA/docker_compose/amd/gpu/rocm/README.md index 014d5722c4..3e212f31e5 100644 --- a/DBQnA/docker_compose/amd/gpu/rocm/README.md +++ b/DBQnA/docker_compose/amd/gpu/rocm/README.md @@ -36,7 +36,7 @@ Then run the command `docker images`, you will have the following Docker Images: We set default model as "mistralai/Mistral-7B-Instruct-v0.3", change "LLM_MODEL_ID" in following Environment Variables setting if you want to use other models. -If use gated models, you also need to provide [huggingface token](https://huggingface.co/docs/hub/security-tokens) to "HUGGINGFACEHUB_API_TOKEN" environment variable. +If use gated models, you also need to provide [huggingface token](https://huggingface.co/docs/hub/security-tokens) to "HF_TOKEN" environment variable. ### 2.1 Setup Environment Variables diff --git a/DBQnA/docker_compose/amd/gpu/rocm/set_env.sh b/DBQnA/docker_compose/amd/gpu/rocm/set_env.sh index f744dbcc0f..fb52787fb0 100644 --- a/DBQnA/docker_compose/amd/gpu/rocm/set_env.sh +++ b/DBQnA/docker_compose/amd/gpu/rocm/set_env.sh @@ -9,7 +9,7 @@ source .set_env.sh popd > /dev/null export host_ip=${ip_address} -export DBQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export DBQNA_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export DBQNA_TGI_SERVICE_PORT=8008 export DBQNA_TGI_LLM_ENDPOINT="http://${host_ip}:${DBQNA_TGI_SERVICE_PORT}" export DBQNA_LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" diff --git a/DBQnA/docker_compose/intel/cpu/xeon/README.md b/DBQnA/docker_compose/intel/cpu/xeon/README.md index c227e1fe46..a979a43da6 100644 --- a/DBQnA/docker_compose/intel/cpu/xeon/README.md +++ b/DBQnA/docker_compose/intel/cpu/xeon/README.md @@ -36,10 +36,10 @@ Then run the command `docker images`, you will have the following Docker Images: We set default model as "mistralai/Mistral-7B-Instruct-v0.3", 
change "LLM_MODEL_ID" in following Environment Variables setting if you want to use other models. -If use gated models, you also need to provide [huggingface token](https://huggingface.co/docs/hub/security-tokens) to "HUGGINGFACEHUB_API_TOKEN" environment variable. +If use gated models, you also need to provide [huggingface token](https://huggingface.co/docs/hub/security-tokens) to "HF_TOKEN" environment variable. ```bash -export HUGGINGFACEHUB_API_TOKEN="xxx" +export HF_TOKEN="xxx" ``` ### 2.1 Setup Environment Variables @@ -61,7 +61,7 @@ export https_proxy=${https_proxy} export TGI_PORT=8008 export TGI_LLM_ENDPOINT=http://${host_ip}:${TGI_PORT} -export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export LLM_MODEL_ID="mistralai/Mistral-7B-Instruct-v0.3" export POSTGRES_USER=postgres export POSTGRES_PASSWORD=testpwd @@ -109,7 +109,7 @@ docker run --name test-text2sql-postgres --ipc=host -e POSTGRES_USER=${POSTGRES_ ```bash -docker run -d --name="test-text2sql-tgi-endpoint" --ipc=host -p $TGI_PORT:80 -v ./data:/data --shm-size 1g -e HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -e HF_TOKEN=${HF_TOKEN} -e model=${model} ghcr.io/huggingface/text-generation-inference:2.4.1 --model-id $model +docker run -d --name="test-text2sql-tgi-endpoint" --ipc=host -p $TGI_PORT:80 -v ./data:/data --shm-size 1g -e HF_TOKEN=${HF_TOKEN} -e HF_TOKEN=${HF_TOKEN} -e model=${model} ghcr.io/huggingface/text-generation-inference:2.4.1 --model-id $model ``` - Start Text-to-SQL Service diff --git a/DBQnA/docker_compose/intel/cpu/xeon/compose.yaml b/DBQnA/docker_compose/intel/cpu/xeon/compose.yaml index b96a71d01d..1e66ef992f 100644 --- a/DBQnA/docker_compose/intel/cpu/xeon/compose.yaml +++ b/DBQnA/docker_compose/intel/cpu/xeon/compose.yaml @@ -13,8 +13,8 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} + 
HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} shm_size: 1g command: --model-id ${LLM_MODEL_ID} diff --git a/DBQnA/docker_compose/intel/cpu/xeon/set_env.sh b/DBQnA/docker_compose/intel/cpu/xeon/set_env.sh index f05e9c871c..3990c7b114 100755 --- a/DBQnA/docker_compose/intel/cpu/xeon/set_env.sh +++ b/DBQnA/docker_compose/intel/cpu/xeon/set_env.sh @@ -9,8 +9,8 @@ popd > /dev/null export host_ip=${ip_address} export no_proxy=$no_proxy,$host_ip,dbqna-xeon-react-ui-server,text2sql-service,tgi-service,postgres-container -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} +export HF_TOKEN=${HF_TOKEN} export POSTGRES_USER=postgres export POSTGRES_PASSWORD=testpwd export POSTGRES_DB=chinook diff --git a/DBQnA/tests/README.md b/DBQnA/tests/README.md index 5d6dc16a10..951d7da1c7 100644 --- a/DBQnA/tests/README.md +++ b/DBQnA/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test From 35e0ae447d714c6db741f919aa5e9d57b7479e8c Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Thu, 5 Jun 2025 15:19:15 +0800 Subject: [PATCH 088/217] update secrets token name for ChatQnA. 
(#2029) Signed-off-by: ZePan110 --- ChatQnA/benchmark_chatqna.yaml | 2 +- ChatQnA/docker_compose/amd/gpu/rocm/README.md | 2 +- ChatQnA/docker_compose/amd/gpu/rocm/set_env.sh | 2 +- ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen.sh | 2 +- .../docker_compose/amd/gpu/rocm/set_env_faqgen_vllm.sh | 2 +- ChatQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh | 2 +- ChatQnA/docker_compose/intel/cpu/aipc/README.md | 10 +++++----- ChatQnA/docker_compose/intel/cpu/aipc/compose.yaml | 6 +++--- ChatQnA/docker_compose/intel/cpu/aipc/set_env.sh | 6 +++--- ChatQnA/docker_compose/intel/cpu/xeon/README.md | 2 +- ChatQnA/docker_compose/intel/cpu/xeon/README_faqgen.md | 4 ++-- .../docker_compose/intel/cpu/xeon/README_mariadb.md | 2 +- .../docker_compose/intel/cpu/xeon/README_pinecone.md | 4 ++-- ChatQnA/docker_compose/intel/cpu/xeon/compose.yaml | 8 ++++---- .../docker_compose/intel/cpu/xeon/compose_faqgen.yaml | 6 +++--- .../intel/cpu/xeon/compose_faqgen_tgi.yaml | 6 +++--- .../docker_compose/intel/cpu/xeon/compose_mariadb.yaml | 8 ++++---- .../docker_compose/intel/cpu/xeon/compose_milvus.yaml | 8 ++++---- .../intel/cpu/xeon/compose_pinecone.yaml | 8 ++++---- .../docker_compose/intel/cpu/xeon/compose_qdrant.yaml | 6 +++--- .../docker_compose/intel/cpu/xeon/compose_remote.yaml | 6 +++--- ChatQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml | 8 ++++---- .../intel/cpu/xeon/compose_without_rerank.yaml | 6 +++--- ChatQnA/docker_compose/intel/cpu/xeon/set_env.sh | 1 - .../docker_compose/intel/cpu/xeon/set_env_mariadb.sh | 6 +++--- ChatQnA/docker_compose/intel/hpu/gaudi/compose.yaml | 6 +++--- .../docker_compose/intel/hpu/gaudi/compose_faqgen.yaml | 4 ++-- .../intel/hpu/gaudi/compose_faqgen_tgi.yaml | 6 +++--- .../intel/hpu/gaudi/compose_guardrails.yaml | 10 +++++----- .../docker_compose/intel/hpu/gaudi/compose_tgi.yaml | 6 +++--- .../intel/hpu/gaudi/compose_without_rerank.yaml | 6 +++--- .../intel/hpu/gaudi/how_to_validate_service.md | 2 +- 
ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh | 4 ++-- .../docker_compose/intel/hpu/gaudi/set_env_faqgen.sh | 1 - ChatQnA/docker_compose/nvidia/gpu/README.md | 4 ++-- ChatQnA/docker_compose/nvidia/gpu/compose.yaml | 6 +++--- ChatQnA/tests/README.md | 2 +- 37 files changed, 89 insertions(+), 91 deletions(-) diff --git a/ChatQnA/benchmark_chatqna.yaml b/ChatQnA/benchmark_chatqna.yaml index e528bb9d7a..815523191f 100644 --- a/ChatQnA/benchmark_chatqna.yaml +++ b/ChatQnA/benchmark_chatqna.yaml @@ -5,7 +5,7 @@ deploy: device: gaudi version: 1.3.0 modelUseHostPath: /mnt/models - HUGGINGFACEHUB_API_TOKEN: "" # mandatory + HF_TOKEN: "" # mandatory node: [1, 2, 4, 8] namespace: "" timeout: 1000 # timeout in seconds for services to be ready, default 30 minutes diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/README.md b/ChatQnA/docker_compose/amd/gpu/rocm/README.md index 4d968b84eb..0ce4e88958 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/README.md +++ b/ChatQnA/docker_compose/amd/gpu/rocm/README.md @@ -64,7 +64,7 @@ Set the values of the variables: Setting variables in the operating system environment: ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export HF_TOKEN="Your_HuggingFace_API_Token" source ./set_env_*.sh # replace the script name with the appropriate one ``` diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/set_env.sh b/ChatQnA/docker_compose/amd/gpu/rocm/set_env.sh index 5fcdad0a06..3d729dd0da 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/set_env.sh +++ b/ChatQnA/docker_compose/amd/gpu/rocm/set_env.sh @@ -6,7 +6,7 @@ export HOST_IP=${ip_address} export HOST_IP_EXTERNAL=${ip_address} export CHATQNA_EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" -export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export CHATQNA_LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" export CHATQNA_RERANK_MODEL_ID="BAAI/bge-reranker-base" diff --git 
a/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen.sh b/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen.sh index 543119eadc..706eb64ac0 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen.sh +++ b/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen.sh @@ -6,7 +6,7 @@ export HOST_IP=${ip_address} export HOST_IP_EXTERNAL=${ip_address} export CHATQNA_EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" -export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export CHATQNA_LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" export CHATQNA_RERANK_MODEL_ID="BAAI/bge-reranker-base" diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen_vllm.sh b/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen_vllm.sh index d2462d2646..35379df191 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen_vllm.sh +++ b/ChatQnA/docker_compose/amd/gpu/rocm/set_env_faqgen_vllm.sh @@ -6,7 +6,7 @@ export HOST_IP=${ip_address} export HOST_IP_EXTERNAL=${ip_address} export CHATQNA_EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" -export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export CHATQNA_LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" export CHATQNA_RERANK_MODEL_ID="BAAI/bge-reranker-base" diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/ChatQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh index 0000b233e1..ae31cee3cf 100644 --- a/ChatQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/ChatQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -6,7 +6,7 @@ export HOST_IP=${ip_address} export HOST_IP_EXTERNAL=${ip_address} export CHATQNA_EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" -export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export CHATQNA_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export CHATQNA_LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" export CHATQNA_RERANK_MODEL_ID="BAAI/bge-reranker-base" diff 
--git a/ChatQnA/docker_compose/intel/cpu/aipc/README.md b/ChatQnA/docker_compose/intel/cpu/aipc/README.md index 77d7ddfcd0..1bf2b9c674 100644 --- a/ChatQnA/docker_compose/intel/cpu/aipc/README.md +++ b/ChatQnA/docker_compose/intel/cpu/aipc/README.md @@ -22,7 +22,7 @@ cd GenAIExamples/ChatQnA/docker_compose/intel/cpu/aipc 1. Set the required environment variables: ```bash - export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" + export HF_TOKEN="Your_Huggingface_API_Token" ``` 2. If you are in a proxy environment, also set the proxy-related environment variables: @@ -160,12 +160,12 @@ export host_ip="External_Public_IP" For Linux users, please run `hostname -I | awk '{print $1}'`. For Windows users, please run `ipconfig | findstr /i "IPv4"` to get the external public ip. -**Export the value of your Huggingface API token to the `HUGGINGFACEHUB_API_TOKEN` environment variable** +**Export the value of your Huggingface API token to the `HF_TOKEN` environment variable** > Change the Your_Huggingface_API_Token below with tyour actual Huggingface API Token value ``` -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` **Append the value of the public IP address to the no_proxy list if you are in a proxy environment** @@ -183,7 +183,7 @@ export https_proxy=${your_http_proxy} export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" export RERANK_MODEL_ID="BAAI/bge-reranker-base" export INDEX_NAME="rag-redis" -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export OLLAMA_HOST=${host_ip} export OLLAMA_MODEL="llama3.2" ``` @@ -194,7 +194,7 @@ export OLLAMA_MODEL="llama3.2" set EMBEDDING_MODEL_ID=BAAI/bge-base-en-v1.5 set RERANK_MODEL_ID=BAAI/bge-reranker-base set INDEX_NAME=rag-redis -set HUGGINGFACEHUB_API_TOKEN=%HUGGINGFACEHUB_API_TOKEN% +set HF_TOKEN=%HF_TOKEN% set OLLAMA_HOST=host.docker.internal set OLLAMA_MODEL="llama3.2" ``` diff --git 
a/ChatQnA/docker_compose/intel/cpu/aipc/compose.yaml b/ChatQnA/docker_compose/intel/cpu/aipc/compose.yaml index 9035642c5d..2afd0d9181 100644 --- a/ChatQnA/docker_compose/intel/cpu/aipc/compose.yaml +++ b/ChatQnA/docker_compose/intel/cpu/aipc/compose.yaml @@ -24,7 +24,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] interval: 10s @@ -60,7 +60,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped @@ -76,7 +76,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 command: --model-id ${RERANK_MODEL_ID} --auto-truncate diff --git a/ChatQnA/docker_compose/intel/cpu/aipc/set_env.sh b/ChatQnA/docker_compose/intel/cpu/aipc/set_env.sh index 3ee4cd6d6c..f2cfdf6fe7 100644 --- a/ChatQnA/docker_compose/intel/cpu/aipc/set_env.sh +++ b/ChatQnA/docker_compose/intel/cpu/aipc/set_env.sh @@ -9,15 +9,15 @@ popd > /dev/null export host_ip=$(hostname -I | awk '{print $1}') -if [ -z "${HUGGINGFACEHUB_API_TOKEN}" ]; then - echo "Error: HUGGINGFACEHUB_API_TOKEN is not set. Please set HUGGINGFACEHUB_API_TOKEN." +if [ -z "${HF_TOKEN}" ]; then + echo "Error: HF_TOKEN is not set. Please set HF_TOKEN." fi if [ -z "${host_ip}" ]; then echo "Error: host_ip is not set. Please set host_ip first." 
fi -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" export RERANK_MODEL_ID="BAAI/bge-reranker-base" export INDEX_NAME="rag-redis" diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/README.md b/ChatQnA/docker_compose/intel/cpu/xeon/README.md index 166dc50c40..f8ac050355 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/README.md +++ b/ChatQnA/docker_compose/intel/cpu/xeon/README.md @@ -45,7 +45,7 @@ To set up environment variables for deploying ChatQnA services, set up some para ``` export host_ip="External_Public_IP" #ip address of the node -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" export http_proxy="Your_HTTP_Proxy" #http proxy if any export https_proxy="Your_HTTPs_Proxy" #https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip #additional no proxies if needed diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/README_faqgen.md b/ChatQnA/docker_compose/intel/cpu/xeon/README_faqgen.md index c783161288..4210415db5 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/README_faqgen.md +++ b/ChatQnA/docker_compose/intel/cpu/xeon/README_faqgen.md @@ -129,7 +129,7 @@ Then run the command `docker images`, you will have the following Docker Images: We set default model as "meta-llama/Meta-Llama-3-8B-Instruct", change "LLM_MODEL_ID" in following Environment Variables setting if you want to use other models. -If use gated models, you also need to provide [huggingface token](https://huggingface.co/docs/hub/security-tokens) to "HUGGINGFACEHUB_API_TOKEN" environment variable. +If you use gated models, you also need to provide a [huggingface token](https://huggingface.co/docs/hub/security-tokens) in the "HF_TOKEN" environment variable. 
### Setup Environment Variables @@ -145,7 +145,7 @@ export LLM_SERVICE_PORT=9000 export FAQGEN_BACKEND_PORT=8888 export FAQGen_COMPONENT_NAME="OpeaFaqGenvLLM" export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" -export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token} +export HF_TOKEN=${your_hf_api_token} export MEGA_SERVICE_HOST_IP=${host_ip} export LLM_SERVICE_HOST_IP=${host_ip} export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}" diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/README_mariadb.md b/ChatQnA/docker_compose/intel/cpu/xeon/README_mariadb.md index 4717e61109..31681566a7 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/README_mariadb.md +++ b/ChatQnA/docker_compose/intel/cpu/xeon/README_mariadb.md @@ -114,7 +114,7 @@ export host_ip="External_Public_IP" > Change to your actual Huggingface API Token value ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` **Append the value of the public IP address to the no_proxy list if you are in a proxy environment** diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md b/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md index b26435c335..6c90228430 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md +++ b/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md @@ -25,7 +25,7 @@ To set up environment variables for deploying ChatQnA services, follow these ste ```bash # Example: host_ip="192.168.1.1" export host_ip="External_Public_IP" - export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" + export HF_TOKEN="Your_Huggingface_API_Token" export PINECONE_API_KEY="Pinecone_API_Key" export PINECONE_INDEX_NAME="Pinecone_Index_Name" export INDEX_NAME="Pinecone_Index_Name" @@ -201,7 +201,7 @@ For users in China who are unable to download models directly from Huggingface, ```bash # Example: host_ip="192.168.1.1" export host_ip="External_Public_IP" - export 
HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" + export HF_TOKEN="Your_Huggingface_API_Token" # Example: NGINX_PORT=80 export NGINX_PORT=${your_nginx_port} export PINECONE_API_KEY="Pinecone_API_Key" diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose.yaml index 1e5fef6d40..2f6eb00642 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose.yaml @@ -31,7 +31,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] interval: 10s @@ -67,7 +67,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped @@ -83,7 +83,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 command: --model-id ${RERANK_MODEL_ID} --auto-truncate @@ -99,7 +99,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LLM_MODEL_ID: ${LLM_MODEL_ID} VLLM_TORCH_PROFILER_DIR: "/mnt" VLLM_CPU_KVCACHE_SPACE: 40 diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen.yaml index eb31dfb1fa..2c772044a7 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen.yaml 
@@ -24,7 +24,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] interval: 10s @@ -60,7 +60,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped @@ -76,7 +76,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 command: --model-id ${RERANK_MODEL_ID} --auto-truncate diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen_tgi.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen_tgi.yaml index a66be60327..9750a4fa98 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen_tgi.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_faqgen_tgi.yaml @@ -24,7 +24,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] interval: 10s @@ -60,7 +60,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped @@ -76,7 +76,7 @@ services: no_proxy: ${no_proxy} 
http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 command: --model-id ${RERANK_MODEL_ID} --auto-truncate diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_mariadb.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_mariadb.yaml index 9e109e6144..9731f011f3 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_mariadb.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_mariadb.yaml @@ -35,7 +35,7 @@ services: DATAPREP_COMPONENT_NAME: "OPEA_DATAPREP_MARIADBVECTOR" MARIADB_CONNECTION_URL: mariadb+mariadbconnector://${MARIADB_USER}:${MARIADB_PASSWORD}@mariadb-server:3306/${MARIADB_DATABASE} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] interval: 10s @@ -69,7 +69,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} MARIADB_CONNECTION_URL: mariadb+mariadbconnector://${MARIADB_USER}:${MARIADB_PASSWORD}@mariadb-server:3306/${MARIADB_DATABASE} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_MARIADBVECTOR" restart: unless-stopped @@ -85,7 +85,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 command: --model-id ${RERANK_MODEL_ID} --auto-truncate @@ -101,7 +101,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LLM_MODEL_ID: ${LLM_MODEL_ID} VLLM_TORCH_PROFILER_DIR: "/mnt" 
VLLM_CPU_KVCACHE_SPACE: 40 diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml index eb81c3ec2e..2dbf25ca05 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml @@ -75,7 +75,7 @@ services: MILVUS_HOST: ${host_ip} MILVUS_PORT: 19530 TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} EMBEDDING_MODEL_ID: ${EMBEDDING_MODEL_ID} LOGFLAG: ${LOGFLAG} healthcheck: @@ -107,7 +107,7 @@ services: MILVUS_HOST: ${host_ip} MILVUS_PORT: 19530 TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_MILVUS" restart: unless-stopped @@ -138,7 +138,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 command: --model-id ${RERANK_MODEL_ID} --auto-truncate @@ -155,7 +155,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LLM_MODEL_ID: ${LLM_MODEL_ID} VLLM_TORCH_PROFILER_DIR: "/mnt" healthcheck: diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_pinecone.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_pinecone.yaml index 8a2af3c117..917cfd26a8 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_pinecone.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_pinecone.yaml @@ -20,7 +20,7 @@ services: PINECONE_INDEX_NAME: ${PINECONE_INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY} - 
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} DATAPREP_COMPONENT_NAME: "OPEA_DATAPREP_PINECONE" healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -55,7 +55,7 @@ services: PINECONE_INDEX_NAME: ${PINECONE_INDEX_NAME} LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_PINECONE" restart: unless-stopped @@ -71,7 +71,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 command: --model-id ${RERANK_MODEL_ID} --auto-truncate @@ -87,7 +87,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LLM_MODEL_ID: ${LLM_MODEL_ID} VLLM_TORCH_PROFILER_DIR: "/mnt" command: --model $LLM_MODEL_ID --host 0.0.0.0 --port 80 diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_qdrant.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_qdrant.yaml index 38cad037a4..b5e475185f 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_qdrant.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_qdrant.yaml @@ -24,7 +24,7 @@ services: QDRANT_PORT: 6333 QDRANT_INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} DATAPREP_COMPONENT_NAME: "OPEA_DATAPREP_QDRANT" healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -76,7 +76,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: 
${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 command: --model-id ${RERANK_MODEL_ID} --auto-truncate @@ -92,7 +92,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LLM_MODEL_ID: ${LLM_MODEL_ID} VLLM_TORCH_PROFILER_DIR: "/mnt" command: --model $LLM_MODEL_ID --host 0.0.0.0 --port 80 diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_remote.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_remote.yaml index 2acc51bbe4..a69a420aaa 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_remote.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_remote.yaml @@ -31,7 +31,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} tei-embedding-service: image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 container_name: tei-embedding-server @@ -61,7 +61,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped @@ -77,7 +77,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 command: --model-id ${RERANK_MODEL_ID} --auto-truncate diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml index 434ae34eac..b57be60cf1 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml +++ 
b/ChatQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml @@ -24,7 +24,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] interval: 10s @@ -60,7 +60,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped @@ -76,7 +76,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 command: --model-id ${RERANK_MODEL_ID} --auto-truncate @@ -92,7 +92,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 command: --model-id ${LLM_MODEL_ID} --cuda-graphs 0 diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml index b813852c74..1dd80c7106 100644 --- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml +++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml @@ -24,7 +24,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] interval: 10s @@ -60,7 +60,7 @@ services: 
REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped @@ -76,7 +76,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LLM_MODEL_ID: ${LLM_MODEL_ID} VLLM_TORCH_PROFILER_DIR: "/mnt" command: --model $LLM_MODEL_ID --host 0.0.0.0 --port 80 diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/set_env.sh b/ChatQnA/docker_compose/intel/cpu/xeon/set_env.sh index 2959f94321..e32a53ac1f 100755 --- a/ChatQnA/docker_compose/intel/cpu/xeon/set_env.sh +++ b/ChatQnA/docker_compose/intel/cpu/xeon/set_env.sh @@ -7,7 +7,6 @@ pushd "../../../../../" > /dev/null source .set_env.sh popd > /dev/null -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export HF_TOKEN=${HF_TOKEN} export host_ip=${ip_address} export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/set_env_mariadb.sh b/ChatQnA/docker_compose/intel/cpu/xeon/set_env_mariadb.sh index 88ae5c0eec..2ef732aeb0 100755 --- a/ChatQnA/docker_compose/intel/cpu/xeon/set_env_mariadb.sh +++ b/ChatQnA/docker_compose/intel/cpu/xeon/set_env_mariadb.sh @@ -7,15 +7,15 @@ pushd "../../../../../" > /dev/null source .set_env.sh popd > /dev/null -if [ -z "${HUGGINGFACEHUB_API_TOKEN}" ]; then - echo "Error: HUGGINGFACEHUB_API_TOKEN is not set. Please set HUGGINGFACEHUB_API_TOKEN." +if [ -z "${HF_TOKEN}" ]; then + echo "Error: HF_TOKEN is not set. Please set HF_TOKEN." 
fi export host_ip=$(hostname -I | awk '{print $1}') export MARIADB_DATABASE="vectordb" export MARIADB_USER="chatqna" export MARIADB_PASSWORD="password" -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" export RERANK_MODEL_ID="BAAI/bge-reranker-base" export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose.yaml index 49d7ff99a5..ed9d3ffc1d 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/compose.yaml @@ -31,7 +31,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] interval: 10s @@ -67,7 +67,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} restart: unless-stopped tei-reranking-service: image: ghcr.io/huggingface/tei-gaudi:1.5.0 @@ -101,7 +101,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HABANA_VISIBLE_DEVICES: all OMPI_MCA_btl_vader_single_copy_mechanism: none LLM_MODEL_ID: ${LLM_MODEL_ID} diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen.yaml index 951956be8f..09a94df962 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen.yaml +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen.yaml @@ -24,7 +24,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: 
http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -61,7 +61,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} restart: unless-stopped tei-reranking-service: image: ghcr.io/huggingface/tei-gaudi:1.5.0 diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen_tgi.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen_tgi.yaml index 8c2b0d1d54..01c55de853 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen_tgi.yaml +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_faqgen_tgi.yaml @@ -24,7 +24,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -61,7 +61,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} restart: unless-stopped tei-reranking-service: image: ghcr.io/huggingface/tei-gaudi:1.5.0 @@ -95,7 +95,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 HABANA_VISIBLE_DEVICES: all diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_guardrails.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_guardrails.yaml index 7f44764413..34005977be 
100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_guardrails.yaml +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_guardrails.yaml @@ -24,7 +24,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] interval: 10s @@ -42,7 +42,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HABANA_VISIBLE_DEVICES: all OMPI_MCA_btl_vader_single_copy_mechanism: none GURADRAILS_MODEL_ID: ${GURADRAILS_MODEL_ID} @@ -73,7 +73,7 @@ services: https_proxy: ${https_proxy} SAFETY_GUARD_MODEL_ID: ${GURADRAILS_MODEL_ID} SAFETY_GUARD_ENDPOINT: http://vllm-guardrails-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} restart: unless-stopped tei-embedding-service: image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 @@ -104,7 +104,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped @@ -140,7 +140,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HABANA_VISIBLE_DEVICES: all OMPI_MCA_btl_vader_single_copy_mechanism: none LLM_MODEL_ID: ${LLM_MODEL_ID} diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml index 02d99098b6..45294816dd 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml +++ 
b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml @@ -24,7 +24,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] interval: 10s @@ -60,7 +60,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped @@ -96,7 +96,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 HABANA_VISIBLE_DEVICES: all diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_without_rerank.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_without_rerank.yaml index 9704984f1a..1afc2ae7a2 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_without_rerank.yaml +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_without_rerank.yaml @@ -24,7 +24,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] interval: 10s @@ -60,7 +60,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped @@ -75,7 
+75,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HABANA_VISIBLE_DEVICES: all OMPI_MCA_btl_vader_single_copy_mechanism: none LLM_MODEL_ID: ${LLM_MODEL_ID} diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/how_to_validate_service.md b/ChatQnA/docker_compose/intel/hpu/gaudi/how_to_validate_service.md index ce515d4509..98e97fb19c 100644 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/how_to_validate_service.md +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/how_to_validate_service.md @@ -123,7 +123,7 @@ View the docker input parameters in `./ChatQnA/docker_compose/intel/hpu/gaudi/co environment: http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} HABANA_VISIBLE_DEVICES: all OMPI_MCA_btl_vader_single_copy_mechanism: none ENABLE_HPU_GRAPH: true diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh b/ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh index 1d0409eccd..1a3acef274 100755 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh @@ -42,7 +42,7 @@ popd > /dev/null # Prompt the user for each required environment variable prompt_for_env_var "EMBEDDING_MODEL_ID" "Enter the EMBEDDING_MODEL_ID" "BAAI/bge-base-en-v1.5" false -prompt_for_env_var "HUGGINGFACEHUB_API_TOKEN" "Enter the HUGGINGFACEHUB_API_TOKEN" "${HF_TOKEN}" true +prompt_for_env_var "HF_TOKEN" "Enter the HF_TOKEN" "${HF_TOKEN}" true prompt_for_env_var "RERANK_MODEL_ID" "Enter the RERANK_MODEL_ID" "BAAI/bge-reranker-base" false prompt_for_env_var "LLM_MODEL_ID" "Enter the LLM_MODEL_ID" "meta-llama/Meta-Llama-3-8B-Instruct" false prompt_for_env_var "INDEX_NAME" "Enter the INDEX_NAME" "rag-redis" false @@ -92,7 +92,7 @@ cat < .env # Set all required ENV values export TAG=${TAG} export EMBEDDING_MODEL_ID=${EMBEDDING_MODEL_ID} -export 
HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN +export HF_TOKEN=$HF_TOKEN export RERANK_MODEL_ID=${RERANK_MODEL_ID} export LLM_MODEL_ID=${LLM_MODEL_ID} export INDEX_NAME=${INDEX_NAME} diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/set_env_faqgen.sh b/ChatQnA/docker_compose/intel/hpu/gaudi/set_env_faqgen.sh index fde0b35fd0..8337b7eebc 100755 --- a/ChatQnA/docker_compose/intel/hpu/gaudi/set_env_faqgen.sh +++ b/ChatQnA/docker_compose/intel/hpu/gaudi/set_env_faqgen.sh @@ -7,7 +7,6 @@ pushd "../../../../../" > /dev/null source .set_env.sh popd > /dev/null -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export HF_TOKEN=${HF_TOKEN} export host_ip=${ip_address} export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" diff --git a/ChatQnA/docker_compose/nvidia/gpu/README.md b/ChatQnA/docker_compose/nvidia/gpu/README.md index b1ab3e8baf..546419d5f9 100644 --- a/ChatQnA/docker_compose/nvidia/gpu/README.md +++ b/ChatQnA/docker_compose/nvidia/gpu/README.md @@ -20,7 +20,7 @@ To set up environment variables for deploying ChatQnA services, follow these ste ```bash # Example: host_ip="192.168.1.1" export host_ip="External_Public_IP" - export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" + export HF_TOKEN="Your_Huggingface_API_Token" ``` 2. If you are in a proxy environment, also set the proxy-related environment variables: @@ -182,7 +182,7 @@ Change the `xxx_MODEL_ID` below for your needs. 
```bash # Example: host_ip="192.168.1.1" export host_ip="External_Public_IP" - export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" + export HF_TOKEN="Your_Huggingface_API_Token" # Example: NGINX_PORT=80 export NGINX_PORT=${your_nginx_port} ``` diff --git a/ChatQnA/docker_compose/nvidia/gpu/compose.yaml b/ChatQnA/docker_compose/nvidia/gpu/compose.yaml index 7a30c37c6b..7b47f46db1 100644 --- a/ChatQnA/docker_compose/nvidia/gpu/compose.yaml +++ b/ChatQnA/docker_compose/nvidia/gpu/compose.yaml @@ -24,7 +24,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] interval: 10s @@ -76,7 +76,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 command: --model-id ${RERANK_MODEL_ID} --auto-truncate @@ -98,7 +98,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 ipc: host diff --git a/ChatQnA/tests/README.md b/ChatQnA/tests/README.md index c622008650..1616127839 100644 --- a/ChatQnA/tests/README.md +++ b/ChatQnA/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test From d37062b0f52534b671c5653d091463cec2fb1ec9 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Thu, 5 Jun 2025 15:23:03 +0800 Subject: [PATCH 089/217] update secrets token name for CodeGen and CodeTrans (#2031) Signed-off-by: ZePan110 Co-authored-by: 
pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- CodeGen/benchmark/accuracy/run_acc.sh | 2 +- CodeGen/docker_compose/amd/gpu/rocm/README.md | 8 ++++---- CodeGen/docker_compose/amd/gpu/rocm/set_env.sh | 2 +- CodeGen/docker_compose/amd/gpu/rocm/set_env_vllm.sh | 2 +- CodeGen/docker_compose/intel/cpu/xeon/README.md | 8 ++++---- CodeGen/docker_compose/intel/cpu/xeon/compose.yaml | 12 ++++++------ .../intel/cpu/xeon/compose_remote.yaml | 6 +++--- CodeGen/docker_compose/intel/hpu/gaudi/README.md | 8 ++++---- CodeGen/docker_compose/intel/hpu/gaudi/compose.yaml | 12 ++++++------ CodeGen/docker_compose/intel/set_env.sh | 6 +++--- CodeGen/tests/README.md | 2 +- CodeGen/tests/test_compose_on_gaudi.sh | 2 +- CodeTrans/docker_compose/amd/gpu/rocm/README.md | 2 +- CodeTrans/docker_compose/amd/gpu/rocm/set_env.sh | 2 +- .../docker_compose/amd/gpu/rocm/set_env_vllm.sh | 2 +- CodeTrans/docker_compose/intel/cpu/xeon/README.md | 4 ++-- CodeTrans/docker_compose/intel/cpu/xeon/compose.yaml | 4 ++-- .../docker_compose/intel/cpu/xeon/compose_tgi.yaml | 4 ++-- CodeTrans/docker_compose/intel/hpu/gaudi/README.md | 4 ++-- .../docker_compose/intel/hpu/gaudi/compose.yaml | 4 ++-- .../docker_compose/intel/hpu/gaudi/compose_tgi.yaml | 4 ++-- CodeTrans/tests/README.md | 2 +- CodeTrans/tests/test_compose_on_gaudi.sh | 2 +- CodeTrans/tests/test_compose_on_xeon.sh | 2 +- CodeTrans/tests/test_compose_tgi_on_gaudi.sh | 2 +- CodeTrans/tests/test_compose_tgi_on_xeon.sh | 2 +- 26 files changed, 55 insertions(+), 55 deletions(-) diff --git a/CodeGen/benchmark/accuracy/run_acc.sh b/CodeGen/benchmark/accuracy/run_acc.sh index a5c451965c..7fb894451c 100644 --- a/CodeGen/benchmark/accuracy/run_acc.sh +++ b/CodeGen/benchmark/accuracy/run_acc.sh @@ -1,4 +1,4 @@ - +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 diff --git a/CodeGen/docker_compose/amd/gpu/rocm/README.md b/CodeGen/docker_compose/amd/gpu/rocm/README.md index 
a3718f7ad0..90e3d2564c 100644 --- a/CodeGen/docker_compose/amd/gpu/rocm/README.md +++ b/CodeGen/docker_compose/amd/gpu/rocm/README.md @@ -109,7 +109,7 @@ Key parameters are configured via environment variables set before running `dock | Environment Variable | Description | Default (Set Externally) | | :-------------------------------------- | :------------------------------------------------------------------------------------------------------------------ | :----------------------------------------------------------------------------------------------- | | `HOST_IP` | External IP address of the host machine. **Required.** | `your_external_ip_address` | -| `HUGGINGFACEHUB_API_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | +| `HF_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | | `LLM_MODEL_ID` | Hugging Face model ID for the CodeGen LLM (used by TGI/vLLM service). Configured within `compose.yaml` environment. | `Qwen/Qwen2.5-Coder-7B-Instruct` | | `EMBEDDING_MODEL_ID` | Hugging Face model ID for the embedding model (used by TEI service). Configured within `compose.yaml` environment. | `BAAI/bge-base-en-v1.5` | | `LLM_ENDPOINT` | Internal URL for the LLM serving endpoint (used by `codegen-llm-server`). Configured in `compose.yaml`. 
| `http://codegen-tgi-server:80/generate` or `http://codegen-vllm-server:8000/v1/chat/completions` | @@ -125,7 +125,7 @@ For TGI ```bash export host_ip="External_Public_IP" #ip address of the node -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" export http_proxy="Your_HTTP_Proxy" #http proxy if any export https_proxy="Your_HTTPs_Proxy" #https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip #additional no proxies if needed @@ -137,7 +137,7 @@ For vLLM ```bash export host_ip="External_Public_IP" #ip address of the node -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" export http_proxy="Your_HTTP_Proxy" #http proxy if any export https_proxy="Your_HTTPs_Proxy" #https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip #additional no proxies if needed @@ -422,7 +422,7 @@ Users can interact with the backend service using the `Neural Copilot` VS Code e ## Troubleshooting -- **Model Download Issues:** Check `HUGGINGFACEHUB_API_TOKEN`. Ensure internet connectivity or correct proxy settings. Check logs of `tgi-service`/`vllm-service` and `tei-embedding-server`. Gated models need prior Hugging Face access. +- **Model Download Issues:** Check `HF_TOKEN`. Ensure internet connectivity or correct proxy settings. Check logs of `tgi-service`/`vllm-service` and `tei-embedding-server`. Gated models need prior Hugging Face access. - **Connection Errors:** Verify `HOST_IP` is correct and accessible. Check `docker ps` for port mappings. Ensure `no_proxy` includes `HOST_IP` if using a proxy. Check logs of the service failing to connect (e.g., `codegen-backend-server` logs if it can't reach `codegen-llm-server`). - **"Container name is in use"**: Stop existing containers (`docker compose down`) or change `container_name` in `compose.yaml`. - **Resource Issues:** CodeGen models can be memory-intensive. Monitor host RAM usage. 
Increase Docker resources if needed. diff --git a/CodeGen/docker_compose/amd/gpu/rocm/set_env.sh b/CodeGen/docker_compose/amd/gpu/rocm/set_env.sh index afaa29b341..bef5865267 100644 --- a/CodeGen/docker_compose/amd/gpu/rocm/set_env.sh +++ b/CodeGen/docker_compose/amd/gpu/rocm/set_env.sh @@ -12,7 +12,7 @@ export EXTERNAL_HOST_IP=${ip_address} export CODEGEN_TGI_SERVICE_PORT=8028 ### A token for accessing repositories with models -export CODEGEN_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export CODEGEN_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} ### Model ID export CODEGEN_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" diff --git a/CodeGen/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/CodeGen/docker_compose/amd/gpu/rocm/set_env_vllm.sh index 475191539a..5f8e02796f 100644 --- a/CodeGen/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/CodeGen/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -13,7 +13,7 @@ export CODEGEN_VLLM_SERVICE_PORT=8028 export CODEGEN_VLLM_ENDPOINT="http://${HOST_IP}:${CODEGEN_VLLM_SERVICE_PORT}" ### A token for accessing repositories with models -export CODEGEN_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export CODEGEN_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} ### Model ID export CODEGEN_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct" diff --git a/CodeGen/docker_compose/intel/cpu/xeon/README.md b/CodeGen/docker_compose/intel/cpu/xeon/README.md index 88f0a51c6c..eff643ddef 100644 --- a/CodeGen/docker_compose/intel/cpu/xeon/README.md +++ b/CodeGen/docker_compose/intel/cpu/xeon/README.md @@ -42,7 +42,7 @@ This uses the default vLLM-based deployment profile (`codegen-xeon-vllm`). 
# Replace with your host's external IP address (do not use localhost or 127.0.0.1) export HOST_IP="your_external_ip_address" # Replace with your Hugging Face Hub API token - export HUGGINGFACEHUB_API_TOKEN="your_huggingface_token" + export HF_TOKEN="your_huggingface_token" # Optional: Configure proxy if needed # export http_proxy="your_http_proxy" @@ -90,7 +90,7 @@ The `compose.yaml` file uses Docker Compose profiles to select the LLM serving b - **Services Deployed:** `codegen-tgi-server`, `codegen-llm-server`, `codegen-tei-embedding-server`, `codegen-retriever-server`, `redis-vector-db`, `codegen-dataprep-server`, `codegen-backend-server`, `codegen-gradio-ui-server`. - **To Run:** ```bash - # Ensure environment variables (HOST_IP, HUGGINGFACEHUB_API_TOKEN) are set + # Ensure environment variables (HOST_IP, HF_TOKEN) are set docker compose --profile codegen-xeon-tgi up -d ``` @@ -103,7 +103,7 @@ Key parameters are configured via environment variables set before running `dock | Environment Variable | Description | Default (Set Externally) | | :-------------------------------------- | :------------------------------------------------------------------------------------------------------------------ | :--------------------------------------------- | ------------------------------------ | | `HOST_IP` | External IP address of the host machine. **Required.** | `your_external_ip_address` | -| `HUGGINGFACEHUB_API_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | +| `HF_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | | `LLM_MODEL_ID` | Hugging Face model ID for the CodeGen LLM (used by TGI/vLLM service). Configured within `compose.yaml` environment. | `Qwen/Qwen2.5-Coder-7B-Instruct` | | `EMBEDDING_MODEL_ID` | Hugging Face model ID for the embedding model (used by TEI service). Configured within `compose.yaml` environment. 
| `BAAI/bge-base-en-v1.5` | | `LLM_ENDPOINT` | Internal URL for the LLM serving endpoint (used by `codegen-llm-server`). Configured in `compose.yaml`. | `http://codegen-vllm | tgi-server:9000/v1/chat/completions` | @@ -216,7 +216,7 @@ Users can interact with the backend service using the `Neural Copilot` VS Code e ## Troubleshooting -- **Model Download Issues:** Check `HUGGINGFACEHUB_API_TOKEN`. Ensure internet connectivity or correct proxy settings. Check logs of `tgi-service`/`vllm-service` and `tei-embedding-server`. Gated models need prior Hugging Face access. +- **Model Download Issues:** Check `HF_TOKEN`. Ensure internet connectivity or correct proxy settings. Check logs of `tgi-service`/`vllm-service` and `tei-embedding-server`. Gated models need prior Hugging Face access. - **Connection Errors:** Verify `HOST_IP` is correct and accessible. Check `docker ps` for port mappings. Ensure `no_proxy` includes `HOST_IP` if using a proxy. Check logs of the service failing to connect (e.g., `codegen-backend-server` logs if it can't reach `codegen-llm-server`). - **"Container name is in use"**: Stop existing containers (`docker compose down`) or change `container_name` in `compose.yaml`. - **Resource Issues:** CodeGen models can be memory-intensive. Monitor host RAM usage. Increase Docker resources if needed. 
diff --git a/CodeGen/docker_compose/intel/cpu/xeon/compose.yaml b/CodeGen/docker_compose/intel/cpu/xeon/compose.yaml index eec356dd8c..fd891c93ce 100644 --- a/CodeGen/docker_compose/intel/cpu/xeon/compose.yaml +++ b/CodeGen/docker_compose/intel/cpu/xeon/compose.yaml @@ -17,7 +17,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} host_ip: ${host_ip} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"] @@ -39,7 +39,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} host_ip: ${host_ip} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"] @@ -56,7 +56,7 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} restart: unless-stopped llm-tgi-service: extends: llm-base @@ -140,7 +140,7 @@ services: REDIS_URL: ${REDIS_URL} REDIS_HOST: ${host_ip} INDEX_NAME: ${INDEX_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: true healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -162,7 +162,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} host_ip: ${host_ip} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD", "curl", "-f", "http://localhost:80/health"] interval: 10s @@ -202,7 +202,7 @@ services: REDIS_RETRIEVER_PORT: ${REDIS_RETRIEVER_PORT} INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: ${RETRIEVER_COMPONENT_NAME:-OPEA_RETRIEVER_REDIS} restart: 
unless-stopped diff --git a/CodeGen/docker_compose/intel/cpu/xeon/compose_remote.yaml b/CodeGen/docker_compose/intel/cpu/xeon/compose_remote.yaml index 637c1f4b49..23b8af1959 100644 --- a/CodeGen/docker_compose/intel/cpu/xeon/compose_remote.yaml +++ b/CodeGen/docker_compose/intel/cpu/xeon/compose_remote.yaml @@ -59,7 +59,7 @@ services: REDIS_URL: ${REDIS_URL} REDIS_HOST: ${host_ip} INDEX_NAME: ${INDEX_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: true restart: unless-stopped tei-embedding-serving: @@ -76,7 +76,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} host_ip: ${host_ip} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD", "curl", "-f", "http://${host_ip}:${TEI_EMBEDDER_PORT}/health"] interval: 10s @@ -116,7 +116,7 @@ services: REDIS_RETRIEVER_PORT: ${REDIS_RETRIEVER_PORT} INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: ${RETRIEVER_COMPONENT_NAME:-OPEA_RETRIEVER_REDIS} restart: unless-stopped diff --git a/CodeGen/docker_compose/intel/hpu/gaudi/README.md b/CodeGen/docker_compose/intel/hpu/gaudi/README.md index 4af050f051..04f20874fd 100644 --- a/CodeGen/docker_compose/intel/hpu/gaudi/README.md +++ b/CodeGen/docker_compose/intel/hpu/gaudi/README.md @@ -42,7 +42,7 @@ This uses the default vLLM-based deployment profile (`codegen-gaudi-vllm`). 
# Replace with your host's external IP address (do not use localhost or 127.0.0.1) export HOST_IP="your_external_ip_address" # Replace with your Hugging Face Hub API token - export HUGGINGFACEHUB_API_TOKEN="your_huggingface_token" + export HF_TOKEN="your_huggingface_token" # Optional: Configure proxy if needed # export http_proxy="your_http_proxy" @@ -93,7 +93,7 @@ The `compose.yaml` file uses Docker Compose profiles to select the LLM serving b - **Other Services:** Same CPU-based services as the vLLM profile. - **To Run:** ```bash - # Ensure environment variables (HOST_IP, HUGGINGFACEHUB_API_TOKEN) are set + # Ensure environment variables (HOST_IP, HF_TOKEN) are set docker compose --profile codegen-gaudi-tgi up -d ``` @@ -106,7 +106,7 @@ Key parameters are configured via environment variables set before running `dock | Environment Variable | Description | Default (Set Externally) | | :-------------------------------------- | :------------------------------------------------------------------------------------------------------------------ | :--------------------------------------------- | ------------------------------------ | | `HOST_IP` | External IP address of the host machine. **Required.** | `your_external_ip_address` | -| `HUGGINGFACEHUB_API_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | +| `HF_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | | `LLM_MODEL_ID` | Hugging Face model ID for the CodeGen LLM (used by TGI/vLLM service). Configured within `compose.yaml` environment. | `Qwen/Qwen2.5-Coder-7B-Instruct` | | `EMBEDDING_MODEL_ID` | Hugging Face model ID for the embedding model (used by TEI service). Configured within `compose.yaml` environment. | `BAAI/bge-base-en-v1.5` | | `LLM_ENDPOINT` | Internal URL for the LLM serving endpoint (used by `llm-codegen-vllm-server`). Configured in `compose.yaml`. 
| http://codegen-vllm | tgi-server:9000/v1/chat/completions` | @@ -224,7 +224,7 @@ Use the `Neural Copilot` extension configured with the CodeGen backend URL: `htt - Ensure host drivers and Habana Docker runtime are installed and working (`habana-container-runtime`). - Verify `runtime: habana` and volume mounts in `compose.yaml`. - Gaudi initialization can take significant time and memory. Monitor resource usage. -- **Model Download Issues:** Check `HUGGINGFACEHUB_API_TOKEN`, internet access, proxy settings. Check LLM service logs. +- **Model Download Issues:** Check `HF_TOKEN`, internet access, proxy settings. Check LLM service logs. - **Connection Errors:** Verify `HOST_IP`, ports, and proxy settings. Use `docker ps` and check service logs. ## Stopping the Application diff --git a/CodeGen/docker_compose/intel/hpu/gaudi/compose.yaml b/CodeGen/docker_compose/intel/hpu/gaudi/compose.yaml index fb9a78d252..3b9b5a00e4 100644 --- a/CodeGen/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/CodeGen/docker_compose/intel/hpu/gaudi/compose.yaml @@ -17,7 +17,7 @@ services: https_proxy: ${https_proxy} HABANA_VISIBLE_DEVICES: all OMPI_MCA_btl_vader_single_copy_mechanism: none - HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} ENABLE_HPU_GRAPH: true LIMIT_HPU_GRAPH: true USE_FLASH_ATTENTION: true @@ -46,7 +46,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HABANA_VISIBLE_DEVICES: all OMPI_MCA_btl_vader_single_copy_mechanism: none VLLM_SKIP_WARMUP: ${VLLM_SKIP_WARMUP:-false} @@ -71,7 +71,7 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} restart: unless-stopped llm-tgi-service: extends: llm-base @@ -156,7 +156,7 @@ services: REDIS_URL: ${REDIS_URL} REDIS_HOST: ${host_ip} INDEX_NAME: 
${INDEX_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: true healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -178,7 +178,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} host_ip: ${host_ip} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD", "curl", "-f", "http://localhost:80/health"] interval: 10s @@ -218,7 +218,7 @@ services: REDIS_RETRIEVER_PORT: ${REDIS_RETRIEVER_PORT} INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: ${RETRIEVER_COMPONENT_NAME:-OPEA_RETRIEVER_REDIS} restart: unless-stopped diff --git a/CodeGen/docker_compose/intel/set_env.sh b/CodeGen/docker_compose/intel/set_env.sh index ea48c198bb..28ed3aff46 100644 --- a/CodeGen/docker_compose/intel/set_env.sh +++ b/CodeGen/docker_compose/intel/set_env.sh @@ -7,9 +7,9 @@ source .set_env.sh popd > /dev/null export HOST_IP=$(hostname -I | awk '{print $1}') -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -if [ -z "${HUGGINGFACEHUB_API_TOKEN}" ]; then - echo "Error: HUGGINGFACEHUB_API_TOKEN is not set. Please set HUGGINGFACEHUB_API_TOKEN" +export HF_TOKEN=${HF_TOKEN} +if [ -z "${HF_TOKEN}" ]; then + echo "Error: HF_TOKEN is not set. 
Please set HF_TOKEN" fi if [ -z "${HOST_IP}" ]; then diff --git a/CodeGen/tests/README.md b/CodeGen/tests/README.md index 4909899be7..11efd37b0f 100644 --- a/CodeGen/tests/README.md +++ b/CodeGen/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test diff --git a/CodeGen/tests/test_compose_on_gaudi.sh b/CodeGen/tests/test_compose_on_gaudi.sh index 38354233d9..a86c5724a6 100644 --- a/CodeGen/tests/test_compose_on_gaudi.sh +++ b/CodeGen/tests/test_compose_on_gaudi.sh @@ -1,4 +1,4 @@ - +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 diff --git a/CodeTrans/docker_compose/amd/gpu/rocm/README.md b/CodeTrans/docker_compose/amd/gpu/rocm/README.md index 9fef7c8426..3c048905b1 100644 --- a/CodeTrans/docker_compose/amd/gpu/rocm/README.md +++ b/CodeTrans/docker_compose/amd/gpu/rocm/README.md @@ -62,7 +62,7 @@ Set the values of the variables: Setting variables in the operating system environment: ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export HF_TOKEN="Your_HuggingFace_API_Token" source ./set_env_*.sh # replace the script name with the appropriate one ``` diff --git a/CodeTrans/docker_compose/amd/gpu/rocm/set_env.sh b/CodeTrans/docker_compose/amd/gpu/rocm/set_env.sh index c1acc4464d..24a3a4d11c 100644 --- a/CodeTrans/docker_compose/amd/gpu/rocm/set_env.sh +++ b/CodeTrans/docker_compose/amd/gpu/rocm/set_env.sh @@ -21,7 +21,7 @@ export CODETRANS_TGI_SERVICE_PORT=8008 export CODETRANS_TGI_LLM_ENDPOINT="http://${HOST_IP}:${CODETRANS_TGI_SERVICE_PORT}" ### A token for accessing repositories with models -export CODETRANS_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export CODETRANS_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} ### The port of the LLM service. 
On this port, the LLM service will accept connections export CODETRANS_LLM_SERVICE_PORT=9000 diff --git a/CodeTrans/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/CodeTrans/docker_compose/amd/gpu/rocm/set_env_vllm.sh index ffcbd35df5..494df73a07 100644 --- a/CodeTrans/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/CodeTrans/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -21,7 +21,7 @@ export CODETRANS_VLLM_SERVICE_PORT=8008 export CODETRANS_LLM_ENDPOINT="http://${HOST_IP}:${CODETRANS_VLLM_SERVICE_PORT}" ### A token for accessing repositories with models -export CODETRANS_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export CODETRANS_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} ### The port of the LLM service. On this port, the LLM service will accept connections export CODETRANS_LLM_SERVICE_PORT=9000 diff --git a/CodeTrans/docker_compose/intel/cpu/xeon/README.md b/CodeTrans/docker_compose/intel/cpu/xeon/README.md index b01492ff12..bc107c53c0 100755 --- a/CodeTrans/docker_compose/intel/cpu/xeon/README.md +++ b/CodeTrans/docker_compose/intel/cpu/xeon/README.md @@ -41,7 +41,7 @@ To set up environment variables for deploying CodeTrans services, set up some pa ```bash export host_ip="External_Public_IP" # ip address of the node -export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export HF_TOKEN="Your_HuggingFace_API_Token" export http_proxy="Your_HTTP_Proxy" # http proxy if any export https_proxy="Your_HTTPs_Proxy" # https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed @@ -124,7 +124,7 @@ Key parameters are configured via environment variables set before running `dock | Environment Variable | Description | Default (Set Externally) | | :-------------------------------------- | :-------------------------------------------------------------------------------------------------------------------- | :------------------------------------ | | `HOST_IP` | External IP address of the host machine. 
**Required.** | `your_external_ip_address` | -| `HUGGINGFACEHUB_API_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | +| `HF_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | | `LLM_MODEL_ID` | Hugging Face model ID for the CodeTrans LLM (used by TGI/vLLM service). Configured within `compose.yaml` environment. | `mistralai/Mistral-7B-Instruct-v0.3` | | `LLM_ENDPOINT` | Internal URL for the LLM serving endpoint (used by `codetrans-xeon-llm-server`). Configured in `compose.yaml`. | `http://${HOST_IP}:8008` | | `LLM_COMPONENT_NAME` | LLM component name for the LLM Microservice. | `OpeaTextGenService` | diff --git a/CodeTrans/docker_compose/intel/cpu/xeon/compose.yaml b/CodeTrans/docker_compose/intel/cpu/xeon/compose.yaml index f4aa9f2b95..f950c770ec 100644 --- a/CodeTrans/docker_compose/intel/cpu/xeon/compose.yaml +++ b/CodeTrans/docker_compose/intel/cpu/xeon/compose.yaml @@ -14,7 +14,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LLM_MODEL_ID: ${LLM_MODEL_ID} VLLM_TORCH_PROFILER_DIR: "/mnt" healthcheck: @@ -39,7 +39,7 @@ services: LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} LLM_COMPONENT_NAME: ${LLM_COMPONENT_NAME} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} restart: unless-stopped codetrans-xeon-backend-server: image: ${REGISTRY:-opea}/codetrans:${TAG:-latest} diff --git a/CodeTrans/docker_compose/intel/cpu/xeon/compose_tgi.yaml b/CodeTrans/docker_compose/intel/cpu/xeon/compose_tgi.yaml index 77c668241c..1eda99bccc 100644 --- a/CodeTrans/docker_compose/intel/cpu/xeon/compose_tgi.yaml +++ b/CodeTrans/docker_compose/intel/cpu/xeon/compose_tgi.yaml @@ -14,7 +14,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} host_ip: ${host_ip} 
healthcheck: test: ["CMD-SHELL", "curl -f http://$host_ip:8008/health || exit 1"] @@ -38,7 +38,7 @@ services: LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} LLM_COMPONENT_NAME: ${LLM_COMPONENT_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} restart: unless-stopped codetrans-xeon-backend-server: image: ${REGISTRY:-opea}/codetrans:${TAG:-latest} diff --git a/CodeTrans/docker_compose/intel/hpu/gaudi/README.md b/CodeTrans/docker_compose/intel/hpu/gaudi/README.md index 00551eb406..4f242cff5d 100755 --- a/CodeTrans/docker_compose/intel/hpu/gaudi/README.md +++ b/CodeTrans/docker_compose/intel/hpu/gaudi/README.md @@ -41,7 +41,7 @@ To set up environment variables for deploying CodeTrans services, set up some pa ```bash export host_ip="External_Public_IP" # ip address of the node -export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export HF_TOKEN="Your_HuggingFace_API_Token" export http_proxy="Your_HTTP_Proxy" # http proxy if any export https_proxy="Your_HTTPs_Proxy" # https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed @@ -124,7 +124,7 @@ Key parameters are configured via environment variables set before running `dock | Environment Variable | Description | Default (Set Externally) | | :-------------------------------------- | :-------------------------------------------------------------------------------------------------------------------- | :------------------------------------ | | `HOST_IP` | External IP address of the host machine. **Required.** | `your_external_ip_address` | -| `HUGGINGFACEHUB_API_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | +| `HF_TOKEN` | Your Hugging Face Hub token for model access. **Required.** | `your_huggingface_token` | | `LLM_MODEL_ID` | Hugging Face model ID for the CodeTrans LLM (used by TGI/vLLM service). Configured within `compose.yaml` environment. 
| `mistralai/Mistral-7B-Instruct-v0.3` | | `LLM_ENDPOINT` | Internal URL for the LLM serving endpoint (used by `codetrans-gaudi-llm-server`). Configured in `compose.yaml`. | `http://${HOST_IP}:8008` | | `LLM_COMPONENT_NAME` | LLM component name for the LLM Microservice. | `OpeaTextGenService` | diff --git a/CodeTrans/docker_compose/intel/hpu/gaudi/compose.yaml b/CodeTrans/docker_compose/intel/hpu/gaudi/compose.yaml index f34fe5a1e4..60728feabf 100644 --- a/CodeTrans/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/CodeTrans/docker_compose/intel/hpu/gaudi/compose.yaml @@ -13,7 +13,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HABANA_VISIBLE_DEVICES: all OMPI_MCA_btl_vader_single_copy_mechanism: none LLM_MODEL_ID: ${LLM_MODEL_ID} @@ -45,7 +45,7 @@ services: LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} LLM_COMPONENT_NAME: ${LLM_COMPONENT_NAME} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} restart: unless-stopped codetrans-gaudi-backend-server: image: ${REGISTRY:-opea}/codetrans:${TAG:-latest} diff --git a/CodeTrans/docker_compose/intel/hpu/gaudi/compose_tgi.yaml b/CodeTrans/docker_compose/intel/hpu/gaudi/compose_tgi.yaml index 9bcc01f318..b2b4c268c8 100644 --- a/CodeTrans/docker_compose/intel/hpu/gaudi/compose_tgi.yaml +++ b/CodeTrans/docker_compose/intel/hpu/gaudi/compose_tgi.yaml @@ -13,7 +13,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 HABANA_VISIBLE_DEVICES: all @@ -42,7 +42,7 @@ services: LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} LLM_COMPONENT_NAME: ${LLM_COMPONENT_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} restart: unless-stopped 
codetrans-gaudi-backend-server: image: ${REGISTRY:-opea}/codetrans:${TAG:-latest} diff --git a/CodeTrans/tests/README.md b/CodeTrans/tests/README.md index 62edebc6a8..e4e9e135cc 100644 --- a/CodeTrans/tests/README.md +++ b/CodeTrans/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test diff --git a/CodeTrans/tests/test_compose_on_gaudi.sh b/CodeTrans/tests/test_compose_on_gaudi.sh index 600c20a0c3..298aa85900 100644 --- a/CodeTrans/tests/test_compose_on_gaudi.sh +++ b/CodeTrans/tests/test_compose_on_gaudi.sh @@ -38,7 +38,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} + export HF_TOKEN=${HF_TOKEN} export NGINX_PORT=80 export host_ip=${ip_address} source set_env.sh diff --git a/CodeTrans/tests/test_compose_on_xeon.sh b/CodeTrans/tests/test_compose_on_xeon.sh index 42f80469e0..12e96a9474 100644 --- a/CodeTrans/tests/test_compose_on_xeon.sh +++ b/CodeTrans/tests/test_compose_on_xeon.sh @@ -40,7 +40,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} + export HF_TOKEN=${HF_TOKEN} export NGINX_PORT=80 export host_ip=${ip_address} diff --git a/CodeTrans/tests/test_compose_tgi_on_gaudi.sh b/CodeTrans/tests/test_compose_tgi_on_gaudi.sh index 051afce9d4..a1c978b4b1 100644 --- a/CodeTrans/tests/test_compose_tgi_on_gaudi.sh +++ b/CodeTrans/tests/test_compose_tgi_on_gaudi.sh @@ -35,7 +35,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} + export HF_TOKEN=${HF_TOKEN} export NGINX_PORT=80 export host_ip=${ip_address} diff --git a/CodeTrans/tests/test_compose_tgi_on_xeon.sh 
b/CodeTrans/tests/test_compose_tgi_on_xeon.sh index 00da9bde73..e5393453b2 100644 --- a/CodeTrans/tests/test_compose_tgi_on_xeon.sh +++ b/CodeTrans/tests/test_compose_tgi_on_xeon.sh @@ -35,7 +35,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} + export HF_TOKEN=${HF_TOKEN} export NGINX_PORT=80 export host_ip=${ip_address} From ef51512ddab9f36ab87e3fefda642ece48f28216 Mon Sep 17 00:00:00 2001 From: Zhenzhong Xu Date: Fri, 6 Jun 2025 13:58:28 +0800 Subject: [PATCH 090/217] [DocSum] Aligned the output format (#1948) --- .../intel/cpu/xeon/compose.yaml | 1 + DocSum/docker_compose/intel/set_env.sh | 5 +++ DocSum/docsum.py | 39 +++++++++++++++++++ 3 files changed, 45 insertions(+) diff --git a/DocSum/docker_compose/intel/cpu/xeon/compose.yaml b/DocSum/docker_compose/intel/cpu/xeon/compose.yaml index 9f05963e7a..d6aa67ced3 100644 --- a/DocSum/docker_compose/intel/cpu/xeon/compose.yaml +++ b/DocSum/docker_compose/intel/cpu/xeon/compose.yaml @@ -17,6 +17,7 @@ services: HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} LLM_MODEL_ID: ${LLM_MODEL_ID} VLLM_TORCH_PROFILER_DIR: "/mnt" + VLLM_CPU_KVCACHE_SPACE: 40 healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"] interval: 10s diff --git a/DocSum/docker_compose/intel/set_env.sh b/DocSum/docker_compose/intel/set_env.sh index d2c061177d..1d9a013375 100644 --- a/DocSum/docker_compose/intel/set_env.sh +++ b/DocSum/docker_compose/intel/set_env.sh @@ -14,6 +14,11 @@ export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} export LLM_ENDPOINT_PORT=8008 export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" + +export BLOCK_SIZE=128 +export MAX_NUM_SEQS=256 +export MAX_SEQ_LEN_TO_CAPTURE=2048 +export NUM_CARDS=1 export MAX_INPUT_TOKENS=1024 export MAX_TOTAL_TOKENS=2048 diff --git a/DocSum/docsum.py b/DocSum/docsum.py index 786e48a264..32766925e0 100644 --- a/DocSum/docsum.py +++ b/DocSum/docsum.py @@ 
-3,6 +3,7 @@ import asyncio import base64 +import json import os import subprocess import uuid @@ -142,11 +143,49 @@ def read_text_from_file(file, save_file_name): return file_content +def align_generator(self, gen, **kwargs): + # OpenAI response format + # b'data:{"id":"","object":"text_completion","created":1725530204,"model":"meta-llama/Meta-Llama-3-8B-Instruct","system_fingerprint":"2.0.1-native","choices":[{"index":0,"delta":{"role":"assistant","content":"?"},"logprobs":null,"finish_reason":null}]}\n\n' + for line in gen: + line = line.decode("utf-8") + start = -1 + end = -1 + try: + start = line.find("{") + end = line.rfind("}") + 1 + if start == -1 or end <= start: + # Handle cases where '{' or '}' are not found or are in the wrong order + json_str = "" + else: + json_str = line[start:end] + except Exception as e: + print(f"Error finding JSON boundaries: {e}") + json_str = "" + + try: + # sometimes yield empty chunk, do a fallback here + json_data = json.loads(json_str) + if "ops" in json_data and "op" in json_data["ops"][0]: + if "value" in json_data["ops"][0] and isinstance(json_data["ops"][0]["value"], str): + yield f"data: {repr(json_data['ops'][0]['value'].encode('utf-8'))}\n\n" + else: + pass + elif ( + json_data["choices"][0]["finish_reason"] != "eos_token" + and "content" in json_data["choices"][0]["delta"] + ): + yield f"data: {repr(json_data['choices'][0]['delta']['content'].encode('utf-8'))}\n\n" + except Exception as e: + yield f"data: {repr(json_str.encode('utf-8'))}\n\n" + yield "data: [DONE]\n\n" + + class DocSumService: def __init__(self, host="0.0.0.0", port=8000): self.host = host self.port = port ServiceOrchestrator.align_inputs = align_inputs + ServiceOrchestrator.align_generator = align_generator self.megaservice = ServiceOrchestrator() self.megaservice_text_only = ServiceOrchestrator() self.endpoint = str(MegaServiceEndpoint.DOC_SUMMARY) From 771ce18e1b68c4bd6ad62138d80938ea74e7a4bf Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Fri, 6 
Jun 2025 16:35:32 +0800 Subject: [PATCH 091/217] update secrets token name for DocIndexRetriever. (#2035) Signed-off-by: ZePan110 --- .../docker_compose/intel/cpu/xeon/README.md | 4 ++-- .../docker_compose/intel/cpu/xeon/compose.yaml | 12 ++++++------ .../intel/cpu/xeon/compose_milvus.yaml | 12 ++++++------ .../intel/cpu/xeon/compose_without_rerank.yaml | 8 ++++---- .../docker_compose/intel/cpu/xeon/set_env.sh | 2 +- .../docker_compose/intel/hpu/gaudi/README.md | 2 +- .../docker_compose/intel/hpu/gaudi/compose.yaml | 10 +++++----- .../intel/hpu/gaudi/compose_milvus.yaml | 10 +++++----- .../docker_compose/intel/hpu/gaudi/set_env.sh | 2 +- DocIndexRetriever/tests/README.md | 2 +- .../tests/test_compose_without_rerank_on_xeon.sh | 11 +---------- 11 files changed, 33 insertions(+), 42 deletions(-) diff --git a/DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md b/DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md index 5d0ff79475..fdd8effc33 100644 --- a/DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md +++ b/DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md @@ -47,7 +47,7 @@ docker build --no-cache -t opea/doc-index-retriever:latest --build-arg https_pro ```bash export host_ip="YOUR IP ADDR" -export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token} +export HF_TOKEN=${your_hf_api_token} ``` Set environment variables by @@ -81,7 +81,7 @@ In that case, start Docker Containers with compose_without_rerank.yaml ```bash export host_ip="YOUR IP ADDR" -export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token} +export HF_TOKEN=${your_hf_api_token} export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" cd GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon docker compose -f compose_without_rerank.yaml up -d diff --git a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml index 252a01f2e9..457afaae46 100644 --- a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml +++ 
b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml @@ -28,7 +28,7 @@ services: REDIS_HOST: ${REDIS_HOST} INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -49,7 +49,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} host_ip: ${host_ip} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"] @@ -69,7 +69,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} LOGFLAG: ${LOGFLAG} restart: unless-stopped @@ -87,7 +87,7 @@ services: https_proxy: ${https_proxy} REDIS_URL: ${REDIS_URL} INDEX_NAME: ${INDEX_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" @@ -105,7 +105,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 host_ip: ${host_ip} @@ -129,7 +129,7 @@ services: https_proxy: ${https_proxy} RERANK_TYPE: ${RERANK_TYPE} TEI_RERANKING_ENDPOINT: ${TEI_RERANKING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 LOGFLAG: ${LOGFLAG} diff --git a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_milvus.yaml 
b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_milvus.yaml index 59c60c5e81..eeacffa17b 100644 --- a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_milvus.yaml +++ b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_milvus.yaml @@ -76,7 +76,7 @@ services: DATAPREP_COMPONENT_NAME: "OPEA_DATAPREP_MILVUS" MILVUS_HOST: ${MILVUS_HOST} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -107,7 +107,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} host_ip: ${host_ip} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"] @@ -130,7 +130,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} LOGFLAG: ${LOGFLAG} restart: unless-stopped @@ -148,7 +148,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} MILVUS_HOST: ${host_ip} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_MILVUS" @@ -167,7 +167,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 host_ip: ${host_ip} @@ -194,7 +194,7 @@ services: https_proxy: ${https_proxy} RERANK_TYPE: ${RERANK_TYPE} TEI_RERANKING_ENDPOINT: ${TEI_RERANKING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + 
HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 LOGFLAG: ${LOGFLAG} diff --git a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml index d99d8e7b35..99af075420 100644 --- a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml +++ b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml @@ -25,7 +25,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME:-rag-redis} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -46,7 +46,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} host_ip: ${host_ip} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"] @@ -66,7 +66,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 LOGFLAG: ${LOGFLAG} restart: unless-stopped @@ -84,7 +84,7 @@ services: https_proxy: ${https_proxy} REDIS_URL: redis://redis-vector-db:6379 INDEX_NAME: ${INDEX_NAME:-rag-redis} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" diff --git a/DocIndexRetriever/docker_compose/intel/cpu/xeon/set_env.sh b/DocIndexRetriever/docker_compose/intel/cpu/xeon/set_env.sh index ca8818e065..72cce36ebe 100644 --- a/DocIndexRetriever/docker_compose/intel/cpu/xeon/set_env.sh +++ 
b/DocIndexRetriever/docker_compose/intel/cpu/xeon/set_env.sh @@ -12,7 +12,7 @@ export RERANK_MODEL_ID="BAAI/bge-reranker-base" export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:6006" export TEI_RERANKING_ENDPOINT="http://${ip_address}:8808" export TGI_LLM_ENDPOINT="http://${ip_address}:8008" -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export MEGA_SERVICE_HOST_IP=${ip_address} export EMBEDDING_SERVICE_HOST_IP=${ip_address} export RETRIEVER_SERVICE_HOST_IP=${ip_address} diff --git a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/README.md b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/README.md index 01a4dceb38..f8b0dac2ee 100644 --- a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/README.md +++ b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/README.md @@ -43,7 +43,7 @@ docker build --no-cache -t opea/doc-index-retriever:latest --build-arg https_pro ```bash export host_ip="YOUR IP ADDR" -export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token} +export HF_TOKEN=${your_hf_api_token} export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" export RERANK_MODEL_ID="BAAI/bge-reranker-base" export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:8090" diff --git a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml index 3b17350218..ded82b6c1b 100644 --- a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml @@ -28,7 +28,7 @@ services: REDIS_URL: ${REDIS_URL} INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] interval: 10s @@ -76,7 +76,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HF_TOKEN: 
${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} restart: unless-stopped retriever: @@ -96,7 +96,7 @@ services: LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} restart: unless-stopped tei-reranking-service: image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 @@ -111,7 +111,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 host_ip: ${host_ip} @@ -135,7 +135,7 @@ services: https_proxy: ${https_proxy} RERANK_TYPE: ${RERANK_TYPE} TEI_RERANKING_ENDPOINT: ${TEI_RERANKING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 LOGFLAG: ${LOGFLAG} diff --git a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose_milvus.yaml b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose_milvus.yaml index 49f8d07a05..c05ec1cce4 100644 --- a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose_milvus.yaml +++ b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose_milvus.yaml @@ -76,7 +76,7 @@ services: DATAPREP_COMPONENT_NAME: "OPEA_DATAPREP_MILVUS" MILVUS_HOST: ${MILVUS_HOST} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:5000/v1/health_check || exit 1"] @@ -136,7 +136,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} LOGFLAG: ${LOGFLAG} 
restart: unless-stopped @@ -154,7 +154,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} MILVUS_HOST: ${host_ip} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_MILVUS" @@ -173,7 +173,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 host_ip: ${host_ip} @@ -200,7 +200,7 @@ services: https_proxy: ${https_proxy} RERANK_TYPE: ${RERANK_TYPE} TEI_RERANKING_ENDPOINT: ${TEI_RERANKING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 LOGFLAG: ${LOGFLAG} diff --git a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/set_env.sh b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/set_env.sh index 0c2b818df4..4d91fe236b 100644 --- a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/set_env.sh +++ b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/set_env.sh @@ -11,7 +11,7 @@ export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" export RERANK_MODEL_ID="BAAI/bge-reranker-base" export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:8090" export TEI_RERANKING_ENDPOINT="http://${ip_address}:8808" -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export MEGA_SERVICE_HOST_IP=${ip_address} export EMBEDDING_SERVICE_HOST_IP=${ip_address} export RETRIEVER_SERVICE_HOST_IP=${ip_address} diff --git a/DocIndexRetriever/tests/README.md b/DocIndexRetriever/tests/README.md index be057c4239..98b2476b8f 100644 --- a/DocIndexRetriever/tests/README.md +++ b/DocIndexRetriever/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export 
HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test diff --git a/DocIndexRetriever/tests/test_compose_without_rerank_on_xeon.sh b/DocIndexRetriever/tests/test_compose_without_rerank_on_xeon.sh index dde5d84ef9..37c477b2ad 100644 --- a/DocIndexRetriever/tests/test_compose_without_rerank_on_xeon.sh +++ b/DocIndexRetriever/tests/test_compose_without_rerank_on_xeon.sh @@ -39,16 +39,7 @@ function build_docker_images() { function start_services() { echo "Starting Docker Services...." cd $WORKPATH/docker_compose/intel/cpu/xeon - export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" - export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:6006" - export REDIS_URL="redis://${ip_address}:6379" - export INDEX_NAME="rag-redis" - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} - export MEGA_SERVICE_HOST_IP=${ip_address} - export EMBEDDING_SERVICE_HOST_IP=${ip_address} - export RETRIEVER_SERVICE_HOST_IP=${ip_address} - export host_ip=${ip_address} - export LOGFLAG=true + source ./set_env.sh # Start Docker Containers docker compose -f compose_without_rerank.yaml up -d From 8c0b9c4dcf9c78e3529bec9766b6b0baa5680eb6 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Fri, 6 Jun 2025 16:36:44 +0800 Subject: [PATCH 092/217] update secrets token name for EdgeCraftRag, FinanceAgent, GraphRAG and HybridRAG (#2037) Signed-off-by: ZePan110 --- EdgeCraftRAG/README.md | 2 +- EdgeCraftRAG/docker_compose/intel/gpu/arc/compose.yaml | 2 +- .../docker_compose/intel/gpu/arc/compose_gradio.yaml | 2 +- EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm.yaml | 2 +- EdgeCraftRAG/docker_compose/intel/gpu/arc/set_env.sh | 2 +- EdgeCraftRAG/tests/README.md | 2 +- FinanceAgent/docker_compose/intel/hpu/gaudi/compose.yaml | 2 +- FinanceAgent/docker_compose/intel/set_env.sh | 2 +- FinanceAgent/tests/test_compose_on_gaudi.sh | 2 +- GraphRAG/README.md | 2 +- GraphRAG/docker_compose/intel/hpu/gaudi/set_env.sh | 5 ++--- GraphRAG/tests/README.md | 
2 +- 12 files changed, 13 insertions(+), 14 deletions(-) diff --git a/EdgeCraftRAG/README.md b/EdgeCraftRAG/README.md index 9a6216bdb6..93546869da 100755 --- a/EdgeCraftRAG/README.md +++ b/EdgeCraftRAG/README.md @@ -96,7 +96,7 @@ Set up Additional Environment Variables and start with compose_vllm.yaml export LLM_MODEL=#your model id export VLLM_SERVICE_PORT=8008 export vLLM_ENDPOINT="http://${HOST_IP}:${VLLM_SERVICE_PORT}" -export HUGGINGFACEHUB_API_TOKEN=#your HF token +export HF_TOKEN=#your HF token docker compose -f compose_vllm.yaml up -d ``` diff --git a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose.yaml b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose.yaml index 281dc16132..e4465e0e7f 100644 --- a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose.yaml +++ b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose.yaml @@ -71,7 +71,7 @@ services: # HTTP_PROXY: ${https_proxy} # VLLM_OPENVINO_DEVICE: GPU # HF_ENDPOINT: ${HF_ENDPOINT} - # HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + # HF_TOKEN: ${HF_TOKEN} # volumes: # - /dev/dri/by-path:/dev/dri/by-path # - $HOME/.cache/huggingface:/root/.cache/huggingface diff --git a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_gradio.yaml b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_gradio.yaml index 9204351fd4..f753a17460 100644 --- a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_gradio.yaml +++ b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_gradio.yaml @@ -71,7 +71,7 @@ services: # HTTP_PROXY: ${https_proxy} # VLLM_OPENVINO_DEVICE: GPU # HF_ENDPOINT: ${HF_ENDPOINT} - # HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + # HF_TOKEN: ${HF_TOKEN} # volumes: # - /dev/dri/by-path:/dev/dri/by-path # - $HOME/.cache/huggingface:/root/.cache/huggingface diff --git a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm.yaml b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm.yaml index 1204e5f0b2..d1811a4aca 100644 --- a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm.yaml +++ 
b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm.yaml @@ -72,7 +72,7 @@ services: https_proxy: ${https_proxy} VLLM_OPENVINO_DEVICE: GPU HF_ENDPOINT: ${HF_ENDPOINT} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} volumes: - ${HF_CACHE:-${HOME}/.cache}:/root/.cache devices: diff --git a/EdgeCraftRAG/docker_compose/intel/gpu/arc/set_env.sh b/EdgeCraftRAG/docker_compose/intel/gpu/arc/set_env.sh index eef0ebd201..c70928a492 100644 --- a/EdgeCraftRAG/docker_compose/intel/gpu/arc/set_env.sh +++ b/EdgeCraftRAG/docker_compose/intel/gpu/arc/set_env.sh @@ -13,5 +13,5 @@ export HOST_IP=${HOST_IP} export LLM_MODEL=${LLM_MODEL} export HF_ENDPOINT=${HF_ENDPOINT} export vLLM_ENDPOINT=${vLLM_ENDPOINT} -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export no_proxy="localhost, 127.0.0.1, 192.168.1.1" diff --git a/EdgeCraftRAG/tests/README.md b/EdgeCraftRAG/tests/README.md index 3b2f72e0c1..ec08f640b1 100644 --- a/EdgeCraftRAG/tests/README.md +++ b/EdgeCraftRAG/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test diff --git a/FinanceAgent/docker_compose/intel/hpu/gaudi/compose.yaml b/FinanceAgent/docker_compose/intel/hpu/gaudi/compose.yaml index e788c5899a..1edc6f0796 100644 --- a/FinanceAgent/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/FinanceAgent/docker_compose/intel/hpu/gaudi/compose.yaml @@ -11,7 +11,7 @@ x-common-environment: x-common-agent-environment: &common-agent-env <<: *common-env - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} llm_endpoint_url: ${LLM_ENDPOINT} model: ${LLM_MODEL_ID} REDIS_URL_VECTOR: ${REDIS_URL_VECTOR} diff --git a/FinanceAgent/docker_compose/intel/set_env.sh b/FinanceAgent/docker_compose/intel/set_env.sh index 16893f3ab5..c8a36fabb0 100644 --- 
a/FinanceAgent/docker_compose/intel/set_env.sh +++ b/FinanceAgent/docker_compose/intel/set_env.sh @@ -42,7 +42,7 @@ export EMBEDDING_MODEL_ID="${EMBEDDING_MODEL_ID:-BAAI/bge-base-en-v1.5}" export TEI_EMBEDDING_ENDPOINT="http://${HOST_IP}:${TEI_EMBEDDER_PORT}" # Hugging Face API token -export HUGGINGFACEHUB_API_TOKEN="${HF_TOKEN}" +export HF_TOKEN="${HF_TOKEN}" # Recursion limits export RECURSION_LIMIT_WORKER="${RECURSION_LIMIT_WORKER:-12}" diff --git a/FinanceAgent/tests/test_compose_on_gaudi.sh b/FinanceAgent/tests/test_compose_on_gaudi.sh index d534ffa122..cb0f594422 100644 --- a/FinanceAgent/tests/test_compose_on_gaudi.sh +++ b/FinanceAgent/tests/test_compose_on_gaudi.sh @@ -38,7 +38,7 @@ export RECURSION_LIMIT_WORKER="${RECURSION_LIMIT_WORKER:-12}" export RECURSION_LIMIT_SUPERVISOR="${RECURSION_LIMIT_SUPERVISOR:-10}" # Hugging Face API token -export HUGGINGFACEHUB_API_TOKEN="${HF_TOKEN}" +export HF_TOKEN="${HF_TOKEN}" # LLM configuration export TEMPERATURE="${TEMPERATURE:-0.5}" diff --git a/GraphRAG/README.md b/GraphRAG/README.md index 0cdc3b5905..0870b3d829 100644 --- a/GraphRAG/README.md +++ b/GraphRAG/README.md @@ -33,7 +33,7 @@ To set up environment variables for deploying GraphRAG services, follow these st export NEO4J_PASSWORD=${your_neo4j_password} export PYTHONPATH=${path_to_comps} export OPENAI_KEY=${your_openai_api_key} #optional, when not provided will use smaller models TGI/TEI - export HUGGINGFACEHUB_API_TOKEN=${your_hf_token} #needed for TGI/TEI models + export HF_TOKEN=${your_hf_token} #needed for TGI/TEI models ``` 2. 
If you are in a proxy environment, also set the proxy-related environment variables: diff --git a/GraphRAG/docker_compose/intel/hpu/gaudi/set_env.sh b/GraphRAG/docker_compose/intel/hpu/gaudi/set_env.sh index 441ea183be..d5b7e64b5b 100644 --- a/GraphRAG/docker_compose/intel/hpu/gaudi/set_env.sh +++ b/GraphRAG/docker_compose/intel/hpu/gaudi/set_env.sh @@ -5,14 +5,13 @@ # Remember to set your private variables mentioned in README -# host_ip, OPENAI_API_KEY, HUGGINGFACEHUB_API_TOKEN, proxies... +# host_ip, OPENAI_API_KEY, HF_TOKEN, proxies... pushd "../../../../../" > /dev/null source .set_env.sh popd > /dev/null host_ip=$(hostname -I | awk '{print $1}') -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export TEI_EMBEDDER_PORT=11633 export LLM_ENDPOINT_PORT=11634 export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" diff --git a/GraphRAG/tests/README.md b/GraphRAG/tests/README.md index daf4788df2..3f41f1851c 100644 --- a/GraphRAG/tests/README.md +++ b/GraphRAG/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test From 80502a1589a5fb21aa8e3653feb83b9de409f261 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Fri, 6 Jun 2025 16:37:11 +0800 Subject: [PATCH 093/217] update secrets token name for ProductivitySuite, RerankFinetuning, SearchQnA and Translation (#2038) update secrets token name for ProductivitySuite, RerankFinetuning, SearchQnA and Translation Fix shellcheck issue Signed-off-by: ZePan110 --- .../docker_compose/intel/cpu/xeon/README.md | 6 +++--- .../docker_compose/intel/cpu/xeon/compose.yaml | 14 +++++++------- .../docker_compose/intel/cpu/xeon/set_env.sh | 3 ++- ProductivitySuite/tests/README.md | 2 +- RerankFinetuning/tests/test_compose_on_gaudi.sh | 1 + RerankFinetuning/tests/test_compose_on_xeon.sh | 1 + 
SearchQnA/docker_compose/amd/gpu/rocm/README.md | 4 ++-- SearchQnA/docker_compose/amd/gpu/rocm/set_env.sh | 2 +- .../docker_compose/amd/gpu/rocm/set_env_vllm.sh | 2 +- SearchQnA/docker_compose/intel/cpu/xeon/README.md | 2 +- .../docker_compose/intel/cpu/xeon/compose.yaml | 8 ++++---- SearchQnA/docker_compose/intel/hpu/gaudi/README.md | 2 +- .../docker_compose/intel/hpu/gaudi/compose.yaml | 8 ++++---- SearchQnA/docker_compose/intel/set_env.sh | 2 +- SearchQnA/tests/README.md | 2 +- Translation/docker_compose/amd/gpu/rocm/set_env.sh | 2 +- .../docker_compose/amd/gpu/rocm/set_env_vllm.sh | 2 +- .../docker_compose/intel/cpu/xeon/compose.yaml | 4 ++-- .../docker_compose/intel/hpu/gaudi/compose.yaml | 4 ++-- Translation/docker_compose/intel/set_env.sh | 2 +- Translation/tests/README.md | 2 +- 21 files changed, 39 insertions(+), 36 deletions(-) diff --git a/ProductivitySuite/docker_compose/intel/cpu/xeon/README.md b/ProductivitySuite/docker_compose/intel/cpu/xeon/README.md index 91921c8c23..55af17f1e8 100644 --- a/ProductivitySuite/docker_compose/intel/cpu/xeon/README.md +++ b/ProductivitySuite/docker_compose/intel/cpu/xeon/README.md @@ -108,12 +108,12 @@ Since the `compose.yaml` will consume some environment variables, you need to se export host_ip="External_Public_IP" ``` -**Export the value of your Huggingface API token to the `HUGGINGFACEHUB_API_TOKEN` environment variable** +**Export the value of your Huggingface API token to the `HF_TOKEN` environment variable** > Change the Your_Huggingface_API_Token below with tyour actual Huggingface API Token value ``` -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` **Append the value of the public IP address to the no_proxy list** @@ -129,7 +129,7 @@ export RERANK_MODEL_ID="BAAI/bge-reranker-base" export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" export LLM_MODEL_ID_CODEGEN="meta-llama/CodeLlama-7b-hf" export INDEX_NAME="rag-redis" -export 
HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export BACKEND_SERVICE_ENDPOINT_CHATQNA="http://${host_ip}:8888/v1/chatqna" export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/delete" export BACKEND_SERVICE_ENDPOINT_CODEGEN="http://${host_ip}:7778/v1/codegen" diff --git a/ProductivitySuite/docker_compose/intel/cpu/xeon/compose.yaml b/ProductivitySuite/docker_compose/intel/cpu/xeon/compose.yaml index 00a16c1670..ddc8790951 100644 --- a/ProductivitySuite/docker_compose/intel/cpu/xeon/compose.yaml +++ b/ProductivitySuite/docker_compose/intel/cpu/xeon/compose.yaml @@ -29,7 +29,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} DATAPREP_TYPE: ${DATAPREP_TYPE} LOGFLAG: ${LOGFLAG} healthcheck: @@ -73,7 +73,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped @@ -90,7 +90,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 host_ip: ${host_ip} @@ -111,7 +111,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 host_ip: ${host_ip} @@ -167,7 +167,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} host_ip: ${host_ip} healthcheck: test: 
["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"] @@ -190,7 +190,7 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: http://tgi_service_codegen:80 LLM_MODEL_ID: ${LLM_MODEL_ID_CODEGEN} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} restart: unless-stopped codegen-xeon-backend-server: @@ -303,7 +303,7 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: http://tgi-service:80 LLM_MODEL_ID: ${LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS:-1024} MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS:-2048} DocSum_COMPONENT_NAME: ${DocSum_COMPONENT_NAME} diff --git a/ProductivitySuite/docker_compose/intel/cpu/xeon/set_env.sh b/ProductivitySuite/docker_compose/intel/cpu/xeon/set_env.sh index a70561f28b..f88f72e444 100755 --- a/ProductivitySuite/docker_compose/intel/cpu/xeon/set_env.sh +++ b/ProductivitySuite/docker_compose/intel/cpu/xeon/set_env.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 pushd "../../../../../" > /dev/null @@ -10,7 +11,7 @@ export RERANK_MODEL_ID="BAAI/bge-reranker-base" export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" export LLM_MODEL_ID_CODEGEN="Intel/neural-chat-7b-v3-3" export INDEX_NAME="rag-redis" -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export BACKEND_SERVICE_ENDPOINT_CHATQNA="http://${host_ip}:8888/v1/chatqna" export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/delete" export BACKEND_SERVICE_ENDPOINT_CODEGEN="http://${host_ip}:7778/v1/codegen" diff --git a/ProductivitySuite/tests/README.md b/ProductivitySuite/tests/README.md index a7bc0ab7ce..fdd2bce112 100644 --- a/ProductivitySuite/tests/README.md +++ b/ProductivitySuite/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export 
HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test diff --git a/RerankFinetuning/tests/test_compose_on_gaudi.sh b/RerankFinetuning/tests/test_compose_on_gaudi.sh index dca0bc2512..db6c468b6f 100644 --- a/RerankFinetuning/tests/test_compose_on_gaudi.sh +++ b/RerankFinetuning/tests/test_compose_on_gaudi.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 diff --git a/RerankFinetuning/tests/test_compose_on_xeon.sh b/RerankFinetuning/tests/test_compose_on_xeon.sh index ca9faa0222..3b9cf0786f 100644 --- a/RerankFinetuning/tests/test_compose_on_xeon.sh +++ b/RerankFinetuning/tests/test_compose_on_xeon.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 diff --git a/SearchQnA/docker_compose/amd/gpu/rocm/README.md b/SearchQnA/docker_compose/amd/gpu/rocm/README.md index 40533aac9f..4146dbbe92 100644 --- a/SearchQnA/docker_compose/amd/gpu/rocm/README.md +++ b/SearchQnA/docker_compose/amd/gpu/rocm/README.md @@ -50,7 +50,7 @@ To set up environment variables for deploying SearchQnA services, set up some pa export host_ip="External_Public_IP" # ip address of the node export GOOGLE_CSE_ID="your cse id" export GOOGLE_API_KEY="your google api key" -export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export HF_TOKEN="Your_HuggingFace_API_Token" export http_proxy="Your_HTTP_Proxy" # http proxy if any export https_proxy="Your_HTTPs_Proxy" # https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed @@ -64,7 +64,7 @@ source ./set_env_vllm.sh export host_ip="External_Public_IP" # ip address of the node export GOOGLE_CSE_ID="your cse id" export GOOGLE_API_KEY="your google api key" -export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export HF_TOKEN="Your_HuggingFace_API_Token" export http_proxy="Your_HTTP_Proxy" # http proxy if any export 
https_proxy="Your_HTTPs_Proxy" # https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed diff --git a/SearchQnA/docker_compose/amd/gpu/rocm/set_env.sh b/SearchQnA/docker_compose/amd/gpu/rocm/set_env.sh index 3d84e01fcf..faeca0ae51 100644 --- a/SearchQnA/docker_compose/amd/gpu/rocm/set_env.sh +++ b/SearchQnA/docker_compose/amd/gpu/rocm/set_env.sh @@ -8,7 +8,7 @@ export EXTERNAL_HOST_IP=${ip_address} export SEARCH_EMBEDDING_MODEL_ID='BAAI/bge-base-en-v1.5' export SEARCH_GOOGLE_API_KEY=${GOOGLE_API_KEY} export SEARCH_GOOGLE_CSE_ID=${GOOGLE_CSE_ID} -export SEARCH_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export SEARCH_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export SEARCH_LLM_MODEL_ID='Intel/neural-chat-7b-v3-3' export SEARCH_RERANK_MODEL_ID='BAAI/bge-reranker-base' diff --git a/SearchQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/SearchQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh index a891cce2a0..9ee0d24f79 100644 --- a/SearchQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/SearchQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -8,7 +8,7 @@ export EXTERNAL_HOST_IP=${ip_address} export SEARCH_EMBEDDING_MODEL_ID='BAAI/bge-base-en-v1.5' export SEARCH_GOOGLE_API_KEY=${GOOGLE_API_KEY} export SEARCH_GOOGLE_CSE_ID=${GOOGLE_CSE_ID} -export SEARCH_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export SEARCH_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export SEARCH_LLM_MODEL_ID='Intel/neural-chat-7b-v3-3' export SEARCH_RERANK_MODEL_ID='BAAI/bge-reranker-base' diff --git a/SearchQnA/docker_compose/intel/cpu/xeon/README.md b/SearchQnA/docker_compose/intel/cpu/xeon/README.md index 742a6ae1cd..c59d5fade5 100644 --- a/SearchQnA/docker_compose/intel/cpu/xeon/README.md +++ b/SearchQnA/docker_compose/intel/cpu/xeon/README.md @@ -43,7 +43,7 @@ To set up environment variables for deploying SearchQnA services, set up some pa export host_ip="External_Public_IP" # ip address of the node export GOOGLE_CSE_ID="your cse id" export 
GOOGLE_API_KEY="your google api key" -export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export HF_TOKEN="Your_HuggingFace_API_Token" export http_proxy="Your_HTTP_Proxy" # http proxy if any export https_proxy="Your_HTTPs_Proxy" # https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed diff --git a/SearchQnA/docker_compose/intel/cpu/xeon/compose.yaml b/SearchQnA/docker_compose/intel/cpu/xeon/compose.yaml index 4503a645bb..dfc05a5b31 100644 --- a/SearchQnA/docker_compose/intel/cpu/xeon/compose.yaml +++ b/SearchQnA/docker_compose/intel/cpu/xeon/compose.yaml @@ -35,7 +35,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} restart: unless-stopped web-retriever: @@ -87,7 +87,7 @@ services: https_proxy: ${https_proxy} RERANK_TYPE: ${RERANK_TYPE} TEI_RERANKING_ENDPOINT: ${TEI_RERANKING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} restart: unless-stopped tgi-service: @@ -102,7 +102,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} host_ip: ${host_ip} healthcheck: test: ["CMD-SHELL", "curl -f http://$host_ip:3006/health || exit 1"] @@ -125,7 +125,7 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${TGI_LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} restart: unless-stopped searchqna-xeon-backend-server: diff --git a/SearchQnA/docker_compose/intel/hpu/gaudi/README.md b/SearchQnA/docker_compose/intel/hpu/gaudi/README.md index 611b4a3c44..4e4cedadc6 100644 --- a/SearchQnA/docker_compose/intel/hpu/gaudi/README.md +++ b/SearchQnA/docker_compose/intel/hpu/gaudi/README.md @@ -43,7 
+43,7 @@ To set up environment variables for deploying SearchQnA services, set up some pa export host_ip="External_Public_IP" # ip address of the node export GOOGLE_CSE_ID="your cse id" export GOOGLE_API_KEY="your google api key" -export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export HF_TOKEN="Your_HuggingFace_API_Token" export http_proxy="Your_HTTP_Proxy" # http proxy if any export https_proxy="Your_HTTPs_Proxy" # https proxy if any export no_proxy=localhost,127.0.0.1,$host_ip # additional no proxies if needed diff --git a/SearchQnA/docker_compose/intel/hpu/gaudi/compose.yaml b/SearchQnA/docker_compose/intel/hpu/gaudi/compose.yaml index 5ff29a5d7a..6affd1fa69 100644 --- a/SearchQnA/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/SearchQnA/docker_compose/intel/hpu/gaudi/compose.yaml @@ -43,7 +43,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} restart: unless-stopped web-retriever: @@ -94,7 +94,7 @@ services: https_proxy: ${https_proxy} RERANK_TYPE: ${RERANK_TYPE} TEI_RERANKING_ENDPOINT: ${TEI_RERANKING_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} restart: unless-stopped tgi-service: @@ -108,7 +108,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 HABANA_VISIBLE_DEVICES: all @@ -142,7 +142,7 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${TGI_LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 LOGFLAG: ${LOGFLAG} diff --git a/SearchQnA/docker_compose/intel/set_env.sh 
b/SearchQnA/docker_compose/intel/set_env.sh index 45aaa7eb48..9680c13af0 100644 --- a/SearchQnA/docker_compose/intel/set_env.sh +++ b/SearchQnA/docker_compose/intel/set_env.sh @@ -8,7 +8,7 @@ popd > /dev/null export GOOGLE_CSE_ID=$GOOGLE_CSE_ID export GOOGLE_API_KEY=$GOOGLE_API_KEY -export HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN +export HF_TOKEN=$HF_TOKEN export EMBEDDING_MODEL_ID=BAAI/bge-base-en-v1.5 export TEI_EMBEDDING_ENDPOINT=http://${host_ip}:3001 export RERANK_MODEL_ID=BAAI/bge-reranker-base diff --git a/SearchQnA/tests/README.md b/SearchQnA/tests/README.md index 4dd235fbb6..652d025076 100644 --- a/SearchQnA/tests/README.md +++ b/SearchQnA/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test diff --git a/Translation/docker_compose/amd/gpu/rocm/set_env.sh b/Translation/docker_compose/amd/gpu/rocm/set_env.sh index dc7dbe85ac..e0457d8395 100644 --- a/Translation/docker_compose/amd/gpu/rocm/set_env.sh +++ b/Translation/docker_compose/amd/gpu/rocm/set_env.sh @@ -9,7 +9,7 @@ export TRANSLATION_HOST_IP=${host_ip} export TRANSLATION_EXTERNAL_HOST_IP=${host_ip} export TRANSLATION_LLM_MODEL_ID="haoranxu/ALMA-13B" export TRANSLATION_TGI_LLM_ENDPOINT="http://${TRANSLATION_HOST_IP}:8008" -export TRANSLATION_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export TRANSLATION_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export TRANSLATION_MEGA_SERVICE_HOST_IP=${TRANSLATION_HOST_IP} export TRANSLATION_LLM_SERVICE_HOST_IP=${TRANSLATION_HOST_IP} export TRANSLATION_FRONTEND_SERVICE_IP=${TRANSLATION_HOST_IP} diff --git a/Translation/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/Translation/docker_compose/amd/gpu/rocm/set_env_vllm.sh index 4ebd939baa..772430ac0c 100644 --- a/Translation/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/Translation/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -11,7 +11,7 @@ export 
TRANSLATION_LLM_MODEL_ID="haoranxu/ALMA-13B" export TRANSLATION_VLLM_SERVICE_PORT=8088 export TRANSLATION_LLM_ENDPOINT="http://${HOST_IP}:${TRANSLATION_VLLM_SERVICE_PORT}" export TRANSLATION_LLM_PORT=9088 -export TRANSLATION_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export TRANSLATION_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export TRANSLATION_MEGA_SERVICE_HOST_IP=${HOST_IP} export TRANSLATION_LLM_SERVICE_HOST_IP=${HOST_IP} export TRANSLATION_FRONTEND_SERVICE_IP=${HOST_IP} diff --git a/Translation/docker_compose/intel/cpu/xeon/compose.yaml b/Translation/docker_compose/intel/cpu/xeon/compose.yaml index 4b77d84484..1c3d115741 100644 --- a/Translation/docker_compose/intel/cpu/xeon/compose.yaml +++ b/Translation/docker_compose/intel/cpu/xeon/compose.yaml @@ -11,7 +11,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 host_ip: ${host_ip} @@ -39,7 +39,7 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${TGI_LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 restart: unless-stopped diff --git a/Translation/docker_compose/intel/hpu/gaudi/compose.yaml b/Translation/docker_compose/intel/hpu/gaudi/compose.yaml index 9516e60ce6..92661ab552 100644 --- a/Translation/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/Translation/docker_compose/intel/hpu/gaudi/compose.yaml @@ -11,7 +11,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 HABANA_VISIBLE_DEVICES: all @@ -47,7 +47,7 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${TGI_LLM_ENDPOINT} LLM_MODEL_ID: 
${LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 restart: unless-stopped diff --git a/Translation/docker_compose/intel/set_env.sh b/Translation/docker_compose/intel/set_env.sh index 37762fbd50..931ea2716a 100644 --- a/Translation/docker_compose/intel/set_env.sh +++ b/Translation/docker_compose/intel/set_env.sh @@ -9,7 +9,7 @@ popd > /dev/null export LLM_MODEL_ID="haoranxu/ALMA-13B" export TGI_LLM_ENDPOINT="http://${host_ip}:8008" -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export MEGA_SERVICE_HOST_IP=${host_ip} export LLM_SERVICE_HOST_IP=${host_ip} export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8888/v1/translation" diff --git a/Translation/tests/README.md b/Translation/tests/README.md index ece64cf149..7a544febb2 100644 --- a/Translation/tests/README.md +++ b/Translation/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test From 25597860d6a20ece9389a2bd5d996e9c93900069 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Fri, 6 Jun 2025 16:37:39 +0800 Subject: [PATCH 094/217] update secrets token name for InstructionTuning, MultimodalQnA and WorkflowExecAgent (#2039) update secrets token name for InstructionTuning, MultimodalQnA and WorkflowExecAgent Fix shellcheck issue Signed-off-by: ZePan110 --- InstructionTuning/tests/README.md | 2 +- MultimodalQnA/docker_compose/amd/gpu/rocm/README.md | 4 ++-- MultimodalQnA/docker_compose/intel/cpu/xeon/compose.yaml | 2 +- .../docker_compose/intel/cpu/xeon/compose_milvus.yaml | 2 +- MultimodalQnA/docker_compose/intel/hpu/gaudi/compose.yaml | 2 +- .../docker_compose/intel/hpu/gaudi/compose_milvus.yaml | 2 +- MultimodalQnA/docker_compose/intel/set_env.sh | 2 +- MultimodalQnA/tests/README.md | 2 +- 
WorkflowExecAgent/README.md | 2 +- WorkflowExecAgent/tests/2_start_vllm_service.sh | 2 +- WorkflowExecAgent/tests/3_launch_and_validate_agent.sh | 2 +- WorkflowExecAgent/tests/README.md | 2 +- WorkflowExecAgent/tests/test_compose_vllm_on_xeon.sh | 1 + 13 files changed, 14 insertions(+), 13 deletions(-) diff --git a/InstructionTuning/tests/README.md b/InstructionTuning/tests/README.md index fd43a2b4a1..19d617b426 100644 --- a/InstructionTuning/tests/README.md +++ b/InstructionTuning/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test diff --git a/MultimodalQnA/docker_compose/amd/gpu/rocm/README.md b/MultimodalQnA/docker_compose/amd/gpu/rocm/README.md index 14e66d989a..f6bc0e8d1a 100644 --- a/MultimodalQnA/docker_compose/amd/gpu/rocm/README.md +++ b/MultimodalQnA/docker_compose/amd/gpu/rocm/README.md @@ -165,11 +165,11 @@ Use AMD GPU driver utilities to determine the correct `cardN` and `renderN` IDs #### Setting variables in the operating system environment: -##### Set variable HUGGINGFACEHUB_API_TOKEN: +##### Set variable HF_TOKEN: ```bash ### Replace the string 'your_huggingfacehub_token' with your HuggingFacehub repository access token. 
-export HUGGINGFACEHUB_API_TOKEN='your_huggingfacehub_token' +export HF_TOKEN='your_huggingfacehub_token' ``` #### Set variables value in set_env\*\*\*\*.sh file: diff --git a/MultimodalQnA/docker_compose/intel/cpu/xeon/compose.yaml b/MultimodalQnA/docker_compose/intel/cpu/xeon/compose.yaml index 2f2318de07..3de373be5d 100644 --- a/MultimodalQnA/docker_compose/intel/cpu/xeon/compose.yaml +++ b/MultimodalQnA/docker_compose/intel/cpu/xeon/compose.yaml @@ -49,7 +49,7 @@ services: DATAPREP_MMR_PORT: ${DATAPREP_MMR_PORT} INDEX_NAME: ${INDEX_NAME} LVM_ENDPOINT: "http://${LVM_SERVICE_HOST_IP}:${LVM_PORT}/v1/lvm" - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} MULTIMODAL_DATAPREP: true DATAPREP_COMPONENT_NAME: "OPEA_DATAPREP_MULTIMODALREDIS" healthcheck: diff --git a/MultimodalQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml b/MultimodalQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml index 250d2633a5..257f033745 100644 --- a/MultimodalQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml +++ b/MultimodalQnA/docker_compose/intel/cpu/xeon/compose_milvus.yaml @@ -91,7 +91,7 @@ services: MILVUS_HOST: ${MILVUS_HOST} COLLECTION_NAME: ${COLLECTION_NAME:-LangChainCollection} LVM_ENDPOINT: "http://${LVM_SERVICE_HOST_IP}:${LVM_PORT}/v1/lvm" - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} restart: unless-stopped embedding-multimodal-bridgetower: diff --git a/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose.yaml b/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose.yaml index c3dcc9f8cc..ae68d329a3 100644 --- a/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose.yaml @@ -51,7 +51,7 @@ services: DATAPREP_MMR_PORT: ${DATAPREP_MMR_PORT} INDEX_NAME: ${INDEX_NAME} LVM_ENDPOINT: "http://${LVM_SERVICE_HOST_IP}:${LVM_PORT}/v1/lvm" - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: 
${HF_TOKEN} MULTIMODAL_DATAPREP: true DATAPREP_COMPONENT_NAME: "OPEA_DATAPREP_MULTIMODALREDIS" healthcheck: diff --git a/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose_milvus.yaml b/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose_milvus.yaml index 165760003c..4c1019785c 100644 --- a/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose_milvus.yaml +++ b/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose_milvus.yaml @@ -89,7 +89,7 @@ services: MILVUS_HOST: ${MILVUS_HOST} COLLECTION_NAME: ${COLLECTION_NAME} LVM_ENDPOINT: "http://${LVM_SERVICE_HOST_IP}:${LVM_PORT}/v1/lvm" - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} restart: unless-stopped embedding-multimodal-bridgetower-gaudi: image: ${REGISTRY:-opea}/embedding-multimodal-bridgetower-gaudi:${TAG:-latest} diff --git a/MultimodalQnA/docker_compose/intel/set_env.sh b/MultimodalQnA/docker_compose/intel/set_env.sh index 8d31674a29..b2caa3ad0b 100755 --- a/MultimodalQnA/docker_compose/intel/set_env.sh +++ b/MultimodalQnA/docker_compose/intel/set_env.sh @@ -7,7 +7,7 @@ source .set_env.sh popd > /dev/null export host_ip=$(hostname -I | awk '{print $1}') -export HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} +export HF_TOKEN=${HF_TOKEN} export MM_EMBEDDING_SERVICE_HOST_IP=${host_ip} export MM_RETRIEVER_SERVICE_HOST_IP=${host_ip} export LVM_SERVICE_HOST_IP=${host_ip} diff --git a/MultimodalQnA/tests/README.md b/MultimodalQnA/tests/README.md index 279576500f..0aa33095cc 100644 --- a/MultimodalQnA/tests/README.md +++ b/MultimodalQnA/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test diff --git a/WorkflowExecAgent/README.md b/WorkflowExecAgent/README.md index 0a4b7f333e..402913775d 100644 --- a/WorkflowExecAgent/README.md +++ b/WorkflowExecAgent/README.md @@ -85,7 +85,7 @@ Configure 
`GenAIExamples/WorkflowExecAgent/docker_compose/.env` file with the fo ```sh export SDK_BASE_URL=${SDK_BASE_URL} export SERVING_TOKEN=${SERVING_TOKEN} -export HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} +export HF_TOKEN=${HF_TOKEN} export llm_engine=${llm_engine} export llm_endpoint_url=${llm_endpoint_url} export ip_address=$(hostname -I | awk '{print $1}') diff --git a/WorkflowExecAgent/tests/2_start_vllm_service.sh b/WorkflowExecAgent/tests/2_start_vllm_service.sh index a058864c37..73c43d4b44 100644 --- a/WorkflowExecAgent/tests/2_start_vllm_service.sh +++ b/WorkflowExecAgent/tests/2_start_vllm_service.sh @@ -10,7 +10,7 @@ vllm_port=${vllm_port} [[ -z "$vllm_port" ]] && vllm_port=8084 model=mistralai/Mistral-7B-Instruct-v0.3 export WORKDIR=$WORKPATH/../../ -export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} function build_vllm_docker_image() { echo "Building the vllm docker images" diff --git a/WorkflowExecAgent/tests/3_launch_and_validate_agent.sh b/WorkflowExecAgent/tests/3_launch_and_validate_agent.sh index 5c9e6da583..3fa75920c3 100644 --- a/WorkflowExecAgent/tests/3_launch_and_validate_agent.sh +++ b/WorkflowExecAgent/tests/3_launch_and_validate_agent.sh @@ -12,7 +12,7 @@ export WORKDIR=$WORKPATH/../../ echo "WORKDIR=${WORKDIR}" export SDK_BASE_URL=${SDK_BASE_URL} export SERVING_TOKEN=${SERVING_TOKEN} -export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export llm_engine=vllm export ip_address=$(hostname -I | awk '{print $1}') export llm_endpoint_url=http://${ip_address}:${vllm_port} diff --git a/WorkflowExecAgent/tests/README.md b/WorkflowExecAgent/tests/README.md index 1dbaab6e93..9f20e96b5e 100644 --- a/WorkflowExecAgent/tests/README.md +++ b/WorkflowExecAgent/tests/README.md @@ -9,7 +9,7 @@ Configure necessary variables as listed below. 
Replace the variables according t ```sh export SDK_BASE_URL=${SDK_BASE_URL} export SERVING_TOKEN=${SERVING_TOKEN} -export HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} +export HF_TOKEN=${HF_TOKEN} export workflow_id=${workflow_id} # workflow_id of the serving workflow export vllm_port=${vllm_port} # vllm serving port export ip_address=$(hostname -I | awk '{print $1}') diff --git a/WorkflowExecAgent/tests/test_compose_vllm_on_xeon.sh b/WorkflowExecAgent/tests/test_compose_vllm_on_xeon.sh index d1faa05a85..f9352214cc 100644 --- a/WorkflowExecAgent/tests/test_compose_vllm_on_xeon.sh +++ b/WorkflowExecAgent/tests/test_compose_vllm_on_xeon.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 From 08347dd694de7006a5a1fe59e90115d0aaf19094 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Fri, 6 Jun 2025 16:40:18 +0800 Subject: [PATCH 095/217] update secrets token name for DocSum. (#2036) Signed-off-by: ZePan110 --- DocSum/docker_compose/amd/gpu/rocm/README.md | 2 +- DocSum/docker_compose/amd/gpu/rocm/set_env.sh | 2 +- DocSum/docker_compose/amd/gpu/rocm/set_env_vllm.sh | 2 +- DocSum/docker_compose/intel/cpu/xeon/compose.yaml | 6 +++--- DocSum/docker_compose/intel/cpu/xeon/compose_tgi.yaml | 6 +++--- DocSum/docker_compose/intel/hpu/gaudi/compose.yaml | 6 +++--- DocSum/docker_compose/intel/hpu/gaudi/compose_tgi.yaml | 6 +++--- DocSum/docker_compose/intel/set_env.sh | 2 +- DocSum/tests/README.md | 2 +- 9 files changed, 17 insertions(+), 17 deletions(-) diff --git a/DocSum/docker_compose/amd/gpu/rocm/README.md b/DocSum/docker_compose/amd/gpu/rocm/README.md index da9d7d749f..1c765cbd81 100644 --- a/DocSum/docker_compose/amd/gpu/rocm/README.md +++ b/DocSum/docker_compose/amd/gpu/rocm/README.md @@ -65,7 +65,7 @@ Set the values of the variables: Setting variables in the operating system environment: ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_HuggingFace_API_Token" +export HF_TOKEN="Your_HuggingFace_API_Token" source ./set_env_*.sh # 
replace the script name with the appropriate one ``` diff --git a/DocSum/docker_compose/amd/gpu/rocm/set_env.sh b/DocSum/docker_compose/amd/gpu/rocm/set_env.sh index f597849987..771bce2297 100644 --- a/DocSum/docker_compose/amd/gpu/rocm/set_env.sh +++ b/DocSum/docker_compose/amd/gpu/rocm/set_env.sh @@ -9,7 +9,7 @@ export DOCSUM_MAX_TOTAL_TOKENS="4096" export DOCSUM_LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" export DOCSUM_TGI_SERVICE_PORT="8008" export DOCSUM_TGI_LLM_ENDPOINT="http://${HOST_IP}:${DOCSUM_TGI_SERVICE_PORT}" -export DOCSUM_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export DOCSUM_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export DOCSUM_WHISPER_PORT="7066" export ASR_SERVICE_HOST_IP="${HOST_IP}" export DOCSUM_LLM_SERVER_PORT="9000" diff --git a/DocSum/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/DocSum/docker_compose/amd/gpu/rocm/set_env_vllm.sh index 800e502071..1c33250fde 100644 --- a/DocSum/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/DocSum/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -4,7 +4,7 @@ # SPDX-License-Identifier: Apache-2.0 export HOST_IP=${ip_address} -export DOCSUM_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export DOCSUM_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export DOCSUM_MAX_INPUT_TOKENS=2048 export DOCSUM_MAX_TOTAL_TOKENS=4096 export DOCSUM_LLM_MODEL_ID="Intel/neural-chat-7b-v3-3" diff --git a/DocSum/docker_compose/intel/cpu/xeon/compose.yaml b/DocSum/docker_compose/intel/cpu/xeon/compose.yaml index d6aa67ced3..5489325d50 100644 --- a/DocSum/docker_compose/intel/cpu/xeon/compose.yaml +++ b/DocSum/docker_compose/intel/cpu/xeon/compose.yaml @@ -14,7 +14,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LLM_MODEL_ID: ${LLM_MODEL_ID} VLLM_TORCH_PROFILER_DIR: "/mnt" VLLM_CPU_KVCACHE_SPACE: 40 @@ -40,8 +40,8 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} - 
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS} DocSum_COMPONENT_NAME: ${DocSum_COMPONENT_NAME} diff --git a/DocSum/docker_compose/intel/cpu/xeon/compose_tgi.yaml b/DocSum/docker_compose/intel/cpu/xeon/compose_tgi.yaml index 2343d726c7..b929900830 100644 --- a/DocSum/docker_compose/intel/cpu/xeon/compose_tgi.yaml +++ b/DocSum/docker_compose/intel/cpu/xeon/compose_tgi.yaml @@ -14,7 +14,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} host_ip: ${host_ip} healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"] @@ -39,8 +39,8 @@ services: https_proxy: ${https_proxy} LLM_ENDPOINT: ${LLM_ENDPOINT} LLM_MODEL_ID: ${LLM_MODEL_ID} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS} DocSum_COMPONENT_NAME: ${DocSum_COMPONENT_NAME} diff --git a/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml b/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml index 2efa09e890..739a41feba 100644 --- a/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/DocSum/docker_compose/intel/hpu/gaudi/compose.yaml @@ -13,7 +13,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HABANA_VISIBLE_DEVICES: all OMPI_MCA_btl_vader_single_copy_mechanism: none LLM_MODEL_ID: ${LLM_MODEL_ID} @@ -44,8 +44,8 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - HF_TOKEN: 
${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS} LLM_ENDPOINT: ${LLM_ENDPOINT} diff --git a/DocSum/docker_compose/intel/hpu/gaudi/compose_tgi.yaml b/DocSum/docker_compose/intel/hpu/gaudi/compose_tgi.yaml index 6b922ebc68..987706b0ee 100644 --- a/DocSum/docker_compose/intel/hpu/gaudi/compose_tgi.yaml +++ b/DocSum/docker_compose/intel/hpu/gaudi/compose_tgi.yaml @@ -13,7 +13,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 HABANA_VISIBLE_DEVICES: all @@ -48,8 +48,8 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} + HF_TOKEN: ${HF_TOKEN} MAX_INPUT_TOKENS: ${MAX_INPUT_TOKENS} MAX_TOTAL_TOKENS: ${MAX_TOTAL_TOKENS} LLM_ENDPOINT: ${LLM_ENDPOINT} diff --git a/DocSum/docker_compose/intel/set_env.sh b/DocSum/docker_compose/intel/set_env.sh index 1d9a013375..a0271fb8f4 100644 --- a/DocSum/docker_compose/intel/set_env.sh +++ b/DocSum/docker_compose/intel/set_env.sh @@ -10,7 +10,7 @@ export host_ip=$(hostname -I | awk '{print $1}') # Example: host_ip="192.168.1.1 export no_proxy="${no_proxy},${host_ip}" # Example: no_proxy="localhost, 127.0.0.1, 192.168.1.1" export http_proxy=$http_proxy export https_proxy=$https_proxy -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export LLM_ENDPOINT_PORT=8008 export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct" diff --git a/DocSum/tests/README.md b/DocSum/tests/README.md index 6d5f55c7f1..2d002f3485 100644 --- a/DocSum/tests/README.md +++ b/DocSum/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment 
variable ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test From a0a6aa41afabdedbe3bb050fe9f757c2e687035c Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Fri, 6 Jun 2025 16:40:37 +0800 Subject: [PATCH 096/217] update secrets token name for VideoQnA and VisualQnA (#2040) Signed-off-by: ZePan110 --- VideoQnA/docker_compose/intel/cpu/xeon/README.md | 2 +- VideoQnA/docker_compose/intel/cpu/xeon/compose.yaml | 4 ++-- VideoQnA/docker_compose/intel/cpu/xeon/set_env.sh | 2 +- VideoQnA/tests/test_compose_on_xeon.sh | 1 - VisualQnA/docker_compose/amd/gpu/rocm/README.md | 4 ++-- VisualQnA/docker_compose/amd/gpu/rocm/set_env.sh | 2 +- VisualQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh | 2 +- VisualQnA/docker_compose/intel/cpu/xeon/compose.yaml | 2 +- VisualQnA/docker_compose/intel/hpu/gaudi/compose.yaml | 2 +- VisualQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml | 2 +- VisualQnA/tests/README.md | 2 +- VisualQnA/tests/test_compose_tgi_on_gaudi.sh | 2 +- VisualQnA/tests/test_compose_tgi_on_xeon.sh | 2 +- 13 files changed, 14 insertions(+), 15 deletions(-) diff --git a/VideoQnA/docker_compose/intel/cpu/xeon/README.md b/VideoQnA/docker_compose/intel/cpu/xeon/README.md index 96b1d97ec0..6f06577af4 100644 --- a/VideoQnA/docker_compose/intel/cpu/xeon/README.md +++ b/VideoQnA/docker_compose/intel/cpu/xeon/README.md @@ -151,7 +151,7 @@ export http_proxy=${your_http_proxy} export https_proxy=${your_http_proxy} export HF_TOKEN=${HF_TOKEN} -export HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} +export HF_TOKEN=${HF_TOKEN} export INDEX_NAME="mega-videoqna" export LLM_DOWNLOAD="True" # Set to "False" before redeploy LVM server to avoid model download diff --git a/VideoQnA/docker_compose/intel/cpu/xeon/compose.yaml b/VideoQnA/docker_compose/intel/cpu/xeon/compose.yaml index fd67f82eeb..4d9f7ffec4 100644 --- a/VideoQnA/docker_compose/intel/cpu/xeon/compose.yaml +++ 
b/VideoQnA/docker_compose/intel/cpu/xeon/compose.yaml @@ -26,7 +26,7 @@ services: VDMS_PORT: ${VDMS_PORT} INDEX_NAME: ${INDEX_NAME} COLLECTION_NAME: ${INDEX_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} volumes: - videoqna-cache:/home/user/.cache healthcheck: @@ -64,7 +64,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} INDEX_NAME: ${INDEX_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_VDMS" VDMS_INDEX_NAME: ${INDEX_NAME} VDMS_HOST: ${VDMS_HOST} diff --git a/VideoQnA/docker_compose/intel/cpu/xeon/set_env.sh b/VideoQnA/docker_compose/intel/cpu/xeon/set_env.sh index ada41f8ba9..307f849b88 100644 --- a/VideoQnA/docker_compose/intel/cpu/xeon/set_env.sh +++ b/VideoQnA/docker_compose/intel/cpu/xeon/set_env.sh @@ -9,7 +9,7 @@ popd > /dev/null host_ip=$(hostname -I | awk '{print $1}') export HF_TOKEN=${HF_TOKEN} -export HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} +export HF_TOKEN=${HF_TOKEN} export INDEX_NAME="mega-videoqna" export LLM_DOWNLOAD="True" # Set to "False" before redeploy LVM server to avoid model download diff --git a/VideoQnA/tests/test_compose_on_xeon.sh b/VideoQnA/tests/test_compose_on_xeon.sh index d4c1b5a3b5..a2306399b2 100755 --- a/VideoQnA/tests/test_compose_on_xeon.sh +++ b/VideoQnA/tests/test_compose_on_xeon.sh @@ -17,7 +17,6 @@ export host_ip=${ip_address} function setup_env() { export HF_TOKEN=${HF_TOKEN} - export HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export INDEX_NAME="mega-videoqna" export LLM_DOWNLOAD="True" # Set to "False" before redeploy LVM server to avoid model download diff --git a/VisualQnA/docker_compose/amd/gpu/rocm/README.md b/VisualQnA/docker_compose/amd/gpu/rocm/README.md index 1647b16b2a..08e93c2776 100644 --- a/VisualQnA/docker_compose/amd/gpu/rocm/README.md +++ b/VisualQnA/docker_compose/amd/gpu/rocm/README.md @@ -151,11 +151,11 @@ Use AMD GPU driver utilities to 
determine the correct `cardN` and `renderN` IDs #### Setting variables in the operating system environment: -##### Set variable HUGGINGFACEHUB_API_TOKEN: +##### Set variable HF_TOKEN: ```bash ### Replace the string 'your_huggingfacehub_token' with your HuggingFacehub repository access token. -export HUGGINGFACEHUB_API_TOKEN='your_huggingfacehub_token' +export HF_TOKEN='your_huggingfacehub_token' ``` #### Set variables value in set_env\*\*\*\*.sh file: diff --git a/VisualQnA/docker_compose/amd/gpu/rocm/set_env.sh b/VisualQnA/docker_compose/amd/gpu/rocm/set_env.sh index 1cdf88a262..48893357c3 100644 --- a/VisualQnA/docker_compose/amd/gpu/rocm/set_env.sh +++ b/VisualQnA/docker_compose/amd/gpu/rocm/set_env.sh @@ -6,7 +6,7 @@ export HOST_IP=${host_ip} export EXTERNAL_HOST_IP=${host_ip} export VISUALQNA_TGI_SERVICE_PORT="8399" -export VISUALQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export VISUALQNA_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export VISUALQNA_CARD_ID="card1" export VISUALQNA_RENDER_ID="renderD136" export LVM_MODEL_ID="Xkev/Llama-3.2V-11B-cot" diff --git a/VisualQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh b/VisualQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh index 68a1bb0b9d..b36390d840 100644 --- a/VisualQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh +++ b/VisualQnA/docker_compose/amd/gpu/rocm/set_env_vllm.sh @@ -6,7 +6,7 @@ export HOST_IP=${host_ip} export EXTERNAL_HOST_IP=${host_ip} export VISUALQNA_VLLM_SERVICE_PORT="8081" -export VISUALQNA_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export VISUALQNA_HUGGINGFACEHUB_API_TOKEN=${HF_TOKEN} export VISUALQNA_CARD_ID="card1" export VISUALQNA_RENDER_ID="renderD136" export VISUALQNA_LVM_MODEL_ID="Xkev/Llama-3.2V-11B-cot" diff --git a/VisualQnA/docker_compose/intel/cpu/xeon/compose.yaml b/VisualQnA/docker_compose/intel/cpu/xeon/compose.yaml index 7c7d9c9317..47a99a6b0b 100644 --- a/VisualQnA/docker_compose/intel/cpu/xeon/compose.yaml +++ 
b/VisualQnA/docker_compose/intel/cpu/xeon/compose.yaml @@ -13,7 +13,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} VLLM_TORCH_PROFILER_DIR: "/mnt" healthcheck: test: ["CMD-SHELL", "curl -f http://localhost:80/health || exit 1"] diff --git a/VisualQnA/docker_compose/intel/hpu/gaudi/compose.yaml b/VisualQnA/docker_compose/intel/hpu/gaudi/compose.yaml index c1950a14d4..3430ac1052 100644 --- a/VisualQnA/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/VisualQnA/docker_compose/intel/hpu/gaudi/compose.yaml @@ -13,7 +13,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} HABANA_VISIBLE_DEVICES: all OMPI_MCA_btl_vader_single_copy_mechanism: none LLM_MODEL_ID: ${LVM_MODEL_ID} diff --git a/VisualQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml b/VisualQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml index 251b4fce70..adfbc01543 100644 --- a/VisualQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml +++ b/VisualQnA/docker_compose/intel/hpu/gaudi/compose_tgi.yaml @@ -17,7 +17,7 @@ services: HF_HUB_ENABLE_HF_TRANSFER: 0 HABANA_VISIBLE_DEVICES: all OMPI_MCA_btl_vader_single_copy_mechanism: none - HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGING_FACE_HUB_TOKEN: ${HF_TOKEN} ENABLE_HPU_GRAPH: true LIMIT_HPU_GRAPH: true USE_FLASH_ATTENTION: true diff --git a/VisualQnA/tests/README.md b/VisualQnA/tests/README.md index 8d07371b51..664c9d7b44 100644 --- a/VisualQnA/tests/README.md +++ b/VisualQnA/tests/README.md @@ -3,7 +3,7 @@ ## Set the required environment variable ```bash -export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token" +export HF_TOKEN="Your_Huggingface_API_Token" ``` ## Run test diff --git a/VisualQnA/tests/test_compose_tgi_on_gaudi.sh b/VisualQnA/tests/test_compose_tgi_on_gaudi.sh index b469166a8e..ba49821249 100644 --- 
a/VisualQnA/tests/test_compose_tgi_on_gaudi.sh +++ b/VisualQnA/tests/test_compose_tgi_on_gaudi.sh @@ -34,7 +34,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/hpu/gaudi - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} + export HF_TOKEN=${HF_TOKEN} export NGINX_PORT=80 export host_ip=${ip_address} source ./set_env.sh diff --git a/VisualQnA/tests/test_compose_tgi_on_xeon.sh b/VisualQnA/tests/test_compose_tgi_on_xeon.sh index 29a009904d..270b638350 100644 --- a/VisualQnA/tests/test_compose_tgi_on_xeon.sh +++ b/VisualQnA/tests/test_compose_tgi_on_xeon.sh @@ -34,7 +34,7 @@ function build_docker_images() { function start_services() { cd $WORKPATH/docker_compose/intel/cpu/xeon/ - export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} + export HF_TOKEN=${HF_TOKEN} export NGINX_PORT=80 export host_ip=${ip_address} source ./set_env.sh From 8886dbbef484c376e01ccc516264b3e981954032 Mon Sep 17 00:00:00 2001 From: ZePan110 Date: Mon, 9 Jun 2025 09:14:51 +0800 Subject: [PATCH 097/217] Fix shellcheck issues and update secrets TOKEN name (#2043) Signed-off-by: ZePan110 --- .github/env/_build_image.sh | 1 + .set_env.sh | 1 + HybridRAG/docker_compose/intel/hpu/gaudi/compose.yaml | 10 +++++----- HybridRAG/docker_compose/intel/hpu/gaudi/set_env.sh | 3 +-- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/.github/env/_build_image.sh b/.github/env/_build_image.sh index d559137fed..b83b4b0f65 100644 --- a/.github/env/_build_image.sh +++ b/.github/env/_build_image.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2025 Intel Corporation # SPDX-License-Identifier: Apache-2.0 diff --git a/.set_env.sh b/.set_env.sh index 4480362d81..f0c6f4e400 100644 --- a/.set_env.sh +++ b/.set_env.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 # diff --git a/HybridRAG/docker_compose/intel/hpu/gaudi/compose.yaml b/HybridRAG/docker_compose/intel/hpu/gaudi/compose.yaml 
index 2ae35cf5ed..d296cfeb00 100644 --- a/HybridRAG/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/HybridRAG/docker_compose/intel/hpu/gaudi/compose.yaml @@ -16,7 +16,7 @@ services: http_proxy: ${http_proxy} https_proxy: ${https_proxy} INDEX_NAME: ${INDEX_NAME} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG:-False} HABANA_VISIBLE_DEVICES: all @@ -90,7 +90,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} tei-embedding-service: image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.6 container_name: tei-embedding-server @@ -120,7 +120,7 @@ services: REDIS_HOST: redis-vector-db INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80 - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} LOGFLAG: ${LOGFLAG} RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped @@ -136,7 +136,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HUGGINGFACEHUB_API_TOKEN: ${HF_TOKEN} HF_HUB_DISABLE_PROGRESS_BARS: 1 HF_HUB_ENABLE_HF_TRANSFER: 0 command: --model-id ${RERANK_MODEL_ID} --auto-truncate @@ -152,7 +152,7 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} + HF_TOKEN: ${HF_TOKEN} LLM_MODEL_ID: ${LLM_MODEL_ID} VLLM_TORCH_PROFILER_DIR: "/mnt" VLLM_CPU_KVCACHE_SPACE: 40 diff --git a/HybridRAG/docker_compose/intel/hpu/gaudi/set_env.sh b/HybridRAG/docker_compose/intel/hpu/gaudi/set_env.sh index a828fb565d..c743b61494 100644 --- a/HybridRAG/docker_compose/intel/hpu/gaudi/set_env.sh +++ b/HybridRAG/docker_compose/intel/hpu/gaudi/set_env.sh @@ -4,8 +4,7 @@ # 
SPDX-License-Identifier: Apache-2.0 export host_ip=$(hostname -I | awk '{print $1}') -export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN} -export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN} +export HF_TOKEN=${HF_TOKEN} export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5" export RERANK_MODEL_ID="BAAI/bge-reranker-base" From 260650717445eb6964b04229547520db1a00d53f Mon Sep 17 00:00:00 2001 From: Zhu Yongbo Date: Mon, 9 Jun 2025 09:32:13 +0800 Subject: [PATCH 098/217] add new feature for EC-RAG (#2013) Signed-off-by: Yongbozzz --- .github/code_spell_ignore.txt | 3 +- EdgeCraftRAG/Dockerfile.server | 3 +- EdgeCraftRAG/README.md | 64 ++- EdgeCraftRAG/chatqna.py | 1 + .../docker_compose/intel/gpu/arc/compose.yaml | 32 +- .../intel/gpu/arc/compose_gradio.yaml | 4 +- .../intel/gpu/arc/compose_vllm.yaml | 94 ----- .../intel/gpu/arc/compose_vllm_multi-arc.yaml | 72 +++- .../docker_compose/intel/gpu/arc/set_env.sh | 11 + EdgeCraftRAG/docker_image_build/build.yaml | 5 - EdgeCraftRAG/edgecraftrag/VERSION | 1 + EdgeCraftRAG/edgecraftrag/api/v1/chatqna.py | 44 +- EdgeCraftRAG/edgecraftrag/api/v1/data.py | 6 +- .../edgecraftrag/api/v1/knowledge_base.py | 136 +++++++ EdgeCraftRAG/edgecraftrag/api/v1/prompt.py | 58 +++ EdgeCraftRAG/edgecraftrag/api_schema.py | 14 +- EdgeCraftRAG/edgecraftrag/components/data.py | 2 +- .../edgecraftrag/components/generator.py | 41 +- .../edgecraftrag/components/knowledge_base.py | 51 +++ .../edgecraftrag/components/pipeline.py | 10 +- EdgeCraftRAG/edgecraftrag/context.py | 5 + .../controllers/knowledge_basemgr.py | 73 ++++ EdgeCraftRAG/edgecraftrag/requirements.txt | 0 EdgeCraftRAG/edgecraftrag/server.py | 4 +- EdgeCraftRAG/edgecraftrag/utils.py | 45 ++- EdgeCraftRAG/nginx/nginx.conf.template | 37 ++ ...vllm.json => test_pipeline_ipex_vllm.json} | 8 +- .../tests/test_compose_vllm_on_arc.sh | 52 +-- .../tests/test_pipeline_ipex_vllm.json | 44 ++ EdgeCraftRAG/ui/vue/.env.development | 4 +- EdgeCraftRAG/ui/vue/components.d.ts | 10 + 
EdgeCraftRAG/ui/vue/nginx.conf | 3 +- EdgeCraftRAG/ui/vue/package.json | 1 + EdgeCraftRAG/ui/vue/src/api/chatbot/index.ts | 25 +- .../ui/vue/src/api/knowledgeBase/index.ts | 76 ++++ EdgeCraftRAG/ui/vue/src/api/pipeline/index.ts | 8 +- EdgeCraftRAG/ui/vue/src/api/request.ts | 8 +- .../ui/vue/src/assets/iconFont/iconfont.css | 94 ++++- .../ui/vue/src/assets/iconFont/iconfont.js | 86 ++-- .../ui/vue/src/assets/iconFont/iconfont.json | 154 +++++++ .../ui/vue/src/assets/iconFont/iconfont.ttf | Bin 6480 -> 11444 bytes .../ui/vue/src/assets/iconFont/iconfont.woff | Bin 4332 -> 7448 bytes .../ui/vue/src/assets/iconFont/iconfont.woff2 | Bin 3596 -> 6272 bytes .../ui/vue/src/components/SvgIcon.vue | 2 +- EdgeCraftRAG/ui/vue/src/i18n/en.ts | 218 +++++++++- EdgeCraftRAG/ui/vue/src/i18n/index.ts | 8 +- EdgeCraftRAG/ui/vue/src/i18n/zh.ts | 238 ++++++++++- EdgeCraftRAG/ui/vue/src/layout/Header.vue | 70 +++- EdgeCraftRAG/ui/vue/src/layout/Main.vue | 35 +- EdgeCraftRAG/ui/vue/src/main.ts | 2 +- EdgeCraftRAG/ui/vue/src/store/theme.ts | 4 + EdgeCraftRAG/ui/vue/src/theme/ant.less | 3 + EdgeCraftRAG/ui/vue/src/theme/common.less | 80 ++++ EdgeCraftRAG/ui/vue/src/theme/layout.less | 8 +- EdgeCraftRAG/ui/vue/src/theme/markdown.less | 80 +++- EdgeCraftRAG/ui/vue/src/theme/variables.less | 70 +++- EdgeCraftRAG/ui/vue/src/utils/common.ts | 26 ++ .../ui/vue/src/utils/customRenderer.ts | 128 ++++++ EdgeCraftRAG/ui/vue/src/utils/mitt.ts | 8 + .../src/views/chatbot/components/Chatbot.vue | 275 ------------- .../views/chatbot/components/Chatbot/Chat.vue | 376 ++++++++++++++++++ .../components/{ => Chatbot}/ConfigDrawer.vue | 67 ++-- .../components/{ => Chatbot}/MessageItem.vue | 72 ++-- .../components/{ => Chatbot}/SseService.ts | 4 +- .../views/chatbot/components/Chatbot/index.ts | 8 + .../chatbot/components/Chatbot/index.vue | 91 +++++ .../src/views/chatbot/components/Header.vue | 118 ------ .../KnowledgeBase/KnowledgeDetial.vue | 295 ++++++++++++++ .../components/KnowledgeBase/UpdateDialog.vue 
| 162 ++++++++ .../chatbot/components/KnowledgeBase/index.ts | 6 + .../components/KnowledgeBase/index.vue | 302 ++++++++++++++ .../views/chatbot/components/UploadFile.vue | 276 ------------- .../vue/src/views/chatbot/components/index.ts | 11 +- .../ui/vue/src/views/chatbot/index.vue | 304 ++++++++++---- EdgeCraftRAG/ui/vue/src/views/chatbot/type.ts | 2 +- EdgeCraftRAG/ui/vue/src/views/error/404.vue | 5 +- .../pipeline/components/Configuration.vue | 54 --- .../pipeline/components/DetailDrawer.vue | 92 +++-- .../pipeline/components/ImportDialog.vue | 20 +- .../views/pipeline/components/QuickStart.vue | 21 +- .../src/views/pipeline/components/System.vue | 12 +- .../views/pipeline/components/SystemChart.vue | 32 +- .../src/views/pipeline/components/Table.vue | 38 +- .../components/UpdateDialog/Activated.vue | 10 +- .../components/UpdateDialog/Basic.vue | 15 +- .../components/UpdateDialog/CreateDialog.vue | 26 +- .../components/UpdateDialog/EditDialog.vue | 26 +- .../components/UpdateDialog/Generator.vue | 45 ++- .../components/UpdateDialog/Indexer.vue | 33 +- .../components/UpdateDialog/NodeParser.vue | 51 ++- .../components/UpdateDialog/PostProcessor.vue | 37 +- .../components/UpdateDialog/Retriever.vue | 37 +- .../src/views/pipeline/components/index.ts | 3 +- .../ui/vue/src/views/pipeline/enum.ts | 23 +- .../ui/vue/src/views/pipeline/index.vue | 10 +- 95 files changed, 3878 insertions(+), 1450 deletions(-) mode change 100644 => 100755 EdgeCraftRAG/Dockerfile.server mode change 100644 => 100755 EdgeCraftRAG/chatqna.py mode change 100644 => 100755 EdgeCraftRAG/docker_compose/intel/gpu/arc/compose.yaml mode change 100644 => 100755 EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_gradio.yaml delete mode 100644 EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm.yaml mode change 100644 => 100755 EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm_multi-arc.yaml mode change 100644 => 100755 EdgeCraftRAG/docker_compose/intel/gpu/arc/set_env.sh create mode 100755 
EdgeCraftRAG/edgecraftrag/VERSION mode change 100644 => 100755 EdgeCraftRAG/edgecraftrag/api/v1/chatqna.py create mode 100755 EdgeCraftRAG/edgecraftrag/api/v1/knowledge_base.py create mode 100644 EdgeCraftRAG/edgecraftrag/api/v1/prompt.py mode change 100644 => 100755 EdgeCraftRAG/edgecraftrag/components/data.py mode change 100644 => 100755 EdgeCraftRAG/edgecraftrag/components/generator.py create mode 100644 EdgeCraftRAG/edgecraftrag/components/knowledge_base.py create mode 100644 EdgeCraftRAG/edgecraftrag/controllers/knowledge_basemgr.py mode change 100644 => 100755 EdgeCraftRAG/edgecraftrag/requirements.txt mode change 100644 => 100755 EdgeCraftRAG/edgecraftrag/utils.py create mode 100755 EdgeCraftRAG/nginx/nginx.conf.template rename EdgeCraftRAG/tests/configs/{test_pipeline_vllm.json => test_pipeline_ipex_vllm.json} (82%) create mode 100644 EdgeCraftRAG/tests/test_pipeline_ipex_vllm.json create mode 100644 EdgeCraftRAG/ui/vue/src/api/knowledgeBase/index.ts create mode 100644 EdgeCraftRAG/ui/vue/src/utils/customRenderer.ts create mode 100644 EdgeCraftRAG/ui/vue/src/utils/mitt.ts delete mode 100644 EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot.vue create mode 100644 EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot/Chat.vue rename EdgeCraftRAG/ui/vue/src/views/chatbot/components/{ => Chatbot}/ConfigDrawer.vue (75%) rename EdgeCraftRAG/ui/vue/src/views/chatbot/components/{ => Chatbot}/MessageItem.vue (77%) rename EdgeCraftRAG/ui/vue/src/views/chatbot/components/{ => Chatbot}/SseService.ts (92%) create mode 100644 EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot/index.ts create mode 100644 EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot/index.vue delete mode 100644 EdgeCraftRAG/ui/vue/src/views/chatbot/components/Header.vue create mode 100644 EdgeCraftRAG/ui/vue/src/views/chatbot/components/KnowledgeBase/KnowledgeDetial.vue create mode 100644 EdgeCraftRAG/ui/vue/src/views/chatbot/components/KnowledgeBase/UpdateDialog.vue create mode 
100644 EdgeCraftRAG/ui/vue/src/views/chatbot/components/KnowledgeBase/index.ts create mode 100644 EdgeCraftRAG/ui/vue/src/views/chatbot/components/KnowledgeBase/index.vue delete mode 100644 EdgeCraftRAG/ui/vue/src/views/chatbot/components/UploadFile.vue delete mode 100644 EdgeCraftRAG/ui/vue/src/views/pipeline/components/Configuration.vue diff --git a/.github/code_spell_ignore.txt b/.github/code_spell_ignore.txt index 3c59d07a31..c72099bfd8 100644 --- a/.github/code_spell_ignore.txt +++ b/.github/code_spell_ignore.txt @@ -1,3 +1,4 @@ ModelIn modelin -pressEnter \ No newline at end of file +pressEnter +PromptIn \ No newline at end of file diff --git a/EdgeCraftRAG/Dockerfile.server b/EdgeCraftRAG/Dockerfile.server old mode 100644 new mode 100755 index 13efc304ca..f2bbf49252 --- a/EdgeCraftRAG/Dockerfile.server +++ b/EdgeCraftRAG/Dockerfile.server @@ -40,11 +40,10 @@ USER user WORKDIR /home/user/edgecraftrag RUN pip install --no-cache-dir --upgrade pip setuptools==70.0.0 && \ - pip install --no-cache-dir --extra-index-url https://download.pytorch.org/whl/cpu -r requirements.txt + pip install --no-cache-dir --extra-index-url https://download.pytorch.org/whl/cpu -r requirements.txt WORKDIR /home/user/ RUN git clone https://github.com/openvinotoolkit/openvino.genai.git genai ENV PYTHONPATH="$PYTHONPATH:/home/user/genai/tools/llm_bench" - ENTRYPOINT ["python", "-m", "edgecraftrag.server"] \ No newline at end of file diff --git a/EdgeCraftRAG/README.md b/EdgeCraftRAG/README.md index 93546869da..4b8603bbec 100755 --- a/EdgeCraftRAG/README.md +++ b/EdgeCraftRAG/README.md @@ -106,19 +106,53 @@ docker compose -f compose_vllm.yaml up -d The docker file can be pulled automatically‌, you can also pull the image manually: ```bash -docker pull intelanalytics/ipex-llm-serving-xpu:latest +docker pull intelanalytics/ipex-llm-serving-xpu:0.8.3-b18 +``` + +Generate your nginx config file + +```bash +export HOST_IP=#your host ip +export NGINX_PORT=8086 #set port for nginx +# If you are 
running with 1 vllm container: +export NGINX_PORT_0=8100 # you can change the port to your preferrance +export NGINX_PORT_1=8100 # you can change the port to your preferrance +# If you are running with 2 vllm containers: +export NGINX_PORT_0=8100 # you can change the port to your preferrance +export NGINX_PORT_1=8200 # you can change the port to your preferrance +# Generate your nginx config file +envsubst < GenAIExamples/EdgeCraftRAG/nginx/nginx.conf.template > /nginx.conf +# set NGINX_CONFIG_PATH +export NGINX_CONFIG_PATH="/nginx.conf" ``` Set up Additional Environment Variables and start with compose_vllm_multi-arc.yaml ```bash +# For 1 vLLM container(1 DP) with multi Intel Arc GPUs +export vLLM_ENDPOINT="http://${HOST_IP}:${NGINX_PORT}" +export LLM_MODEL_PATH=#your model path export LLM_MODEL=#your model id -export VLLM_SERVICE_PORT=8008 -export vLLM_ENDPOINT="http://${HOST_IP}:${VLLM_SERVICE_PORT}" +export CONTAINER_COUNT="single_container" +export TENSOR_PARALLEL_SIZE=#your Intel Arc GPU number to do inference +export SELECTED_XPU_0= # example for selecting 2 Arc GPUs: SELECTED_XPU_0=0,1 +``` + +```bash +# For 2 vLLM container(2 DP) with multi Intel Arc GPUs +export vLLM_ENDPOINT="http://${HOST_IP}:${NGINX_PORT}" export LLM_MODEL_PATH=#your model path +export LLM_MODEL=#your model id +export CONTAINER_COUNT="multi_container" export TENSOR_PARALLEL_SIZE=#your Intel Arc GPU number to do inference +export SELECTED_XPU_0= +export SELECTED_XPU_1= +``` + +start with compose_vllm_multi-arc.yaml -docker compose -f compose_vllm_multi-arc.yaml up -d +```bash +docker compose -f docker_compose/intel/gpu/arc/compose_vllm_multi-arc.yaml --profile ${CONTAINER_COUNT} up -d ``` ### ChatQnA with LLM Example (Command Line) @@ -355,8 +389,26 @@ curl -X PATCH http://${HOST_IP}:16010/v1/data/files/test.pdf -H "Content-Type: a ### System Prompt Management -#### Use custom system prompt +#### Get system prompt + +```bash +curl -X GET http://${HOST_IP}:16010/v1/chatqna/prompt -H 
"Content-Type: application/json" | jq '.' +``` + +#### Update system prompt + +```bash +curl -X POST http://${HOST_IP}:16010/v1/chatqna/prompt -H "Content-Type: application/json" -d '{"prompt":"This is a template prompt"}' | jq '.' +``` + +#### Reset system prompt + +```bash +curl -X POST http://${HOST_IP}:16010/v1/chatqna/prompt/reset -H "Content-Type: application/json" | jq '.' +``` + +#### Use custom system prompt file ```bash -curl -X POST http://${HOST_IP}:16010/v1/chatqna/prompt -H "Content-Type: multipart/form-data" -F "file=@your_prompt_file.txt" +curl -X POST http://${HOST_IP}:16010/v1/chatqna/prompt-file -H "Content-Type: multipart/form-data" -F "file=@your_prompt_file.txt" ``` diff --git a/EdgeCraftRAG/chatqna.py b/EdgeCraftRAG/chatqna.py old mode 100644 new mode 100755 index bc6f0a6437..1073a66eb4 --- a/EdgeCraftRAG/chatqna.py +++ b/EdgeCraftRAG/chatqna.py @@ -43,6 +43,7 @@ def add_remote_service(self): async def handle_request(self, request: Request): input = await request.json() stream_opt = input.get("stream", False) + input["user"] = request.headers.get("sessionid", None) chat_request = ChatCompletionRequest.parse_obj(input) parameters = LLMParams( max_tokens=chat_request.max_tokens if chat_request.max_tokens else 1024, diff --git a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose.yaml b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose.yaml old mode 100644 new mode 100755 index e4465e0e7f..46271b1c8e --- a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose.yaml +++ b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose.yaml @@ -18,6 +18,7 @@ services: - ${UI_TMPFILE_PATH:-${PWD}}:/home/user/ui_cache - ${HF_CACHE:-${HOME}/.cache}:/home/user/.cache - ${PROMPT_PATH:-${PWD}}:/templates/custom + restart: always ports: - ${PIPELINE_SERVICE_PORT:-16010}:${PIPELINE_SERVICE_PORT:-16010} devices: @@ -36,6 +37,7 @@ services: MEGA_SERVICE_HOST_IP: ${MEGA_SERVICE_HOST_IP:-${HOST_IP}} PIPELINE_SERVICE_PORT: ${PIPELINE_SERVICE_PORT:-16010} PIPELINE_SERVICE_HOST_IP: 
${PIPELINE_SERVICE_HOST_IP:-${HOST_IP}} + restart: always ports: - ${MEGA_SERVICE_PORT:-16011}:${MEGA_SERVICE_PORT:-16011} depends_on: @@ -55,40 +57,12 @@ services: UI_SERVICE_HOST_IP: ${UI_SERVICE_HOST_IP:-0.0.0.0} volumes: - ${UI_TMPFILE_PATH:-${PWD}}:/home/user/ui_cache + restart: always ports: - ${UI_SERVICE_PORT:-8082}:${UI_SERVICE_PORT:-8082} - restart: always depends_on: - server - ecrag - # vllm-openvino-server: - # container_name: vllm-openvino-server - # image: opea/vllm-arc:latest - # ports: - # - ${VLLM_SERVICE_PORT:-8008}:80 - # environment: - # HTTPS_PROXY: ${https_proxy} - # HTTP_PROXY: ${https_proxy} - # VLLM_OPENVINO_DEVICE: GPU - # HF_ENDPOINT: ${HF_ENDPOINT} - # HF_TOKEN: ${HF_TOKEN} - # volumes: - # - /dev/dri/by-path:/dev/dri/by-path - # - $HOME/.cache/huggingface:/root/.cache/huggingface - # devices: - # - /dev/dri - # group_add: - # - ${VIDEOGROUPID:-44} - # - ${RENDERGROUPID:-109} - # entrypoint: /bin/bash -c "\ - # cd / && \ - # export VLLM_CPU_KVCACHE_SPACE=50 && \ - # export VLLM_OPENVINO_ENABLE_QUANTIZED_WEIGHTS=ON && \ - # python3 -m vllm.entrypoints.openai.api_server \ - # --model '${LLM_MODEL}' \ - # --max_model_len=1024 \ - # --host 0.0.0.0 \ - # --port 80" networks: default: driver: bridge diff --git a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_gradio.yaml b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_gradio.yaml old mode 100644 new mode 100755 index f753a17460..13c029687e --- a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_gradio.yaml +++ b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_gradio.yaml @@ -18,6 +18,7 @@ services: - ${UI_TMPFILE_PATH:-${PWD}}:/home/user/ui_cache - ${HF_CACHE:-${HOME}/.cache}:/home/user/.cache - ${PROMPT_PATH:-${PWD}}:/templates/custom + restart: always ports: - ${PIPELINE_SERVICE_PORT:-16010}:${PIPELINE_SERVICE_PORT:-16010} devices: @@ -36,6 +37,7 @@ services: MEGA_SERVICE_HOST_IP: ${MEGA_SERVICE_HOST_IP:-${HOST_IP}} PIPELINE_SERVICE_PORT: ${PIPELINE_SERVICE_PORT:-16010} 
PIPELINE_SERVICE_HOST_IP: ${PIPELINE_SERVICE_HOST_IP:-${HOST_IP}} + restart: always ports: - ${MEGA_SERVICE_PORT:-16011}:${MEGA_SERVICE_PORT:-16011} depends_on: @@ -55,9 +57,9 @@ services: UI_SERVICE_HOST_IP: ${UI_SERVICE_HOST_IP:-0.0.0.0} volumes: - ${UI_TMPFILE_PATH:-${PWD}}:/home/user/ui_cache + restart: always ports: - ${UI_SERVICE_PORT:-8082}:${UI_SERVICE_PORT:-8082} - restart: always depends_on: - server - ecrag diff --git a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm.yaml b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm.yaml deleted file mode 100644 index d1811a4aca..0000000000 --- a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm.yaml +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright (C) 2024 Intel Corporation -# SPDX-License-Identifier: Apache-2.0 - -services: - server: - image: ${REGISTRY:-opea}/edgecraftrag-server:${TAG:-latest} - container_name: edgecraftrag-server - environment: - no_proxy: ${no_proxy} - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} - HF_ENDPOINT: ${HF_ENDPOINT} - vLLM_ENDPOINT: ${vLLM_ENDPOINT} - ENABLE_BENCHMARK: ${ENABLE_BENCHMARK:-false} - volumes: - - ${MODEL_PATH:-${PWD}}:/home/user/models - - ${DOC_PATH:-${PWD}}:/home/user/docs - - ${UI_TMPFILE_PATH:-${PWD}}:/home/user/ui_cache - - ${HF_CACHE:-${HOME}/.cache}:/home/user/.cache - - ${PROMPT_PATH:-${PWD}}:/templates/custom - ports: - - ${PIPELINE_SERVICE_PORT:-16010}:${PIPELINE_SERVICE_PORT:-16010} - devices: - - /dev/dri:/dev/dri - group_add: - - ${VIDEOGROUPID:-44} - - ${RENDERGROUPID:-109} - ecrag: - image: ${REGISTRY:-opea}/edgecraftrag:${TAG:-latest} - container_name: edgecraftrag - environment: - no_proxy: ${no_proxy} - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} - MEGA_SERVICE_PORT: ${MEGA_SERVICE_PORT:-16011} - MEGA_SERVICE_HOST_IP: ${MEGA_SERVICE_HOST_IP:-${HOST_IP}} - PIPELINE_SERVICE_PORT: ${PIPELINE_SERVICE_PORT:-16010} - PIPELINE_SERVICE_HOST_IP: ${PIPELINE_SERVICE_HOST_IP:-${HOST_IP}} - ports: - - 
${MEGA_SERVICE_PORT:-16011}:${MEGA_SERVICE_PORT:-16011} - depends_on: - - server - ui: - image: ${REGISTRY:-opea}/edgecraftrag-ui:${TAG:-latest} - container_name: edgecraftrag-ui - environment: - no_proxy: ${no_proxy} - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} - MEGA_SERVICE_PORT: ${MEGA_SERVICE_PORT:-16011} - MEGA_SERVICE_HOST_IP: ${MEGA_SERVICE_HOST_IP:-${HOST_IP}} - PIPELINE_SERVICE_PORT: ${PIPELINE_SERVICE_PORT:-16010} - PIPELINE_SERVICE_HOST_IP: ${PIPELINE_SERVICE_HOST_IP:-${HOST_IP}} - UI_SERVICE_PORT: ${UI_SERVICE_PORT:-8082} - UI_SERVICE_HOST_IP: ${UI_SERVICE_HOST_IP:-0.0.0.0} - volumes: - - ${UI_TMPFILE_PATH:-${PWD}}:/home/user/ui_cache - ports: - - ${UI_SERVICE_PORT:-8082}:${UI_SERVICE_PORT:-8082} - restart: always - depends_on: - - server - - ecrag - vllm-openvino-server: - container_name: vllm-openvino-server - image: ${REGISTRY:-opea}/vllm-arc:${TAG:-latest} - ports: - - ${VLLM_SERVICE_PORT:-8008}:80 - environment: - no_proxy: ${no_proxy} - http_proxy: ${http_proxy} - https_proxy: ${https_proxy} - VLLM_OPENVINO_DEVICE: GPU - HF_ENDPOINT: ${HF_ENDPOINT} - HF_TOKEN: ${HF_TOKEN} - volumes: - - ${HF_CACHE:-${HOME}/.cache}:/root/.cache - devices: - - /dev/dri - group_add: - - ${VIDEOGROUPID:-44} - - ${RENDERGROUPID:-109} - entrypoint: /bin/bash -c "\ - cd / && \ - export VLLM_CPU_KVCACHE_SPACE=50 && \ - export VLLM_OPENVINO_ENABLE_QUANTIZED_WEIGHTS=ON && \ - python3 -m vllm.entrypoints.openai.api_server \ - --model '${LLM_MODEL}' \ - --max_model_len=4096 \ - --host 0.0.0.0 \ - --port 80" -networks: - default: - driver: bridge diff --git a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm_multi-arc.yaml b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm_multi-arc.yaml old mode 100644 new mode 100755 index d82f9c9747..5453ae1aa4 --- a/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm_multi-arc.yaml +++ b/EdgeCraftRAG/docker_compose/intel/gpu/arc/compose_vllm_multi-arc.yaml @@ -19,6 +19,7 @@ services: - 
${UI_TMPFILE_PATH:-${PWD}}:/home/user/ui_cache - ${HF_CACHE:-${HOME}/.cache}:/home/user/.cache - ${PROMPT_PATH:-${PWD}}:/templates/custom + restart: always ports: - ${PIPELINE_SERVICE_PORT:-16010}:${PIPELINE_SERVICE_PORT:-16010} devices: @@ -26,6 +27,9 @@ services: group_add: - ${VIDEOGROUPID:-44} - ${RENDERGROUPID:-109} + profiles: + - single_container + - multi_container ecrag: image: ${REGISTRY:-opea}/edgecraftrag:${TAG:-latest} container_name: edgecraftrag @@ -37,10 +41,26 @@ services: MEGA_SERVICE_HOST_IP: ${MEGA_SERVICE_HOST_IP:-${HOST_IP}} PIPELINE_SERVICE_PORT: ${PIPELINE_SERVICE_PORT:-16010} PIPELINE_SERVICE_HOST_IP: ${PIPELINE_SERVICE_HOST_IP:-${HOST_IP}} + restart: always ports: - ${MEGA_SERVICE_PORT:-16011}:${MEGA_SERVICE_PORT:-16011} depends_on: - server + profiles: + - single_container + - multi_container + nginx: + image: nginx:latest + restart: always + ports: + - ${NGINX_PORT:-8086}:8086 + volumes: + - ${NGINX_CONFIG_PATH:-${PWD}}:/etc/nginx/nginx.conf + depends_on: + - server + profiles: + - single_container + - multi_container ui: image: ${REGISTRY:-opea}/edgecraftrag-ui:${TAG:-latest} container_name: edgecraftrag-ui @@ -56,18 +76,54 @@ services: UI_SERVICE_HOST_IP: ${UI_SERVICE_HOST_IP:-0.0.0.0} volumes: - ${UI_TMPFILE_PATH:-${PWD}}:/home/user/ui_cache + restart: always ports: - ${UI_SERVICE_PORT:-8082}:${UI_SERVICE_PORT:-8082} - restart: always depends_on: - server - ecrag - llm-serving-xpu: - container_name: ipex-llm-serving-xpu-container - image: intelanalytics/ipex-llm-serving-xpu:latest + profiles: + - single_container + - multi_container + llm-serving-xpu-0: + container_name: ipex-llm-serving-xpu-container-0 + image: intelanalytics/ipex-llm-serving-xpu:0.8.3-b18 + privileged: true + restart: always + ports: + - ${VLLM_SERVICE_PORT_0:-8100}:${VLLM_SERVICE_PORT_0:-8100} + group_add: + - video + - ${VIDEOGROUPID:-44} + - ${RENDERGROUPID:-109} + volumes: + - ${LLM_MODEL_PATH:-${PWD}}:/llm/models + devices: + - /dev/dri + environment: + 
no_proxy: ${no_proxy} + http_proxy: ${http_proxy} + https_proxy: ${https_proxy} + HF_ENDPOINT: ${HF_ENDPOINT} + MODEL_PATH: "/llm/models" + SERVED_MODEL_NAME: ${LLM_MODEL} + TENSOR_PARALLEL_SIZE: ${TENSOR_PARALLEL_SIZE:-1} + PORT: ${VLLM_SERVICE_PORT_0:-8100} + ZE_AFFINITY_MASK: ${SELECTED_XPU_0:-0} + shm_size: '32g' + entrypoint: /bin/bash -c "\ + cd /llm && \ + bash start-vllm-service.sh" + profiles: + - single_container + - multi_container + llm-serving-xpu-1: + container_name: ipex-llm-serving-xpu-container-1 + image: intelanalytics/ipex-llm-serving-xpu:0.8.3-b18 privileged: true + restart: always ports: - - ${VLLM_SERVICE_PORT:-8008}:8000 + - ${VLLM_SERVICE_PORT_1:-8200}:${VLLM_SERVICE_PORT_1:-8200} group_add: - video - ${VIDEOGROUPID:-44} @@ -84,10 +140,14 @@ services: MODEL_PATH: "/llm/models" SERVED_MODEL_NAME: ${LLM_MODEL} TENSOR_PARALLEL_SIZE: ${TENSOR_PARALLEL_SIZE:-1} - shm_size: '16g' + PORT: ${VLLM_SERVICE_PORT_1:-8200} + ZE_AFFINITY_MASK: ${SELECTED_XPU_1:-1} + shm_size: '32g' entrypoint: /bin/bash -c "\ cd /llm && \ bash start-vllm-service.sh" + profiles: + - multi_container networks: default: driver: bridge diff --git a/EdgeCraftRAG/docker_compose/intel/gpu/arc/set_env.sh b/EdgeCraftRAG/docker_compose/intel/gpu/arc/set_env.sh old mode 100644 new mode 100755 index c70928a492..7655f09e8c --- a/EdgeCraftRAG/docker_compose/intel/gpu/arc/set_env.sh +++ b/EdgeCraftRAG/docker_compose/intel/gpu/arc/set_env.sh @@ -15,3 +15,14 @@ export HF_ENDPOINT=${HF_ENDPOINT} export vLLM_ENDPOINT=${vLLM_ENDPOINT} export HF_TOKEN=${HF_TOKEN} export no_proxy="localhost, 127.0.0.1, 192.168.1.1" +export UI_UPLOAD_PATH=${UI_UPLOAD_PATH} +export LLM_MODEL_PATH=${LLM_MODEL_PATH} +export NGINX_PORT_0=${NGINX_PORT_0} +export NGINX_PORT_1=${NGINX_PORT_1} +export VLLM_SERVICE_PORT_0=${VLLM_SERVICE_PORT_0} +export VLLM_SERVICE_PORT_1=${VLLM_SERVICE_PORT_1} +export TENSOR_PARALLEL_SIZE=${TENSOR_PARALLEL_SIZE} +export NGINX_CONFIG_PATH=${NGINX_CONFIG_PATH} +export 
SELECTED_XPU_0=${SELECTED_XPU_0} +export SELECTED_XPU_1=${SELECTED_XPU_1} +export vLLM_ENDPOINT=${vLLM_ENDPOINT} diff --git a/EdgeCraftRAG/docker_image_build/build.yaml b/EdgeCraftRAG/docker_image_build/build.yaml index 18ad867c75..cde5d49778 100644 --- a/EdgeCraftRAG/docker_image_build/build.yaml +++ b/EdgeCraftRAG/docker_image_build/build.yaml @@ -30,8 +30,3 @@ services: dockerfile: ./ui/docker/Dockerfile.gradio extends: edgecraftrag image: ${REGISTRY:-opea}/edgecraftrag-ui-gradio:${TAG:-latest} - vllm-arc: - build: - context: GenAIComps - dockerfile: comps/third_parties/vllm/src/Dockerfile.intel_gpu - image: ${REGISTRY:-opea}/vllm-arc:${TAG:-latest} diff --git a/EdgeCraftRAG/edgecraftrag/VERSION b/EdgeCraftRAG/edgecraftrag/VERSION new file mode 100755 index 0000000000..40766d6bb4 --- /dev/null +++ b/EdgeCraftRAG/edgecraftrag/VERSION @@ -0,0 +1 @@ +25.05-Release diff --git a/EdgeCraftRAG/edgecraftrag/api/v1/chatqna.py b/EdgeCraftRAG/edgecraftrag/api/v1/chatqna.py old mode 100644 new mode 100755 index d0236c82e8..be7aee3d58 --- a/EdgeCraftRAG/edgecraftrag/api/v1/chatqna.py +++ b/EdgeCraftRAG/edgecraftrag/api/v1/chatqna.py @@ -5,6 +5,7 @@ from comps.cores.proto.api_protocol import ChatCompletionRequest from edgecraftrag.api_schema import RagOut from edgecraftrag.context import ctx +from edgecraftrag.utils import serialize_contexts, set_current_session from fastapi import FastAPI, File, HTTPException, UploadFile, status from fastapi.responses import StreamingResponse @@ -19,7 +20,7 @@ async def retrieval(request: ChatCompletionRequest): if nodeswithscore is not None: ret = [] for n in nodeswithscore: - ret.append((n.node.node_id, n.node.text, n.score)) + ret.append((n.node.node_id, n.node.text, round(float(n.score), 8))) return ret return None @@ -29,14 +30,16 @@ async def retrieval(request: ChatCompletionRequest): @chatqna_app.post(path="/v1/chatqna") async def chatqna(request: ChatCompletionRequest): try: + sessionid = request.user + set_current_session(sessionid) 
generator = ctx.get_pipeline_mgr().get_active_pipeline().generator if generator: request.model = generator.model_id if request.stream: - ret, retri_res = ctx.get_pipeline_mgr().run_pipeline(chat_request=request) + ret, contexts = ctx.get_pipeline_mgr().run_pipeline(chat_request=request) return ret else: - ret, retri_res = ctx.get_pipeline_mgr().run_pipeline(chat_request=request) + ret, contexts = ctx.get_pipeline_mgr().run_pipeline(chat_request=request) return str(ret) except Exception as e: raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)) @@ -46,7 +49,7 @@ async def chatqna(request: ChatCompletionRequest): @chatqna_app.post(path="/v1/ragqna") async def ragqna(request: ChatCompletionRequest): try: - res, retri_res = ctx.get_pipeline_mgr().run_pipeline(chat_request=request) + res, contexts = ctx.get_pipeline_mgr().run_pipeline(chat_request=request) if isinstance(res, GeneratedDoc): res = res.text elif isinstance(res, StreamingResponse): @@ -55,36 +58,9 @@ async def ragqna(request: ChatCompletionRequest): collected_data.append(chunk) res = "".join(collected_data) - ragout = RagOut(query=request.messages, contexts=[], response=str(res)) - for n in retri_res: - origin_text = n.node.get_text() - ragout.contexts.append(origin_text.strip()) - return ragout - except Exception as e: - raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)) - + serialized_contexts = serialize_contexts(contexts) -# Upload prompt file for LLM ChatQnA -@chatqna_app.post(path="/v1/chatqna/prompt") -async def load_prompt(file: UploadFile = File(...)): - try: - generator = ctx.get_pipeline_mgr().get_active_pipeline().generator - if generator: - content = await file.read() - prompt_str = content.decode("utf-8") - generator.set_prompt(prompt_str) - return "Set LLM Prompt Successfully" - except Exception as e: - raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)) - - -# Reset prompt for LLM ChatQnA 
-@chatqna_app.post(path="/v1/chatqna/prompt/reset") -async def reset_prompt(): - try: - generator = ctx.get_pipeline_mgr().get_active_pipeline().generator - if generator: - generator.reset_prompt() - return "Reset LLM Prompt Successfully" + ragout = RagOut(query=request.messages, contexts=serialized_contexts, response=str(res)) + return ragout except Exception as e: raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)) diff --git a/EdgeCraftRAG/edgecraftrag/api/v1/data.py b/EdgeCraftRAG/edgecraftrag/api/v1/data.py index 4a584de425..35a1ab3ff4 100755 --- a/EdgeCraftRAG/edgecraftrag/api/v1/data.py +++ b/EdgeCraftRAG/edgecraftrag/api/v1/data.py @@ -94,8 +94,8 @@ async def delete_file(name): # Upload & save a file from UI -@data_app.post(path="/v1/data/file") -async def upload_file(file: UploadFile = File(...)): +@data_app.post(path="/v1/data/file/{file_name}") +async def upload_file(file_name: str, file: UploadFile = File(...)): if ctx.get_pipeline_mgr().get_active_pipeline() is None: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Please activate pipeline and upload the file" @@ -103,7 +103,7 @@ async def upload_file(file: UploadFile = File(...)): try: # DIR for server to save files uploaded by UI UI_DIRECTORY = os.getenv("UI_TMPFILE_PATH", "/home/user/ui_cache") - UPLOAD_DIRECTORY = os.path.join(UI_DIRECTORY, "documents") + UPLOAD_DIRECTORY = os.path.join(UI_DIRECTORY, file_name) os.makedirs(UPLOAD_DIRECTORY, exist_ok=True) file_path = os.path.join(UPLOAD_DIRECTORY, file.filename) with open(file_path, "wb") as buffer: diff --git a/EdgeCraftRAG/edgecraftrag/api/v1/knowledge_base.py b/EdgeCraftRAG/edgecraftrag/api/v1/knowledge_base.py new file mode 100755 index 0000000000..1a4a8e54a3 --- /dev/null +++ b/EdgeCraftRAG/edgecraftrag/api/v1/knowledge_base.py @@ -0,0 +1,136 @@ +# Copyright (C) 2025 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +import os + +from edgecraftrag.api.v1.data import 
add_data +from edgecraftrag.api_schema import DataIn, KnowledgeBaseCreateIn +from edgecraftrag.context import ctx +from fastapi import FastAPI, HTTPException, status + +kb_app = FastAPI() + + +# Get all knowledge bases +@kb_app.get(path="/v1/knowledge") +async def get_all_knowledge_bases(): + try: + return ctx.knowledgemgr.get_all_knowledge_bases() + except Exception as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) + + +# Get the specified knowledge base. +@kb_app.get("/v1/knowledge/{knowledge_name}") +async def get_knowledge_base(knowledge_name: str): + kb = ctx.knowledgemgr.get_knowledge_base_by_name_or_id(knowledge_name) + return kb + + +# Create a new knowledge base +@kb_app.post(path="/v1/knowledge") +async def create_knowledge_base(knowledge: KnowledgeBaseCreateIn): + try: + kb = ctx.knowledgemgr.create_knowledge_base(knowledge) + if kb.active: + await update_knowledge_base_handler(kb.get_file_paths()) + return "Create knowledge base successfully" + except Exception as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) + + +# Delete the knowledge base by name +@kb_app.delete(path="/v1/knowledge/{knowledge_name}") +async def delete_knowledge_base(knowledge_name: str): + try: + return ctx.knowledgemgr.delete_knowledge_base(knowledge_name) + except Exception as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) + + +# Switch the active knowledge base +@kb_app.patch(path="/v1/knowledge/patch") +async def update_knowledge_base(knowledge: KnowledgeBaseCreateIn): + try: + kb = ctx.knowledgemgr.get_knowledge_base_by_name_or_id(knowledge.name) + if knowledge.active is not None and knowledge.active != kb.active: + file_paths = kb.get_file_paths() if knowledge.active else None + await update_knowledge_base_handler(file_paths) + result = ctx.knowledgemgr.update_knowledge_base(knowledge) + return result + except Exception as e: + raise 
HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) + + +# Add a files to the knowledge base +@kb_app.post(path="/v1/knowledge/{knowledge_name}/files") +async def add_file_to_knowledge_base(knowledge_name, file_path: DataIn): + try: + kb = ctx.knowledgemgr.get_knowledge_base_by_name_or_id(knowledge_name) + if os.path.isdir(file_path.local_path): + for root, _, files in os.walk(file_path.local_path): + for file in files: + file_full_path = os.path.join(root, file) + if file_full_path not in kb.get_file_paths(): + kb.add_file_path(file_full_path) + else: + raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="File upload failed") + elif os.path.isfile(file_path.local_path) and file_path.local_path not in kb.get_file_paths(): + kb.add_file_path(file_path.local_path) + else: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="File upload failed") + + active_kb = ctx.knowledgemgr.get_active_knowledge_base() + if active_kb: + if active_kb.name == knowledge_name or active_kb.idx == knowledge_name: + await update_knowledge_base_handler(file_path, add_file=True) + + return "File upload successfully" + except ValueError as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) + + +# Remove a file from the knowledge base +@kb_app.delete(path="/v1/knowledge/{knowledge_name}/files") +async def remove_file_from_knowledge_base(knowledge_name, file_path: DataIn): + try: + kb = ctx.knowledgemgr.get_knowledge_base_by_name_or_id(knowledge_name) + if file_path.local_path in kb.get_file_paths(): + kb.remove_file_path(file_path.local_path) + else: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="File remove failure") + + file_path = kb.get_file_paths() + active_kb = ctx.knowledgemgr.get_active_knowledge_base() + if active_kb: + if active_kb.name == knowledge_name or active_kb.idx == knowledge_name: + await update_knowledge_base_handler(file_path) + return "File deleted successfully" + except 
ValueError as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) + + +# Update knowledge base data +async def update_knowledge_base_handler(file_path=None, add_file: bool = False): + if ctx.get_pipeline_mgr().get_active_pipeline() is None: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Please activate pipeline") + + if add_file and file_path: + return await add_data(file_path) + + elif file_path: + pl = ctx.get_pipeline_mgr().get_active_pipeline() + ctx.get_node_mgr().del_nodes_by_np_idx(pl.node_parser.idx) + pl.indexer.reinitialize_indexer() + pl.update_indexer_to_retriever() + for file in file_path: + request = DataIn(local_path=file) + await add_data(request) + return "Done" + + else: + pl = ctx.get_pipeline_mgr().get_active_pipeline() + ctx.get_node_mgr().del_nodes_by_np_idx(pl.node_parser.idx) + pl.indexer.reinitialize_indexer() + pl.update_indexer_to_retriever() + return "Done" diff --git a/EdgeCraftRAG/edgecraftrag/api/v1/prompt.py b/EdgeCraftRAG/edgecraftrag/api/v1/prompt.py new file mode 100644 index 0000000000..86639a40a7 --- /dev/null +++ b/EdgeCraftRAG/edgecraftrag/api/v1/prompt.py @@ -0,0 +1,58 @@ +# Copyright (C) 2025 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +from edgecraftrag.api_schema import PromptIn +from edgecraftrag.context import ctx +from fastapi import FastAPI, File, HTTPException, UploadFile, status + +prompt_app = FastAPI() + + +# Upload prompt for LLM ChatQnA using file +@prompt_app.post(path="/v1/chatqna/prompt-file") +async def load_prompt_file(file: UploadFile = File(...)): + try: + generator = ctx.get_pipeline_mgr().get_active_pipeline().generator + if generator: + content = await file.read() + prompt_str = content.decode("utf-8") + generator.set_prompt(prompt_str) + return "Set LLM Prompt Successfully" + except Exception as e: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)) + + +# Update prompt for LLM ChatQnA 
+@prompt_app.post(path="/v1/chatqna/prompt") +async def load_prompt(request: PromptIn): + try: + generator = ctx.get_pipeline_mgr().get_active_pipeline().generator + if generator: + prompt_str = request.prompt + generator.set_prompt(prompt_str) + return "Set LLM Prompt Successfully" + except Exception as e: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)) + + +# Get prompt of LLM ChatQnA +@prompt_app.get(path="/v1/chatqna/prompt") +async def get_prompt(): + try: + generator = ctx.get_pipeline_mgr().get_active_pipeline().generator + if generator: + return generator.prompt + except Exception as e: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)) + + +# Reset prompt for LLM ChatQnA +@prompt_app.post(path="/v1/chatqna/prompt/reset") +async def reset_prompt(): + try: + generator = ctx.get_pipeline_mgr().get_active_pipeline().generator + if generator: + generator.reset_prompt() + return "Reset LLM Prompt Successfully" + except Exception as e: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e)) diff --git a/EdgeCraftRAG/edgecraftrag/api_schema.py b/EdgeCraftRAG/edgecraftrag/api_schema.py index 7a8a493b1e..599d53115a 100644 --- a/EdgeCraftRAG/edgecraftrag/api_schema.py +++ b/EdgeCraftRAG/edgecraftrag/api_schema.py @@ -1,7 +1,7 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -from typing import Optional +from typing import Any, Optional from pydantic import BaseModel @@ -65,5 +65,15 @@ class FilesIn(BaseModel): class RagOut(BaseModel): query: str - contexts: Optional[list[str]] = None + contexts: Optional[dict[str, Any]] = None response: str + + +class PromptIn(BaseModel): + prompt: Optional[str] = None + + +class KnowledgeBaseCreateIn(BaseModel): + name: str + description: Optional[str] = None + active: Optional[bool] = None diff --git a/EdgeCraftRAG/edgecraftrag/components/data.py b/EdgeCraftRAG/edgecraftrag/components/data.py 
old mode 100644 new mode 100755 index e7fa19e7ad..34b568abde --- a/EdgeCraftRAG/edgecraftrag/components/data.py +++ b/EdgeCraftRAG/edgecraftrag/components/data.py @@ -54,7 +54,7 @@ def convert_text_to_documents(text) -> List[Document]: def convert_file_to_documents(file_path) -> List[Document]: from llama_index.core import SimpleDirectoryReader - supported_exts = [".pdf", ".txt", ".doc", ".docx", ".pptx", ".ppt", ".csv", ".md", ".html", ".rst"] + supported_exts = [".pdf", ".txt", ".doc", ".docx", ".pptx", ".ppt", ".csv", ".md", ".html", ".rst", ".epub"] if file_path.is_dir(): docs = SimpleDirectoryReader(input_dir=file_path, recursive=True, required_exts=supported_exts).load_data() elif file_path.is_file(): diff --git a/EdgeCraftRAG/edgecraftrag/components/generator.py b/EdgeCraftRAG/edgecraftrag/components/generator.py old mode 100644 new mode 100755 index d5a3e73ccd..e002e92a78 --- a/EdgeCraftRAG/edgecraftrag/components/generator.py +++ b/EdgeCraftRAG/edgecraftrag/components/generator.py @@ -8,8 +8,8 @@ import urllib.request from urllib.parse import urlparse -from comps import GeneratedDoc -from edgecraftrag.base import BaseComponent, CompType, GeneratorType, NodeParserType +from edgecraftrag.base import BaseComponent, CompType, GeneratorType, InferenceType, NodeParserType +from edgecraftrag.utils import concat_history, save_history from fastapi.responses import StreamingResponse from langchain_core.prompts import PromptTemplate from llama_index.llms.openai_like import OpenAILike @@ -95,13 +95,33 @@ def extract_unstructured_eles(retrieved_nodes=[], text_gen_context=""): return unstructured_str +async def local_stream_generator(lock, llm, prompt_str, unstructured_str): + async with lock: + response = llm.stream_complete(prompt_str) + collected_data = [] + for r in response: + collected_data.append(r.delta) + yield r.delta + await asyncio.sleep(0) + if unstructured_str: + collected_data.append(unstructured_str) + yield unstructured_str + res = 
"".join(collected_data) + save_history(res) + + async def stream_generator(llm, prompt_str, unstructured_str): response = llm.stream_complete(prompt_str) + collected_data = [] for r in response: + collected_data.append(r.delta) yield r.delta await asyncio.sleep(0) if unstructured_str: + collected_data.append(unstructured_str) yield unstructured_str + res = "".join(collected_data) + save_history(res) class QnAGenerator(BaseComponent): @@ -135,6 +155,8 @@ def __init__(self, llm_model, prompt_template_file, inference_type, **kwargs): self.model_id = llm_model else: self.model_id = llm_model().model_id + if self.inference_type == InferenceType.LOCAL: + self.lock = asyncio.Lock() def set_prompt(self, prompt): if "{context}" not in prompt: @@ -170,6 +192,7 @@ def run(self, chat_request, retrieved_nodes, node_parser_type, **kwargs): # This could happen when User delete all LLMs through RESTful API raise ValueError("No LLM available, please load LLM") # query transformation + chat_request.messages = concat_history(chat_request.messages) text_gen_context, prompt_str = self.query_transform(chat_request, retrieved_nodes) generate_kwargs = dict( temperature=chat_request.temperature, @@ -186,14 +209,17 @@ def run(self, chat_request, retrieved_nodes, node_parser_type, **kwargs): unstructured_str = extract_unstructured_eles(retrieved_nodes, text_gen_context) if chat_request.stream: return StreamingResponse( - stream_generator(self.llm(), prompt_str, unstructured_str), + local_stream_generator(self.lock, self.llm(), prompt_str, unstructured_str), media_type="text/event-stream", ) else: - return self.llm().complete(prompt_str) + result = self.llm().complete(prompt_str) + save_history(str(result.text)) + return result def run_vllm(self, chat_request, retrieved_nodes, node_parser_type, **kwargs): # query transformation + chat_request.messages = concat_history(chat_request.messages) text_gen_context, prompt_str = self.query_transform(chat_request, retrieved_nodes) llm_endpoint = 
os.getenv("vLLM_ENDPOINT", "http://localhost:8008") model_name = os.getenv("LLM_MODEL", self.model_id) @@ -216,10 +242,9 @@ def run_vllm(self, chat_request, retrieved_nodes, node_parser_type, **kwargs): stream_generator(llm, prompt_str, unstructured_str), media_type="text/event-stream" ) else: - response = llm.complete(prompt_str) - response = response.text - - return GeneratedDoc(text=response, prompt=prompt_str) + result = llm.complete(prompt_str) + save_history(str(result)) + return result @model_serializer def ser_model(self): diff --git a/EdgeCraftRAG/edgecraftrag/components/knowledge_base.py b/EdgeCraftRAG/edgecraftrag/components/knowledge_base.py new file mode 100644 index 0000000000..259c4a463f --- /dev/null +++ b/EdgeCraftRAG/edgecraftrag/components/knowledge_base.py @@ -0,0 +1,51 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +import os +from typing import Any, List, Optional + +from edgecraftrag.base import BaseComponent +from pydantic import model_serializer + + +class Knowledge(BaseComponent): + file_paths: Optional[List[str]] = [] + file_map: Optional[List[str]] = {} + description: Optional[str] = "None" + comp_type: str = "knowledge" + active: bool + + def _update_file_names(self) -> None: + self.file_map = {os.path.basename(path): path for path in self.file_paths if path is not None} + + def add_file_path(self, file_path: str) -> bool: + if file_path not in self.file_paths: + self.file_paths.append(file_path) + self._update_file_names() + return True + return False + + def remove_file_path(self, file_path: str) -> bool: + if file_path in self.file_paths: + self.file_paths.remove(file_path) + self._update_file_names() + return True + return False + + def get_file_paths(self) -> List[str]: + return self.file_paths + + def run(self, **kwargs) -> Any: + pass + + @model_serializer + def ser_model(self): + set = { + "idx": self.idx, + "name": self.name, + "comp_type": self.comp_type, + "file_map": self.file_map, + 
"description": self.description, + "active": self.active, + } + return set diff --git a/EdgeCraftRAG/edgecraftrag/components/pipeline.py b/EdgeCraftRAG/edgecraftrag/components/pipeline.py index e0a01eba96..a7dd8c4cce 100644 --- a/EdgeCraftRAG/edgecraftrag/components/pipeline.py +++ b/EdgeCraftRAG/edgecraftrag/components/pipeline.py @@ -221,11 +221,13 @@ async def timing_wrapper(): def run_test_generator_ben(pl: Pipeline, chat_request: ChatCompletionRequest) -> Any: benchmark_index, benchmark_data = pl.benchmark.init_benchmark_data() + contexts = {} start = time.perf_counter() query = chat_request.messages retri_res = pl.retriever.run(query=query) query_bundle = QueryBundle(query) benchmark_data[CompType.RETRIEVER] = time.perf_counter() - start + contexts[CompType.RETRIEVER] = retri_res start = time.perf_counter() if pl.postprocessor: @@ -236,6 +238,7 @@ def run_test_generator_ben(pl: Pipeline, chat_request: ChatCompletionRequest) -> ): processor.top_n = chat_request.top_n retri_res = processor.run(retri_res=retri_res, query_bundle=query_bundle) + contexts[CompType.POSTPROCESSOR] = retri_res benchmark_data[CompType.POSTPROCESSOR] = time.perf_counter() - start if pl.generator is None: @@ -260,12 +263,14 @@ def run_test_generator_ben(pl: Pipeline, chat_request: ChatCompletionRequest) -> benchmark_data[CompType.GENERATOR] = end - start pl.benchmark.insert_llm_data(benchmark_index, input_token_size) pl.benchmark.insert_benchmark_data(benchmark_data) - return ret, retri_res + return ret, contexts def run_test_generator(pl: Pipeline, chat_request: ChatCompletionRequest) -> Any: query = chat_request.messages + contexts = {} retri_res = pl.retriever.run(query=query) + contexts[CompType.RETRIEVER] = retri_res query_bundle = QueryBundle(query) if pl.postprocessor: @@ -276,6 +281,7 @@ def run_test_generator(pl: Pipeline, chat_request: ChatCompletionRequest) -> Any ): processor.top_n = chat_request.top_n retri_res = processor.run(retri_res=retri_res, query_bundle=query_bundle) 
+ contexts[CompType.POSTPROCESSOR] = retri_res if pl.generator is None: raise ValueError("No Generator Specified") @@ -286,4 +292,4 @@ def run_test_generator(pl: Pipeline, chat_request: ChatCompletionRequest) -> Any ret = pl.generator.run_vllm(chat_request, retri_res, np_type) else: raise ValueError("LLM inference_type not supported") - return ret, retri_res + return ret, contexts diff --git a/EdgeCraftRAG/edgecraftrag/context.py b/EdgeCraftRAG/edgecraftrag/context.py index 3555ce4beb..4d013b9bd2 100644 --- a/EdgeCraftRAG/edgecraftrag/context.py +++ b/EdgeCraftRAG/edgecraftrag/context.py @@ -3,6 +3,7 @@ from edgecraftrag.controllers.compmgr import GeneratorMgr, IndexerMgr, NodeParserMgr, PostProcessorMgr, RetrieverMgr from edgecraftrag.controllers.filemgr import FilelMgr +from edgecraftrag.controllers.knowledge_basemgr import KnowledgeManager from edgecraftrag.controllers.modelmgr import ModelMgr from edgecraftrag.controllers.nodemgr import NodeMgr from edgecraftrag.controllers.pipelinemgr import PipelineMgr @@ -20,6 +21,7 @@ def __init__(self): self.modmgr = ModelMgr() self.genmgr = GeneratorMgr() self.filemgr = FilelMgr() + self.knowledgemgr = KnowledgeManager() def get_pipeline_mgr(self): return self.plmgr @@ -48,5 +50,8 @@ def get_generator_mgr(self): def get_file_mgr(self): return self.filemgr + def get_knowledge_mgr(self): + return self.knowledgemgr + ctx = Context() diff --git a/EdgeCraftRAG/edgecraftrag/controllers/knowledge_basemgr.py b/EdgeCraftRAG/edgecraftrag/controllers/knowledge_basemgr.py new file mode 100644 index 0000000000..091175f64f --- /dev/null +++ b/EdgeCraftRAG/edgecraftrag/controllers/knowledge_basemgr.py @@ -0,0 +1,73 @@ +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +from typing import Any, Dict, List, Optional + +from edgecraftrag.api_schema import KnowledgeBaseCreateIn +from edgecraftrag.base import BaseMgr +from edgecraftrag.components.knowledge_base import Knowledge +from fastapi import HTTPException, 
status + + +class KnowledgeManager(BaseMgr): + def __init__(self): + super().__init__() + self.active_knowledge_idx: Optional[str] = None + + def get_knowledge_base_by_name_or_id(self, name: str): + for _, kb in self.components.items(): + if kb.name == name or kb.idx == name: + return kb + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="knowledge base does not exist") + + def get_active_knowledge_base(self) -> Optional[Knowledge]: + if self.active_knowledge_idx: + return self.get_knowledge_base_by_name_or_id(self.active_knowledge_idx) + else: + return None + + def active_knowledge(self, knowledge: KnowledgeBaseCreateIn): + kb = self.get_knowledge_base_by_name_or_id(knowledge.name) + self.active_knowledge_idx = kb.idx if knowledge.active else None + + for idx, comp in self.components.items(): + if isinstance(comp, Knowledge): + comp.active = idx == self.active_knowledge_idx + return kb + + def create_knowledge_base(self, knowledge: KnowledgeBaseCreateIn) -> Knowledge: + for _, kb in self.components.items(): + if kb.name == knowledge.name: + raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail="The knowledge base already exists.") + if knowledge.active is None: + knowledge.active = False + kb = Knowledge(name=knowledge.name, description=knowledge.description, active=knowledge.active) + self.add(kb) + if knowledge.active: + self.active_knowledge(knowledge) + return kb + + def delete_knowledge_base(self, name: str): + kb = self.get_knowledge_base_by_name_or_id(name) + if kb.idx == self.active_knowledge_idx: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Cannot delete a running knowledge base." 
+ ) + self.remove(kb.idx) + return "Knowledge base removed successfully" + + def update_knowledge_base(self, knowledge) -> Knowledge: + kb = self.get_knowledge_base_by_name_or_id(knowledge.name) + + if knowledge.description is not None: + kb.description = knowledge.description + + if knowledge.active is not None and kb.active != knowledge.active: + kb = self.active_knowledge(knowledge) + return "Knowledge base update successfully" + + def get_all_knowledge_bases(self) -> List[Dict[str, Any]]: + kb_list = [] + for idx, kb in self.components.items(): + kb_list.append(kb) + return kb_list diff --git a/EdgeCraftRAG/edgecraftrag/requirements.txt b/EdgeCraftRAG/edgecraftrag/requirements.txt old mode 100644 new mode 100755 diff --git a/EdgeCraftRAG/edgecraftrag/server.py b/EdgeCraftRAG/edgecraftrag/server.py index cd8a7f8eab..becf0902f8 100644 --- a/EdgeCraftRAG/edgecraftrag/server.py +++ b/EdgeCraftRAG/edgecraftrag/server.py @@ -6,8 +6,10 @@ import uvicorn from edgecraftrag.api.v1.chatqna import chatqna_app from edgecraftrag.api.v1.data import data_app +from edgecraftrag.api.v1.knowledge_base import kb_app from edgecraftrag.api.v1.model import model_app from edgecraftrag.api.v1.pipeline import pipeline_app +from edgecraftrag.api.v1.prompt import prompt_app from edgecraftrag.api.v1.system import system_app from edgecraftrag.utils import UI_DIRECTORY from fastapi import FastAPI @@ -26,7 +28,7 @@ ) -sub_apps = [data_app, model_app, pipeline_app, chatqna_app, system_app] +sub_apps = [data_app, model_app, pipeline_app, chatqna_app, system_app, prompt_app, kb_app] for sub_app in sub_apps: for route in sub_app.routes: app.router.routes.append(route) diff --git a/EdgeCraftRAG/edgecraftrag/utils.py b/EdgeCraftRAG/edgecraftrag/utils.py old mode 100644 new mode 100755 index be83f47135..0572000f80 --- a/EdgeCraftRAG/edgecraftrag/utils.py +++ b/EdgeCraftRAG/edgecraftrag/utils.py @@ -3,7 +3,7 @@ import io import os -from typing import Iterator +from typing import Iterator, Optional 
from docx.text.paragraph import Paragraph from PIL import Image as Img @@ -29,3 +29,46 @@ def iter_elements(cls, paragraph: Paragraph, opts: DocxPartitionerOptions) -> It image.save(image_path) element_metadata = ElementMetadata(image_path=image_path) yield Image(text="IMAGE", metadata=element_metadata) + + +def serialize_node_with_score(node_with_score): + return { + "node": node_with_score.node.__dict__, + "score": node_with_score.score.item() if hasattr(node_with_score.score, "item") else node_with_score.score, + } + + +def serialize_contexts(contexts): + return {key: [serialize_node_with_score(node) for node in nodes] for key, nodes in contexts.items()} + + +_history_map = {} +_current_session_id: Optional[str] = None + + +def set_current_session(session_id: str) -> None: + global _current_session_id + _current_session_id = session_id if session_id not in (None, "", "None") else "default_session" + + +def get_current_session() -> Optional[str]: + return _current_session_id + + +def clear_history() -> None: + session_id = get_current_session() + if session_id in _history_map: + _history_map[session_id] = [] + + +def save_history(message: str) -> str: + session_id = get_current_session() + _history_map.setdefault(session_id, []).append(f"content: {message}") + return "History appended successfully" + + +def concat_history(message: str) -> str: + history_id = get_current_session() + _history_map.setdefault(history_id, []).append(f"user: {message}") + str_message = "".join(_history_map.get(history_id, [])) + return str_message[-6000:] if len(str_message) > 6000 else str_message diff --git a/EdgeCraftRAG/nginx/nginx.conf.template b/EdgeCraftRAG/nginx/nginx.conf.template new file mode 100755 index 0000000000..005420e386 --- /dev/null +++ b/EdgeCraftRAG/nginx/nginx.conf.template @@ -0,0 +1,37 @@ +worker_processes auto; +events { + worker_connections 1024; +} +http { + + upstream multi-arc-serving-container { + server ${HOST_IP}:${NGINX_PORT_0}; + server 
${HOST_IP}:${NGINX_PORT_1}; + } + include /etc/nginx/mime.types; + default_type application/octet-stream; + client_max_body_size 50M; + sendfile on; + + keepalive_timeout 65; + keepalive_requests 1000; + server { + listen 8086; + server_name _; + location / { + root /usr/share/nginx/html; + index index.html index.htm; + add_header Cache-Control "no-cache"; + try_files $uri $uri/ /index.html; + } + location /v1/completions { + proxy_pass http://multi-arc-serving-container/v1/completions; + proxy_http_version 1.1; + proxy_set_header Connection ""; + } + + location ~ /\. { + deny all; + } + } +} \ No newline at end of file diff --git a/EdgeCraftRAG/tests/configs/test_pipeline_vllm.json b/EdgeCraftRAG/tests/configs/test_pipeline_ipex_vllm.json similarity index 82% rename from EdgeCraftRAG/tests/configs/test_pipeline_vllm.json rename to EdgeCraftRAG/tests/configs/test_pipeline_ipex_vllm.json index 21d709943d..f49367612c 100644 --- a/EdgeCraftRAG/tests/configs/test_pipeline_vllm.json +++ b/EdgeCraftRAG/tests/configs/test_pipeline_ipex_vllm.json @@ -1,5 +1,5 @@ { - "name": "rag_test_vllm", + "name": "rag_test_local_llm", "node_parser": { "chunk_size": 400, "chunk_overlap": 48, @@ -31,14 +31,14 @@ } ], "generator": { + "inference_type": "vllm", "model": { "model_id": "Qwen/Qwen2-7B-Instruct", - "model_path": "./models/Qwen/Qwen2-7B-Instruct/INT4_compressed_weights", + "model_path": "./models/Qwen/Qwen2-7B-Instruct/", "device": "auto", "weight": "INT4" }, - "prompt_path": "./default_prompt.txt", - "inference_type": "vllm" + "prompt_path": "./default_prompt.txt" }, "active": "True" } diff --git a/EdgeCraftRAG/tests/test_compose_vllm_on_arc.sh b/EdgeCraftRAG/tests/test_compose_vllm_on_arc.sh index 43df736fd2..c3fe7785e3 100755 --- a/EdgeCraftRAG/tests/test_compose_vllm_on_arc.sh +++ b/EdgeCraftRAG/tests/test_compose_vllm_on_arc.sh @@ -18,19 +18,26 @@ LOG_PATH="$WORKPATH/tests" ip_address=$(hostname -I | awk '{print $1}') HOST_IP=$ip_address -COMPOSE_FILE="compose_vllm.yaml" 
+COMPOSE_FILE="compose_vllm_multi-arc.yaml" EC_RAG_SERVICE_PORT=16010 -MODEL_PATH="/home/media/models" +MODEL_PATH="/home/media/qwen" # MODEL_PATH="$WORKPATH/models" DOC_PATH="$WORKPATH/tests" -UI_TMPFILE_PATH="$WORKPATH/tests" - -#HF_ENDPOINT=https://hf-mirror.com -LLM_MODEL="Qwen/Qwen2-7B-Instruct" -VLLM_SERVICE_PORT=8008 -vLLM_ENDPOINT="http://${HOST_IP}:${VLLM_SERVICE_PORT}" - +UI_UPLOAD_PATH="$WORKPATH/tests" + +HF_ENDPOINT=https://hf-mirror.com +NGINX_PORT=8086 +NGINX_PORT_0=8100 +NGINX_PORT_1=8100 +VLLM_SERVICE_PORT_0=8100 +TENSOR_PARALLEL_SIZE=1 +SELECTED_XPU_0=0 +vLLM_ENDPOINT="http://${HOST_IP}:${NGINX_PORT}" +CONTAINER_COUNT="single_container" +LLM_MODEL=Qwen/Qwen2-7B-Instruct +LLM_MODEL_PATH=$MODEL_PATH +NGINX_CONFIG_PATH="$WORKPATH/nginx/nginx.conf" function build_docker_images() { opea_branch=${opea_branch:-"main"} @@ -41,30 +48,29 @@ function build_docker_images() { docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile . popd && sleep 1s + echo "Pull intelanalytics/ipex-llm-serving-xpu image" + docker pull intelanalytics/ipex-llm-serving-xpu:0.8.3-b18 + echo "Build all the images with --no-cache, check docker_image_build.log for details..." docker compose -f build.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log - echo "Build vllm_openvino image from GenAIComps..." 
- cd $WORKPATH && git clone --single-branch --branch "${opea_branch:-"main"}" https://github.com/opea-project/GenAIComps.git - cd GenAIComps/comps/third_parties/vllm/src/ - bash ./build_docker_vllm_openvino.sh gpu - docker images && sleep 1s } function start_services() { cd $WORKPATH/docker_compose/intel/gpu/arc source set_env.sh - + envsubst < $WORKPATH/nginx/nginx.conf.template > $WORKPATH/nginx/nginx.conf # Start Docker Containers - docker compose -f $COMPOSE_FILE up -d > ${LOG_PATH}/start_services_with_compose.log + docker compose -f $COMPOSE_FILE --profile $CONTAINER_COUNT up -d > ${LOG_PATH}/start_services_with_compose.log + echo "ipex-llm-serving-xpu is booting, please wait." n=0 until [[ "$n" -ge 100 ]]; do - docker logs vllm-openvino-server > ${LOG_PATH}/vllm_service_start.log - if grep -q "metrics.py" ${LOG_PATH}/vllm_service_start.log; then + docker logs ipex-llm-serving-xpu-container-0 > ${LOG_PATH}/ipex-llm-serving-xpu-container.log 2>&1 + if grep -q "Starting vLLM API server on http://0.0.0.0:" ${LOG_PATH}/ipex-llm-serving-xpu-container.log; then break fi - sleep 5s + sleep 6s n=$((n+1)) done } @@ -112,7 +118,7 @@ function validate_rag() { "active" \ "pipeline" \ "edgecraftrag-server" \ - '@configs/test_pipeline_vllm.json' + '@configs/test_pipeline_ipex_vllm.json' # add data validate_services \ @@ -127,7 +133,7 @@ function validate_rag() { "${HOST_IP}:${EC_RAG_SERVICE_PORT}/v1/chatqna" \ "1234567890" \ "query" \ - "vllm-openvino-server" \ + "ipex-llm-serving-xpu-container-0" \ '{"messages":"What is the test id?"}' } @@ -137,7 +143,7 @@ function validate_megaservice() { "${HOST_IP}:16011/v1/chatqna" \ "1234567890" \ "query" \ - "vllm-openvino-server" \ + "ipex-llm-serving-xpu-container-0" \ '{"messages":"What is the test id?"}' } @@ -148,7 +154,7 @@ function stop_docker() { function main() { - mkdir -p "$LOG_PATH" + mkdir -p $LOG_PATH echo "::group::stop_docker" stop_docker diff --git a/EdgeCraftRAG/tests/test_pipeline_ipex_vllm.json 
b/EdgeCraftRAG/tests/test_pipeline_ipex_vllm.json new file mode 100644 index 0000000000..f49367612c --- /dev/null +++ b/EdgeCraftRAG/tests/test_pipeline_ipex_vllm.json @@ -0,0 +1,44 @@ +{ + "name": "rag_test_local_llm", + "node_parser": { + "chunk_size": 400, + "chunk_overlap": 48, + "parser_type": "simple" + }, + "indexer": { + "indexer_type": "faiss_vector", + "embedding_model": { + "model_id": "BAAI/bge-small-en-v1.5", + "model_path": "./models/BAAI/bge-small-en-v1.5", + "device": "auto", + "weight": "INT4" + } + }, + "retriever": { + "retriever_type": "vectorsimilarity", + "retrieve_topk": 30 + }, + "postprocessor": [ + { + "processor_type": "reranker", + "top_n": 2, + "reranker_model": { + "model_id": "BAAI/bge-reranker-large", + "model_path": "./models/BAAI/bge-reranker-large", + "device": "auto", + "weight": "INT4" + } + } + ], + "generator": { + "inference_type": "vllm", + "model": { + "model_id": "Qwen/Qwen2-7B-Instruct", + "model_path": "./models/Qwen/Qwen2-7B-Instruct/", + "device": "auto", + "weight": "INT4" + }, + "prompt_path": "./default_prompt.txt" + }, + "active": "True" +} diff --git a/EdgeCraftRAG/ui/vue/.env.development b/EdgeCraftRAG/ui/vue/.env.development index c2237f95f5..d500c854a5 100644 --- a/EdgeCraftRAG/ui/vue/.env.development +++ b/EdgeCraftRAG/ui/vue/.env.development @@ -2,5 +2,5 @@ ENV = development # Local Api -VITE_API_URL = http://10.67.106.236:16010/ -VITE_CHATBOT_URL = http://10.67.106.236:16011/ \ No newline at end of file +VITE_API_URL = http://10.67.106.163:16010/ +VITE_CHATBOT_URL = http://10.67.106.163:16011/ \ No newline at end of file diff --git a/EdgeCraftRAG/ui/vue/components.d.ts b/EdgeCraftRAG/ui/vue/components.d.ts index 05af641f82..53696ff831 100644 --- a/EdgeCraftRAG/ui/vue/components.d.ts +++ b/EdgeCraftRAG/ui/vue/components.d.ts @@ -18,17 +18,25 @@ declare module 'vue' { AConfigProvider: typeof import('ant-design-vue/es')['ConfigProvider'] ADescriptions: typeof import('ant-design-vue/es')['Descriptions'] 
ADescriptionsItem: typeof import('ant-design-vue/es')['DescriptionsItem'] + ADivider: typeof import('ant-design-vue/es')['Divider'] ADrawer: typeof import('ant-design-vue/es')['Drawer'] + ADropdown: typeof import('ant-design-vue/es')['Dropdown'] + ADropdownButton: typeof import('ant-design-vue/es')['DropdownButton'] AEmpty: typeof import('ant-design-vue/es')['Empty'] AForm: typeof import('ant-design-vue/es')['Form'] AFormItem: typeof import('ant-design-vue/es')['FormItem'] + AImage: typeof import('ant-design-vue/es')['Image'] AInput: typeof import('ant-design-vue/es')['Input'] AInputNumber: typeof import('ant-design-vue/es')['InputNumber'] ALayout: typeof import('ant-design-vue/es')['Layout'] ALayoutContent: typeof import('ant-design-vue/es')['LayoutContent'] ALayoutHeader: typeof import('ant-design-vue/es')['LayoutHeader'] + ALayoutSider: typeof import('ant-design-vue/es')['LayoutSider'] + AMenu: typeof import('ant-design-vue/es')['Menu'] + AMenuItem: typeof import('ant-design-vue/es')['MenuItem'] AModal: typeof import('ant-design-vue/es')['Modal'] APagination: typeof import('ant-design-vue/es')['Pagination'] + APopover: typeof import('ant-design-vue/es')['Popover'] ARadio: typeof import('ant-design-vue/es')['Radio'] ARadioGroup: typeof import('ant-design-vue/es')['RadioGroup'] ARow: typeof import('ant-design-vue/es')['Row'] @@ -39,7 +47,9 @@ declare module 'vue' { ASteps: typeof import('ant-design-vue/es')['Steps'] ATable: typeof import('ant-design-vue/es')['Table'] ATag: typeof import('ant-design-vue/es')['Tag'] + ATextarea: typeof import('ant-design-vue/es')['Textarea'] ATooltip: typeof import('ant-design-vue/es')['Tooltip'] + AUpload: typeof import('ant-design-vue/es')['Upload'] AUploadDragger: typeof import('ant-design-vue/es')['UploadDragger'] FormTooltip: typeof import('./src/components/FormTooltip.vue')['default'] RouterLink: typeof import('vue-router')['RouterLink'] diff --git a/EdgeCraftRAG/ui/vue/nginx.conf b/EdgeCraftRAG/ui/vue/nginx.conf index 
e71fbbc109..e4d0d7fb4c 100644 --- a/EdgeCraftRAG/ui/vue/nginx.conf +++ b/EdgeCraftRAG/ui/vue/nginx.conf @@ -11,7 +11,7 @@ http { client_max_body_size 50M; sendfile on; - keepalive_timeout 65; + keepalive_timeout 90; server { listen 8082; @@ -28,6 +28,7 @@ http { proxy_pass http://server:16010; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_http_version 1.1; + proxy_read_timeout 180s; proxy_set_header Connection ""; } diff --git a/EdgeCraftRAG/ui/vue/package.json b/EdgeCraftRAG/ui/vue/package.json index 8a215ec138..516e870406 100644 --- a/EdgeCraftRAG/ui/vue/package.json +++ b/EdgeCraftRAG/ui/vue/package.json @@ -21,6 +21,7 @@ "js-cookie": "^3.0.5", "lodash": "^4.17.21", "marked": "^15.0.6", + "mitt": "^3.0.1", "pinia": "^3.0.2", "pinia-plugin-persistedstate": "^4.2.0", "qs": "^6.13.1", diff --git a/EdgeCraftRAG/ui/vue/src/api/chatbot/index.ts b/EdgeCraftRAG/ui/vue/src/api/chatbot/index.ts index 17bd6c27e7..f7946ad72d 100644 --- a/EdgeCraftRAG/ui/vue/src/api/chatbot/index.ts +++ b/EdgeCraftRAG/ui/vue/src/api/chatbot/index.ts @@ -17,17 +17,7 @@ export const requestChatbotConfig = (data: Object) => { data, showLoading: true, showSuccessMsg: true, - successMsg: "Configuration update successful !", - }); -}; - -export const requestFileDelete = (name: String) => { - return request({ - url: `/v1/data/files/${name}`, - method: "delete", - showLoading: true, - showSuccessMsg: true, - successMsg: "File deleted successfully !", + successMsg: "request.chatbot.updateSucc", }); }; @@ -37,16 +27,3 @@ export const getBenchmark = (name: String) => { method: "get", }); }; - -export const requestParsingFiles = (data: Object) => { - return request({ - url: `/v1/data`, - method: "post", - data, - showLoading: true, - showSuccessMsg: true, - successMsg: "Document uploaded and parsed successfully !", - }); -}; - -export const uploadFileUrl = `${import.meta.env.VITE_API_URL}v1/data/file`; diff --git a/EdgeCraftRAG/ui/vue/src/api/knowledgeBase/index.ts 
b/EdgeCraftRAG/ui/vue/src/api/knowledgeBase/index.ts new file mode 100644 index 0000000000..ca146d3651 --- /dev/null +++ b/EdgeCraftRAG/ui/vue/src/api/knowledgeBase/index.ts @@ -0,0 +1,76 @@ +// Copyright (C) 2025 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 + +import request from "../request"; + +export const getKnowledgeBaseList = () => { + return request({ + url: "/v1/knowledge", + method: "get", + showLoading: true, + }); +}; + +export const getKnowledgeBaseDetialById = (kbId: String) => { + return request({ + url: `/v1/knowledge/${kbId}`, + method: "get", + showLoading: true, + }); +}; + +export const requestKnowledgeBaseCreate = (data: Object) => { + return request({ + url: "/v1/knowledge", + method: "post", + data, + showLoading: true, + showSuccessMsg: true, + successMsg: "request.knowledge.createSucc", + }); +}; + +export const requestKnowledgeBaseUpdate = (data: Object) => { + return request({ + url: `/v1/knowledge/patch`, + method: "patch", + data, + showLoading: true, + showSuccessMsg: true, + successMsg: "request.knowledge.updateSucc", + }); +}; + +export const requestKnowledgeBaseDelete = (kbId: String) => { + return request({ + url: `/v1/knowledge/${kbId}`, + method: "delete", + showLoading: true, + showSuccessMsg: true, + successMsg: "request.knowledge.deleteSucc", + }); +}; + +export const requestKnowledgeBaseRelation = (kbId: String, data: Object) => { + return request({ + url: `/v1/knowledge/${kbId}/files`, + method: "post", + data, + showLoading: true, + showSuccessMsg: true, + successMsg: "request.knowledge.uploadSucc", + }); +}; + +export const requestFileDelete = (name: String, data: Object) => { + return request({ + url: `/v1/knowledge/${name}/files`, + method: "delete", + data, + showLoading: true, + showSuccessMsg: true, + successMsg: "request.knowledge.deleteFileSucc", + }); +}; + +export const uploadFileUrl = `${import.meta.env.VITE_API_URL}v1/data/file/`; diff --git a/EdgeCraftRAG/ui/vue/src/api/pipeline/index.ts 
b/EdgeCraftRAG/ui/vue/src/api/pipeline/index.ts index 959caa5676..82ae41d271 100644 --- a/EdgeCraftRAG/ui/vue/src/api/pipeline/index.ts +++ b/EdgeCraftRAG/ui/vue/src/api/pipeline/index.ts @@ -32,7 +32,7 @@ export const requestPipelineCreate = (data: Object) => { data, showLoading: true, showSuccessMsg: true, - successMsg: "Pipeline created successfully !", + successMsg: "request.pipeline.createSucc", }); }; @@ -43,7 +43,7 @@ export const requestPipelineUpdate = (name: String, data: Object) => { data, showLoading: true, showSuccessMsg: true, - successMsg: "Pipeline update successfully !", + successMsg: "request.pipeline.updateSucc", }); }; @@ -53,7 +53,7 @@ export const requestPipelineDelete = (name: String) => { method: "delete", showLoading: true, showSuccessMsg: true, - successMsg: "Pipeline deleted successfully !", + successMsg: "request.pipeline.deleteSucc", }); }; @@ -64,7 +64,7 @@ export const requestPipelineSwitchState = (name: String, data: Object) => { data, showLoading: true, showSuccessMsg: true, - successMsg: "Pipeline state switch successful !", + successMsg: "request.pipeline.switchSucc", }); }; diff --git a/EdgeCraftRAG/ui/vue/src/api/request.ts b/EdgeCraftRAG/ui/vue/src/api/request.ts index 95382588fe..91805dbab5 100644 --- a/EdgeCraftRAG/ui/vue/src/api/request.ts +++ b/EdgeCraftRAG/ui/vue/src/api/request.ts @@ -5,12 +5,13 @@ import { NextLoading } from "@/utils/loading"; import serviceManager from "@/utils/serviceManager"; import axios, { AxiosInstance } from "axios"; import qs from "qs"; +import i18n from "@/i18n"; const antNotification = serviceManager.getService("antNotification"); const service: AxiosInstance = axios.create({ baseURL: import.meta.env.VITE_API_URL, - timeout: 50000, + timeout: 600000, headers: { "Content-Type": "application/json" }, }); @@ -38,7 +39,8 @@ service.interceptors.response.use( if (NextLoading) NextLoading.done(); const res = response.data; if (config.showSuccessMsg) { - if (antNotification) antNotification("success", 
"Success", config.successMsg); + if (antNotification) + antNotification("success", i18n.global.t("common.success"), i18n.global.t(config.successMsg)); } return Promise.resolve(res); }, @@ -53,7 +55,7 @@ service.interceptors.response.use( } else { errorMessage = error.message; } - if (antNotification) antNotification("error", "Error", errorMessage); + if (antNotification) antNotification("error", i18n.global.t("common.error"), errorMessage); return Promise.reject(error); }, diff --git a/EdgeCraftRAG/ui/vue/src/assets/iconFont/iconfont.css b/EdgeCraftRAG/ui/vue/src/assets/iconFont/iconfont.css index 0fd282ff5e..d2cdcac199 100644 --- a/EdgeCraftRAG/ui/vue/src/assets/iconFont/iconfont.css +++ b/EdgeCraftRAG/ui/vue/src/assets/iconFont/iconfont.css @@ -1,9 +1,9 @@ @font-face { font-family: "iconfont"; /* Project id 4784207 */ src: - url("iconfont.woff2?t=1739238081968") format("woff2"), - url("iconfont.woff?t=1739238081968") format("woff"), - url("iconfont.ttf?t=1739238081968") format("truetype"); + url("iconfont.woff2?t=1748479964596") format("woff2"), + url("iconfont.woff?t=1748479964596") format("woff"), + url("iconfont.ttf?t=1748479964596") format("truetype"); } .iconfont { @@ -14,6 +14,94 @@ -moz-osx-font-smoothing: grayscale; } +.icon-newChat:before { + content: "\e6c7"; +} + +.icon-chat:before { + content: "\ecb1"; +} + +.icon-knowledge:before { + content: "\e6f2"; +} + +.icon-system:before { + content: "\e799"; +} + +.icon-chatbot1:before { + content: "\e630"; +} + +.icon-lang-zh:before { + content: "\e6c5"; +} + +.icon-lang-en:before { + content: "\e609"; +} + +.icon-exit:before { + content: "\e6d9"; +} + +.icon-loading:before { + content: "\e61a"; +} + +.icon-success:before { + content: "\e8ca"; +} + +.icon-results:before { + content: "\e603"; +} + +.icon-rating:before { + content: "\e7b9"; +} + +.icon-chart-line:before { + content: "\e790"; +} + +.icon-export:before { + content: "\e619"; +} + +.icon-rename:before { + content: "\e618"; +} + +.icon-delete:before 
{ + content: "\e664"; +} + +.icon-setting1:before { + content: "\e61b"; +} + +.icon-upload:before { + content: "\e617"; +} + +.icon-clear:before { + content: "\e765"; +} + +.icon-copy-success:before { + content: "\e666"; +} + +.icon-copy:before { + content: "\e660"; +} + +.icon-subway:before { + content: "\e6ed"; +} + .icon-stop:before { content: "\e904"; } diff --git a/EdgeCraftRAG/ui/vue/src/assets/iconFont/iconfont.js b/EdgeCraftRAG/ui/vue/src/assets/iconFont/iconfont.js index 5c567a1196..79fad33e63 100644 --- a/EdgeCraftRAG/ui/vue/src/assets/iconFont/iconfont.js +++ b/EdgeCraftRAG/ui/vue/src/assets/iconFont/iconfont.js @@ -2,67 +2,67 @@ // SPDX-License-Identifier: Apache-2.0 (window._iconfont_svg_string_4784207 = - ''), - ((l) => { - var a = (t = (t = document.getElementsByTagName("script"))[t.length - 1]).getAttribute("data-injectcss"), - t = t.getAttribute("data-disable-injectsvg"); - if (!t) { - var h, + ''), + ((h) => { + var l = (a = (a = document.getElementsByTagName("script"))[a.length - 1]).getAttribute("data-injectcss"), + a = a.getAttribute("data-disable-injectsvg"); + if (!a) { + var c, + t, i, o, - e, - c, - v = function (a, t) { - t.parentNode.insertBefore(a, t); + v, + e = function (l, a) { + a.parentNode.insertBefore(l, a); }; - if (a && !l.__iconfont__svg__cssinject__) { - l.__iconfont__svg__cssinject__ = !0; + if (l && !h.__iconfont__svg__cssinject__) { + h.__iconfont__svg__cssinject__ = !0; try { document.write( "", ); - } catch (a) { - console && console.log(a); + } catch (l) { + console && console.log(l); } } - (h = function () { - var a, - t = document.createElement("div"); - (t.innerHTML = l._iconfont_svg_string_4784207), - (t = t.getElementsByTagName("svg")[0]) && - (t.setAttribute("aria-hidden", "true"), - (t.style.position = "absolute"), - (t.style.width = 0), - (t.style.height = 0), - (t.style.overflow = "hidden"), - (t = t), - (a = document.body).firstChild ? 
v(t, a.firstChild) : a.appendChild(t)); + (c = function () { + var l, + a = document.createElement("div"); + (a.innerHTML = h._iconfont_svg_string_4784207), + (a = a.getElementsByTagName("svg")[0]) && + (a.setAttribute("aria-hidden", "true"), + (a.style.position = "absolute"), + (a.style.width = 0), + (a.style.height = 0), + (a.style.overflow = "hidden"), + (a = a), + (l = document.body).firstChild ? e(a, l.firstChild) : l.appendChild(a)); }), document.addEventListener ? ~["complete", "loaded", "interactive"].indexOf(document.readyState) - ? setTimeout(h, 0) - : ((i = function () { - document.removeEventListener("DOMContentLoaded", i, !1), h(); + ? setTimeout(c, 0) + : ((t = function () { + document.removeEventListener("DOMContentLoaded", t, !1), c(); }), - document.addEventListener("DOMContentLoaded", i, !1)) + document.addEventListener("DOMContentLoaded", t, !1)) : document.attachEvent && - ((o = h), - (e = l.document), - (c = !1), - d(), - (e.onreadystatechange = function () { - "complete" == e.readyState && ((e.onreadystatechange = null), n()); + ((i = c), + (o = h.document), + (v = !1), + m(), + (o.onreadystatechange = function () { + "complete" == o.readyState && ((o.onreadystatechange = null), s()); })); } - function n() { - c || ((c = !0), o()); + function s() { + v || ((v = !0), i()); } - function d() { + function m() { try { - e.documentElement.doScroll("left"); - } catch (a) { - return void setTimeout(d, 50); + o.documentElement.doScroll("left"); + } catch (l) { + return void setTimeout(m, 50); } - n(); + s(); } })(window); diff --git a/EdgeCraftRAG/ui/vue/src/assets/iconFont/iconfont.json b/EdgeCraftRAG/ui/vue/src/assets/iconFont/iconfont.json index f00702b38d..85f17b6ee4 100644 --- a/EdgeCraftRAG/ui/vue/src/assets/iconFont/iconfont.json +++ b/EdgeCraftRAG/ui/vue/src/assets/iconFont/iconfont.json @@ -5,6 +5,160 @@ "css_prefix_text": "icon-", "description": "", "glyphs": [ + { + "icon_id": "43508860", + "name": "newChat", + "font_class": "newChat", + 
"unicode": "e6c7", + "unicode_decimal": 59079 + }, + { + "icon_id": "6807699", + "name": "chat", + "font_class": "chat", + "unicode": "ecb1", + "unicode_decimal": 60593 + }, + { + "icon_id": "12237229", + "name": "knowledge", + "font_class": "knowledge", + "unicode": "e6f2", + "unicode_decimal": 59122 + }, + { + "icon_id": "25013769", + "name": "system", + "font_class": "system", + "unicode": "e799", + "unicode_decimal": 59289 + }, + { + "icon_id": "28670155", + "name": "chatbot", + "font_class": "chatbot1", + "unicode": "e630", + "unicode_decimal": 58928 + }, + { + "icon_id": "8358946", + "name": "lang-zh", + "font_class": "lang-zh", + "unicode": "e6c5", + "unicode_decimal": 59077 + }, + { + "icon_id": "26283816", + "name": "lang-en", + "font_class": "lang-en", + "unicode": "e609", + "unicode_decimal": 58889 + }, + { + "icon_id": "1786168", + "name": "exit", + "font_class": "exit", + "unicode": "e6d9", + "unicode_decimal": 59097 + }, + { + "icon_id": "40154691", + "name": "loading", + "font_class": "loading", + "unicode": "e61a", + "unicode_decimal": 58906 + }, + { + "icon_id": "20939277", + "name": "success", + "font_class": "success", + "unicode": "e8ca", + "unicode_decimal": 59594 + }, + { + "icon_id": "6820316", + "name": "results", + "font_class": "results", + "unicode": "e603", + "unicode_decimal": 58883 + }, + { + "icon_id": "36924379", + "name": "rating", + "font_class": "rating", + "unicode": "e7b9", + "unicode_decimal": 59321 + }, + { + "icon_id": "6151034", + "name": "chart-line", + "font_class": "chart-line", + "unicode": "e790", + "unicode_decimal": 59280 + }, + { + "icon_id": "43924556", + "name": "export", + "font_class": "export", + "unicode": "e619", + "unicode_decimal": 58905 + }, + { + "icon_id": "43924554", + "name": "rename", + "font_class": "rename", + "unicode": "e618", + "unicode_decimal": 58904 + }, + { + "icon_id": "2570142", + "name": "delete", + "font_class": "delete", + "unicode": "e664", + "unicode_decimal": 58980 + }, + { + 
"icon_id": "13253937", + "name": "setting", + "font_class": "setting1", + "unicode": "e61b", + "unicode_decimal": 58907 + }, + { + "icon_id": "43796752", + "name": "upload", + "font_class": "upload", + "unicode": "e617", + "unicode_decimal": 58903 + }, + { + "icon_id": "42194548", + "name": "clear", + "font_class": "clear", + "unicode": "e765", + "unicode_decimal": 59237 + }, + { + "icon_id": "1198529", + "name": "copy-success", + "font_class": "copy-success", + "unicode": "e666", + "unicode_decimal": 58982 + }, + { + "icon_id": "9080698", + "name": "copy", + "font_class": "copy", + "unicode": "e660", + "unicode_decimal": 58976 + }, + { + "icon_id": "796912", + "name": "地铁", + "font_class": "subway", + "unicode": "e6ed", + "unicode_decimal": 59117 + }, { "icon_id": "42853460", "name": "stop", diff --git a/EdgeCraftRAG/ui/vue/src/assets/iconFont/iconfont.ttf b/EdgeCraftRAG/ui/vue/src/assets/iconFont/iconfont.ttf index 38480000d31e32f85f015b1177cce49b56053dfe..92ef29764f71fcaf190bc315c5de41309afbe3fc 100644 GIT binary patch delta 5919 zcmbU_4R9OPneXkBcBPfHtCdz-Nw)R3mSjh^vAz0>lQ>So#au8n6Ci{TE;f!52b@@r z4J7TfYdUZPfzYH5ZJ~F$l)Ltz=`jT8-KFIy!zC##;U*Uvdi<28S8hchrBhnwaulua zd%F%KQ*MSiX}|aL?fX8x_x(Se?0x!%y(vBVxjPZU_alUcN4JgCe(w9?TL>|Kg^=T! 
zE#td3zc;V|UU5$&lsq_A9odwB&GCIeoB=3}0fPG_`5ug)0caiDHofx+JZA#Gb1;8m zd}4Iu$nX=d0R1|cuirMZvxZlqFT(o>%#+(kwpE+`f4lezgxao!AKg-$n413WFWf&t zXaO+9=l4CMuINA*)&jio@X56s#l;^Wj{^`0y>-*@h70}N+??*f1Wtlnh@n{cP`VB@ ziTtx^e29>9hi6aXzF89|XSjhK)_eW@D={ZG6etZyYg>89z2&HGX5fUyszg>(%<+`h)d{ z>W|cq)L(!5r)M4S++3>%~0wf~RrI%Izl@*oan zQ3fSZ6h%+~iAX?hB*B5xs2#PT5b`4#sVIe7P!2UC7DVJiVMLN>0g9s_@}WF(peE!+ zttbI8`CfPPRN4yMxvRE6qQ8$fBO zzSjm&AF4lS1657^AsaxYsQ!ozpj=dk%~1d~qxx$$fWlGzbsIqS=v+N$aCSS_I-h0J>?!s37lMOAFenTLo8&=XFz6eU8~>Rd$ey9dGwnlt ziObgLy&=XUN$d>o+h5sO4mJm2!TwD1T9FBOo3*wO^T$gVG*Onn%ME0MO~FLpQ2URx zb$#5h%f}LS%vJe|${6K620WC(%e_K4r!`Zp&;5r3kvxRGP6P&=DU?g_?JbvzT{@$t zIhLc_4<~R~frew1aPLwW(*@a_vyjW?veqJ5fdv}8E0q-uPF~FQV|-orWuIF!%yA@G z-FMCCwQIV2+S_@KXGOAvbvt~HJ3E)LoWt`0IoO&1^P$IwR^>Z8^Q(rgPs^grkLn!j zBB6!L|NH6{%W`cD>)=;S4Z!;=*Rkc%cHYCY9y0B96LLd<<%maJ+5TsX`_ihK?pu5> z3|buwadtRe+_KhNAveKEkC6MxAh>V@3=m8^sHWMFB!?GatV%gfN=d2SQc)^oNvaDM zN}8UcYR_4daw?8x_H-Su$;0-Dou&hpzbeZK;5GHJbOHrFE zYh|##oCG7RavA}Hf+NUbO#!(p;WCwbp$jZ6l*^Lqvt?Y&9u=p30m<7qLEFv#CoLp? 
z{GY|2k4o;lcKamB_f5$?HRG1Ejku}_QDLrZ#h#)4l5z8*pG8IlR}B`jdQtTF~`WcVkB>d znp$JAObn-<48&pq+H?c7HpLoW6J)tr#zAt6HIU8SJu7>f!r+oo0@NU1GW`((f z6!xuP0UB@K?NNlvw>DFEcm z@}=8q=0~+{OPA9TcF>WToY=UbRH>9UY<%;=VCBH6Qv;{W_xA7q+5Y`_=F68-ShV3w z2rSC?HgBFo$B$!lRKMq*duTMIuJbo=isAWFK$!TKsl5eV1^K_6z6@sz5Y)idL~t>g%~+lYwhZH7 zD&0jxJT+E`k@E(gRl(q4ZsiI{5P*(jh%*8{<8vS6`Gf9QkiYj{K8R-C)S&}RM+^@`6YF@6k^+Zd%)NUzFgZw9$Aabzyq%#WR>OQUq%YuU z&>qn;W9~FL1hJkbV;{I_4C|a;N40pp(PT9xGBU7alGW_R0PK&sZ_)?l=Rq^ z;L4sGmPp+TJ9}HxO%D+F1btOYXz|8=SRU6B%y`FuLrJBSg|so(-EvFwwqzFeCUUQ%)}j?&0)X(Rc}IiskzhX@Qk=zBR(l2D=$e2?^lBr+3$!mvhc&0)Ho4r z3Iv*h^M&IS^LvQ;P#c$1p_HTrb7iHhklc&Rqm5Yq_1gE|c&_Knt5^0jM&o7meC@-m z`HR}T1pWZU33W@j0hEDKl3CqWihG-b?b}LnJdUHWC=;Qcy7?;#!lX~f)JKX6T%H}l54r&-xAY_;JVT+lDO#idarc( z*$Pl3StsWlbbBy^UEC_C14>gMtgwD=$fJZ}L~yq!%gJPUfHuU-yRUM%+^U1Hi=BSg zpb!oui0Hb;|Yfw?_ntFgnAqByVl6HnSRxo7SHP&nNo*}>dR zld&7KR&`P8O<=3QAUB(v{CV+Be?UM@7Ko6M)$%#yfzX7M0II9v^@V z-U1oC7xe?uis19tQX1B&wOcpV3|wocLm`iag zTpmEb{S3GP12rYR)q(G1xKs{5{xH~#=V4wHr^QgHB^++UFPIhlawr~x=eL`0T7O?Q zm&@L_{-&2c8Qu~y?+tC$Z%wv^6DeOzY%gSTOFPwgGA6Yr$IZS7zCvN{7s3KO=I28# zz&M1j{ra!yFN8wOH!XfAD8IHg%Gw&{KAP?6dB|N%vklm;wx80S|Kx!GW9FVE&y(d)v(Zwd0Ds&Kq1EV`*{k{Z>^8nhx9+es#}}ni zmgH1U4QfFR>T52?k=+$Y!;_OC^`mnzO4Mp$mB@ZcnVbZC<)d?IxK@MYUSYn`cq_V~ zt%(W8vyRN~E477{$=YPCQu$S-!kn^KtBot{#cDVhkSmE&>Q+VzrPK;Z_BKsU=A(^w zQf-+y(;Qo1IwvRb8uM|y=CQ{fd#daS`*6qX)sYSzM{pF!a1(BZI{?k{Nu0uItm0Oj z!C9QcZMYpTfGcbV?nK=7>K!Y_My4I3(712iK5@r*b<>tAJGELzF~7i7p(V5jcY5%x zhibG3=|K#7i6GDyLujd>SD`nJKH-io;Y;>GsM{HDK6;nm&N+9^J?H%Ao~3K6b@$5> z;r1&4y9c1RFBTo2c-nIbkj?`#21h0bUhcFe09PEy>lo^f-mH4{mwbFC8i#1Z^GSY0 z_6kux6pK%EvGgA0`%Zq>$XH+W#C-Jxou_=LiA5*ISs0V_UL(JBG#cwK%zM)O0tmjP zqJ!gOx8l!_?-l@Bgn~j3R`hc!7{wA7Lkk-{xy?V3rO*cOZS}_4gJ(LOPAE*iOMV6< zXP1_9PDKnER){^5{i$hdl{HzalyBXZPK~nd{QzK#lscgidS=q8Z76t6eh8&xNnGV^ zypwnHVIJpGe1ay2S=!1WwAx^vIO(e8D|BZE(DYVJRqQK;Q0ba9e3ggssOGE zoDas^1XLBgQ$Y2>y9Lw<_^^O_0;ho>XcV%@m=aK<;4=bh8GKnl&BLY}cOV(CYXu|- z_N>4WU@r(rBq00FK5 z00g|(4SnQiY%eWnp9h02!bF001!n001^C+61*|Xk}pl 
z02$N(0015U001Nh-vg{@ZFG1502%ZE000;O006MC0ssDNZ)0Hq02&Mc003G5003IK zV6fJ0VR&#j000_j0000V0000W0Y(6QZeeX@000_@0003%0007K5TiV>aBp*T000}v z0003{0005_@7)Q>lL!H4e=)z=$lUP1fq{Yf4nreQm=Q%0699K|4I6lz)zrC896=bw z@&6viUK?MW3;4cm9PA@Fa7c`tAQ1o(IdVuwP5=SvMotO$0T2>z0OZwxAmIgYHAUbU zJMpxBtDTmnr>DQ_2JQeY`)X}y{-WW3H2UssT=%!Qf8F11{Pg{If4=#dwp!|FMm^1G zUJF{(l2){;HLYt)1MO*Fhq};Z>6TtuE}Lb$?38{Pl;e|w)ArfM`R>&fT#qtxi&2jL z9ObXChQF$|`CGIfHw_+emk+$>6;pH>;W3Zs@YJjIhR=LroF|O&me)MzBhMJ}d+#yf z_5H#NMtR7rD=@<_fA4tdHJ@afrYAS_55Dk!zlQ5ibL{4Bwk9XM>ZlW5e>4)V1R4$3 z1dWBOgT}*kLKESNp~;|*(G6D-O@-@;ri1#0*o9ETIn$wAcf=CmE&1)Ob!_XFp< zLGw56M&^G20C=30ISF_h)tU3=Ni#ZUG@45q$&yB6jiktyZH*4ccN~Zl+krqxa1tO6 z;Rq1G0Sc5te{pCyk`&63r7i8IVHa9x3JVm{mTpTHLduc$OQEpcZI=y{QYiW81ubo( z@&0clJ0Ydt*R^N<_rL$W|IIu8^NFG<_`{!~|4PNF35r4`r3G0fMPdp_mYB5JkBqcg zuJ)ozx#}(tpk7p}mdyf6b94)m5-5ohbafCxhtdG*e@A6A%>bQ4(1Sj1ZLV7+|j+ZCdNf*-;s&06U+ZQmb%l`r1 z)m~UK{_yx{p}oB@I(~Xu;w9gDmhoVD{_vkpe_FU8o1+<*Z}jpZXdatjYU}epzmM_b zD*|4OSA`iC`;A5U6Z?A7hLP^++X_gli^lFYw})NOdJ#prV4dH=pT#4Tmx@rWR3}A= zMw*cYN$Ew%5VEYG3wo(q5y~dki>O?+baLcb2e0V~Br&gOi9#Wv(HD1K{Ym>*bPv?} zf9z4#=VQ?xkI&~h2FD|XWSvPA@U8X-mtKneM=mcU=)WWj^*fNC_rtS;l!ySzIfL)v zCvc8RQU*2TNSrE23~Q*4*s(ehx0+Q0vC9g8A%nFr$Pz3-QY;lVT~%si)5}E=GC6DF z)6E*HnEUuEf?*+0KS1Dlp~oB|didM?e}AqQyj!*f1tIt)!F&0%SJ>|5JRZ(_^<93P ztC;l{%_!$6|BA06P4hY#@{^sYd8VEza3Yq*mNQBf*G z6{%ioF}0Ff3oFqZdu%nfwus#2Dp^w6bT@T~)M{jp3F0QoMbH++0qa~vVjL}!ef*s+f*}{d?9`=e&!~2HSI=1c+AOkY=%ts2 zUb5fYvEwH@cA)7SKFZ->%10O+K4E&t=|u`fieKY8}qpP|-qugXbD|M;TJ{AgbG{=;jl z??hXNCx$N?zM~imA=&8Mj9TV(hU11#HT3Tgz8CA1K($ayDXNz4KxP?C8HgDVx+O6g zM+F-uLDbVlG$)y}qHP+we`Dqy{{T(_$O#0Z&!Iy>?=GKjmsb;gTetc|bXWZ#G1qh_ zz;D{Sv^GuDppGU9)!qdV2yGgg1faF;C7dvTsCl<-gE4lI&YEaXL+do*Z~%Q2op#u? 
zIw>j8NNVO4yc_I0MFprJ=%+@RRF<-+GFX}gu-cQT)zlj5d^lUHf2#xwFuKyP0O8mQ zlDi~uUX`vg=&OEg1*u&MkC<$i3A^=?ikt0XU>9p$Rd5~XBb8SQ%|G;NVY&cMeY#Pi zUnUZXHSi`@C+%LRa0oO?B$BHW4(`xNA0Xhuas1TW$oj`~eE#SM(6dKB0J1(aGc)Z% zcr)x%6!xb=Y?q`ae-M%B0Yrqlh={A>zy~6bk0Kf%jvi`k8k%b20I6Vxrfe#gz3q&( z2iKl)TQ+wLihdFF-OstiRPg?w+vk^pzM$KC$eYp_-~R^C^oHa6JE;8%`$=C)bNfE& zOL9D7y{RDT4Kgr@{cMPu^&RKog>bI(!0u9_+z`orG%J4wf00+mZ!{#B2r1CailC^D zug@|>e<3vatFEGLs-Cox$#TNx66FNyKUHsn%uU7b&WJ#pcM@rxH+WyX|3x>L`ztcN*6d^PhV-iH@a zU5-^$$UZjqf075g4AT`=GYS9@Fu-5E#< zKa97#UH)|*&$^%xk<=dw$-tdrw-jr-Ce#mzf3a{lCQ^+U@pAePVCjQkF?3lM zEHPVCYbwsZNIy{522QuW@$MFmSF9puU-}j)w@HHWoY|;`(69p{X54|+1^DQ8=Bl=9{{!VG;H*1_@8kSY*&iv zAQFITe{3zwBLthtC?p<3Znj3YmTaJ`s1{;tJgq`YnQSs9jm|Es3jJqEl3o&9?WZ(< zH>)d88yZ?*-_CKI&p(}Uvl$dAqs7rk#E@R9MOs>hlBZts_SA_4W~n zfBHTipF87Q;f&W)Q%*!j1RV6uDfydmnoEi-(%Q8gd!H+sf5u_d} z4TPwGqqU}^56PxZSR%MrNIn`-AYJhAIL@o#1s;)^&tRPN_}CGT7rA|&g%~qT8e?{d z6+FYCP>Y5+&zP6T2|g4EjCn##H!!3ae>dwM@%j;sJnX331yLyQbu%G$+^@>O;?1XO zsZ?!<037gnm%2P&!-bhXcgQosDd8mMJPxv{Fp~zdfc?j@UZ?7j}IZAXBoq| znG}p!W(3coV2~GN%zMW@0hb~AIqn4DsJO|*1D?fRpZ)7pb*P$h05!M2=gjI5e^MO{ zDkAX2RSQ{J7GfuQ=YBO*zbT}uA$0DQJ9l2W3aaP30@~ z-Qj37JPi^2iUv~ONBq^-(R4VvqWFkf2&sB)SAs?Zd|$Q zY$x!Ym)-u2rJDF#QFFkgnc1>()xK3LZ)`P5Bz(iV)hliqr6-!rn%MaEF8n%ty8tD|PR424y*dm=Bplc%k{!NSFDyJe`>=Jo$(zn6CfZ%(B&?HU*TGVsZA>PLAO zT~>R}B}~9d5AZG~uSEIN93H|ucE2nCj2OIG(^9Ex9?8$@{4RU|k2mHnX4S08u*9Pc zfZ?pxVB#=+l09*V?4k-2hv}!m&akosVMVRl0SWNe2L}fge~WhIUPwyP{Pmx|d%w%I z|L)5sq=?dj8R_ybZh84Sn!fJkE$eopKpqFTB$JwUm5Y8F_#`RyJej&Oe?7)OxclxO zVARjM+<7I!Eq8bb@94d*+*9G;7EMbguX-rw#Aw&!|AyEy3?BLf*mV|8oXipCL@tVo zD2=jk{FT4Ce@>~XL{BnlLU%4^9Px0HFnaTzUVlXZgP2-udg! 
zdFPyU-Mzit>(2SarRxv!*|;58ebRVadwbjXNlz>tZz~o7p$DGm^&?29XtctQdr`y}d=2(|Lane|OfBWp2JP)?B2D!vCu$_jpgbtd_ zx9UWf)rzC}#C;P%b`}Q0<*cFcFCXL7vpSeIl6evL*n;qCQx#j~LO>K)zZPuQ+9C;4 zP!yrQRFGxCOhnqW_F$BC2PJQtnw7OIA=8foJYat>%%97*de7p;@WS5C&R8)h@N6Iu zW>WEDfA8qoQVLE}I(xLY7*8=_zn9^}z`S@zX<$UsC3w2pCBwVi=UdJheEV!Z{1kow zF91K0glw2tHRGs)lAAvLxVpQM6M`O-tZ-JJ=jz8^j#Nzy8aJ1DHoXOXNE1|5Aiy4r z5i|z=j5g&&MEH!L9tFulDk0Nv$w@l3!TvGJf91IQIgS+9q9EUv#6xQ6Wa0@w0xnF* zoDhi!kvYK0iIiGDO-{gIkH`u8N&CkP&)*N@KmkLAl5Ng?&-d}uco;HDk}H)#rpMv* zm&1O@R;6(XWt9|EF=U8Hz*`wtxfrb^OjqU)-Ba_s zH%)Dt>h6BMyPJNg(c7pmZge&jF)Vc_tNKN>B^%v1Rf=ueREXApt>-ccI4+^y8YCN#0!D$3A$_vU`7RkRyZ1ge{SC47#Y)m z2$NjFB@j8qTBQhftmdRVSvV(^RcrL)ZA)YJ9(4QuR2IEdKTN+xmru7Bp*{BK&wsvs zJDRC1EKihGIkvRTI=xJz?6y!)p#59t zPRFJQJcuUiyXZ-Kh*VO+xx1az+#QXB?ds6cUWa)|?-*(l0nkJCB!ap#B7jcl>huqa zlVWhj`2+$;(K9nM*Sjb@4eLCI*djAqYvgo15t>e|i9sb7%0k%0abN26^b0Gx_#HVl+``&u8R5r7X*tNYElT z>bIK%PrJ-%0wzIhtYe-vk-T8Mdr ziL~mbH-QLE5kW$?f9oCgu$$vNoWxy~DD_z_EmmJCQEKfnjIP!OMB9Rqn17@FP0qL1 z$32qBw~WnIWIZXkqRl%|;_xuJ4U1DLa`WrlBoXfpRu4h60#P_p6Lbc0El4_C)ZoSi zHZ?Xh*lPBizo6|4tH*9!D^*sO?A^O}Pp?GOqAO3x=N2wnf42DA72GLjExd9(u;ijs zo_unqkuEV{myY1i!da1=VIE}t<3y%wL@*(#t=9BL5CNf*BgE?L?+9oXsinyeCC(8# z49$F_h%ybk?EK8oU_NfpE8@xuu5W375Pe#S#})gAVE@uSZiN!xA1}n=LHS16W%dW{ z8%P)Q>Dew0f2;k55+hyKkWsjs2=yk8bU@<-zgmf>bGbD1a0*!_gT+x$iN)Gtu{-l= zDAM_6G1~an`eQi?aVPe86F-aNa2_SFY#`rmCs?RDR|-PorUOoe#O*aAfe>c#vp-up zw`d@nqN0lAa?R{O26z_d-m;1ygzxh5iqaMg{7W#{rYOAcPLW=+WJ#+x9DYP9iXkiv(}k$6N9(T) z3pgYerS+Ol8|lPNFgP*{DCv19W&ccV3&Kd6rtscin~E|i`_D_7_nQ0h<9HrOUmkKu zDwF-VfB3lC2$0EzJ4hplC}>^HBvB>!DQK?1_L%rXef^Iy*Z~K4r4! 
z;`TR!hIZulOFnhiwTl;Dd)KFyT*q>*1nc$Cte=TtSLpTPRNv$*gTra2-w0|3dT!L= zAmE_O&2|rY0?5bCFR@%RZ~qG9SL>*gAs$*!f1OWV1jISBP?_^SvySEO(JFI1vu*NR zzYIiKXx8>L47w!(O$SGfAKNO%Vq($WTO=9ZHUgXq5S^BGbjcd>GuN90tU;Hw1M$e7 zBKq>t?nQJ-u{i1U-@3KQb{xKETgCikrdrMX(yVOT(`W|1KOOC1dyB=QbB@#aTi~NG zf7oL`*pZCj#_h6aLqZMfb2tp*K7aAjfV%I^?m5(w7pL{%}@c4 zeV;$TKEx=@L$3b=@wqw+004NLV_;-pU;yGfvo3_i^V@u7;AUX}fhVyEF);f7fB*mW zER4*JKrROZ6G#*QTf_`j0001ZoMT{QU|??e-@p*V!tfsmSQr_AA}Bxy0HEszk^lgB zoMT~NU|=B%|Nl>%I&v{_<^uqeNd&C`00000b^z)DI02XfA_9a0_ycAG`UF-4umutY zFa=fxz6K%&(gyekAO}JRo(M1qN^S_u2`CAc3JeO63knN53vLVC3{(u{4T2524oVKf z4;~Mu56BP_5KsUBc${NkWME*>VJKt}U;qIoAm#!>28RD&J_7(5X92{s-ws3pe^1GF z!ypXwv<@V8y6^kGmqWgxUqV1lcqQNmI7`32c9YjrbwI;N8jZ5GY#BxNf4pN2>&Vdo z14kD<6e!Wh1~##UZR}tdd)UVT4snEIoZu8^IL8GpafNH#;1+kd#{(YmglD|q6>oTl z$hf1~$5^IZHBax$`I#j>A{G}hf6=7dvO}K++)6hR-{WN_m($!NZoSmTjd-k;q8NFg zSlbj?kg1`IW;i5ajUz_$%7?@PIXNMw$x@Nnp5bt9t=$>l+J4sl?;u3@waZU XS^1bm6?}V>53KQPrj%trC?1bCcwtg* delta 3885 zcmV+|57O|MI_x17cTYw}00961000o|01E&B000?KkrX|DC{0ykZDDW#00D>q008~~ z00RJcre9HKYaBp*T000mm z0002X0003Ztu6f^5p4E%VU-I6_S0x=Lq zpVZ-wC^!f;9S5MNrsD>10d7H?V?aL^6sgjaZ+4Or(PviAT3gn5>^Hy- zP>7KzA$^nZ_|j@>)(ak*^^#A`-+9wt%LX>Fsm*L|3tQUSc6pe`e|eL)`EV>xtMeJG zdvVYHZ_kxTI`8A+H)^VWp`!{5T2%Dt^28&zywax69kp)w%)K)C?b=sQ{Qpt;kgDHV z?VbX?B-RV`rdU64|5gXyD>ex9;@BkctTqks9J2t=Jr86CTLf~1Ed$xY)`7fXyFdoX z!$2;{<3Lu)o51VOf7`%&Iqw5GCm#aY=U4{b)2CHH`2%4XY;phqc${@td2kcw760C@ zq{X&&<+b*ZC0W;ccO@*ZjaCQXOV}jfE5HB_#yQNDl!m0V&Cwwc1SJrlf22u87nk}yohY1&Sv&2$1oGRZWPX4+hx!Rx+ne`TALNhR%fyzjlY-}m0{ zd+&)bgiQZ{eTy9@A(AG9kSi7#GCn%8 zzuDCF`$tAM?%jTSPw#y5@GpkL!4HFBw;4C|eIuhUjE>yjZ1ncrzJ2e8^=tPJFzaTP z!$CsmyN|IKfAHNcWDy}y;z~tDm3^FMK@S+F09*{zVyURcfb-$kV$5!)lvBLOB?Z7| z8a+@bva8GGvYKZ+nO76Cym-seqvt%Hb4Tx7FNf5a$0yzW=z%wOGtBNc4s80B^FhYL zA4nv^;T<04b(E9k#H$$(lUEmSp+YFibO$`XjM5lbf8)wf*}=y>ndgK2fp9pH*l{vL z$n4(FvqROr2Tj#9e4MPPb~m&^v4k7P?Hi`S2|%r=;>L0NDW4a)q&O(0VjC2UpZE0i zC^q9spNmV$3Y`lPRaZ{O>?7-rY&`!<{ae}?7v#pB`dJs##c 
zl#`^ye=BtB%A!py`_hpkFR{=S@OU#yNLcO4P}zawp7gT~{JwBF9>3>_G+}YyyV<|8 zdq@LmBTLch9D1DY5%*N=fuaQ|Fwy;myotL{QM*g@0CujVD!F1_Pf>clgl=)+AyE!y z&*_f3xB|5RBX#k*$y!E@K+lVT&fbni8#ceUe|f{gj*f*JHsAG?O_KpL>iE}RH>2!@tS`6!UxgCV%wGgQB_Mwjd) ze`Pb1g*-J$E)Fqh0(QNj2Aky;e^BJ=!hCBu8;TjCqKH$&qAZI>ER+qm@{OFAmuj-A zDThr;W}e3Oyz{$;z?MLB%}s$|LtjUGd&J_!0O$8N_!3d8V_>6{KyOML2Rf{1!q-q& z;}e4Z`O&sq_cAFd;cr2!q}8mhtz9i>e*yQb9z5%_>`&PxBtYV1h>W8rwJR623-U(t zs=3#l`tTkbbUJgN=jJJ&ELKlm5Q_$jH!jr%M!tG}Y!r!kD=ez2NE>G`LWhxhRb>o0 z781WHs#ltLOClyS@5pf`@g?UqPDl%n3j*zS0w2i6*|J){0ww>q`EpDa#85~Kf6X<5 z980KEYvdRzJ1NJUGtO(iK;UuIgB_}>k7voe>FMc;2N2JG(ufhPK$fHL*O2Sc9zg-%awEpM~k&G^Y5vn!Dc0-#F_DAmK8PU({iO; zuHdI!o+jloOR9|N%kRz%LVT>YuB>(II~Y`3X7xq!N=0c7P94)jN=u5l1@%o$R4Pz$ z@R^vHxPtpp#Jx;nhC@bhZ?z+^5hX1&aWPpY3f6R7v?f6UL;3xC<> zRaFo8pV~I&_0U&vc#aA7e=fF+=V&BNB#GYaBG=HYgm+O)%C?AMRiwj$z(7()imcH` zH#F)tiC$*v)KbC1xK=VtYDqOYlT$gh#5|oHjyR{`!Ltby{y6nM^A3~0-nkY|I|FaN zdH66)7y9z+@~Rve&f3@K88DwQ;TrRry@s(DJ1~FNrNx;~e(Ssve_ECX=MR@FPqwdY zVvsYgN`f)shYr}7|Kr~{K5?MwO zs#7$IaS3=@s1j#xYSH#yH8-o`bndEyN$104TU(N;(AN2I zk`D%XhtPHs#;1-kf8$P>P7<_%@ws7TZgi|I2@|exIoVd8A!5UnopHdqs}Y-aWOe!Z z;CPUqc3%P55(e+(ZVzEAc+Slc%`Tur6{=` zpqYLqf)!~3!f=$Q`PH6@X#!YGH&DO(*k|&*+AJd;x@0}mf7%ioh_$q4bh%T>%d#Hg z?O01otjdz7Ea|IRRDrTMkHkk-+t9vhT_P$szz3gO=kqfv^cfZOE&Xv+(X4bTwxTqD zF$=!sPl<*^|LCArACC+URk4nEc%Qk5@wIZp|- z6Scz0Sf*)kZXze+qNg!Q%~nl-*fo+UNubRnfm(!SdyIO3)r+0OND*Unle0xz^pKuy zOB^y-?rAm_-?}9G-Sva_-z*hIa?XhpCn_UAuDyF{Cf#@K%H=!P3af7FyL-srf7_}v zXQm0ze^>c1E?s87g}w@5Oqx&nFyA7kOB9$fgk~vOEh?~dazU)pP)kED%nnMpFylg3 zz;#IrI##d7Wyx2X6~0R8ol#}2&^g@2!+lCLsyKW3uHjB$tr9&OZHeL!GSz;8(ZxG^ zDGS^;%i@YU7nKNQ8KYW+{gk?2pbRiX$KOz*f2njjg&b}t%T!qucqJ0aMj{VoQrM+3 zGu=S-U7J2H(GqoQkxT50Y!v;G!^npC_PWIb7F<|Xiu8oCh+d&}e2F3u%dPyqJrnDu<) ze*-s$lQ=X)KK#H<;pDHihKFkcijw91KjitWq6BIm3Nrou{mnHE4JRclSkH>ROiN=j z**Nv*UXiU2TGFO)lF?GJ{iqx?4SQxjB%HsgSspco!%Cor&#Is&od5J!@i%>zeVUz* z=*y7B7{|>os+Wt^f*BBzh(?4c-nx=Of2&N)Q@C6}^BC+;E;Ogxt(B{uUA3aEy}fP4 zs`16Qw_fD6@a2E?-}~^+<;!kkw=pxs}|8#JR9gn9Dx1iRJ&%e+qLl 
zvupAkpN>@*%*LK-K(~WPOJbsCpF9|hM1qzxVbOnphiKzYkm#>oAV!qL*z2FR;4ENbn=)e}gmGyeyR&fNjnr%&xIU>EPJu zv4c|M7=AuKnfg1EVv5cqAb!zmb%GjFZaw&W_Ib9Cs6@vdUHPAe9+~_1e`d4Qioc{6 z^QLYvuw(0%&aSS`En9cs@RpA5?v7iw?$|ufFV})tQ{(*8%l3BT>PN`tu6;zArip)^ z^H`mqd%~yqp78t^L9H@K0001ZoMT{QU|;}ZZ>2>q;`wd9GH|mnfWYGm(T*_s|NsB> zER4*JKrROZ6G#*QO|%SElhGL)e+&Si)dqY30C=2ZVPIfj!HNI>Cr%6#0|3ox1XTb4 z00000002G!oB;R%W&zv+U;^R;5(7p9v;-^!yaieXk_I9Mng;dIf(R0C=2Z zU}Rum5M#(?;9vj&CLrblLI#HaU_Ju?75M?80C=2*kv(q0KoEtW!H@)$vzHn)0e_Co zZo)7Sggp=l`6;EerBC5QAIY(|#8T?D)*Hm*H$c@>=k#f2wKHaA9%{}1I$?!1Dr~St zjU5`a=+NT{dpzTS7aZ}5H@xG74^tK8+-dwKOV{jD=;JrVN&+gs_ zp99DqSVtuVlz_GlP_;q>$;!IAvIdeH0^dmh3{TtqhsP8Ykj$fWAYu({%-94Y5i1wz zzOp&}qI1!>a#6Y}7tPXXuXUBW4F5N0CM8V|`f;-0#6<`c0B=BsfnqY9DY7rp+ncm| zGtBTpz>bTYB5$Ao0vdoIz%0t(ARq~xb_qPgw(W3z+-jnV_W(g8Zr|6L9a7pbdRWYb zj9KemcVmCRiIMyyUHv~@X)D^(zHVE6e2z1=%3=I|vd(NZVr7w+%p^5P?3@&%k5) z%gYuMLjjb?!1}0`ACe2+zp(AMLywHuA8zT}2%0Mp0R{}^tW#ghN98FNhC__3=&KlH z2C!g8L~!$rH&Tk0D1%bm-ToTA3=H+BNIdB!12(Tf-R8 zqEdlDfkt_{w8~PefsX#;#Q?3C2hb|04Z1*G63fExHGfWaCaFn~2BpapArfJ&?_0Tf_uC14P1 zWQC=2s-Sh{dP!~7xiNp`2z!PbhV8n4)gt1?|*~baNv$e-y_(%=`jX`6v zI5dTVK_Yk4q7gAl1ZIM9j*~1GgpUB>Q;!ftJzl|>@Cpk6Fbj^dSKR<2U=>1mt*3?%tq*S0Y`a)i z3|AOw-o}_!xaWxHo${=dsyijZKc2aT^UP>&ri9|+hK2}NYanr@Z^8yIp;}cHit$jp z85z_sv2`!?#vLE&n&))w3#|@fx>8m&5Uz{$Z%vc%Cn(UT0453PYB)MPE~+@fp6!Y{ z4s$^gHnsGVp-6TEQ~clzqu<@hM6=rEm^q#TlTqj=H^Tl`9U>E850_QKJ?cXSMhwlK zp3*5X#~}piy=IJm2O;{yE-wB@gxR%6`&gi*pI7!*4Y}wuN)?`cwA9FPFNEd&>Ic+2 z%zkRAr&p&Uz~$?DCK>nIOpL)aWKT65_ORIx-E^T_W1;=K$vQ}8O;e7;y zMz&MT8xDpg4H2LcPJ;E8WbXANNElER#6y1ot@5Pqu-e3hSXNdwn|k@O!o|i$EozPO zN7sCX`XGs*)cp#HK5@%hB)k5Ym-O%|3iQeY6R_|Ng844P&X&p}ObJ1I0Eig;g|0OA zQ>vH2m8=nTjJ#th3{8I>9HceB6GT_c$dX%fv5O0jUHgD3vAsb#q1ipMHH9a~9HreY z`$~&tBKkC+NmY!+hXW|k8}ADtKOP*_9Apx!E`z5C`h-=U;ZR-6YMt7ed9dk;y$>r= zx>+2l*?vMRULDabt5)|&t>?;=!Zh9CvRe4a>>X|N^(OTS%evDIH&9-kpE7LA>O2LF zIEqDJ_ZCAlHn-?TJPaY~ky8G^aTxN@(FtKVlTBsy)QS#ojN1Khx^X|-wQtP$F_x?v zuCf^QOobzvEpM|5N!^mCsFp>7Ips`~BnP&m 
zjG1vZJW6*QOQJ3+eM%yrqB{{JG`BpR^XAaX-|we4KQw(h#BP18(uSF@IVTZe0UQ8b z1q3G~6zg=D(&dW80h<~g>Z;jcS!gv739W0wjxDVSq>mz2+J(a4r5=w*Vg{0!35CeX zp6k^f^|hL4KLQoQNhR4WJmOW`Boy60kdbp|Pczy=^U8l|UUUvIN-q&%c0G8kOn2|U zvlDNQ-1XJ?DpLAfq~+#F^Y-&QIg8W|qB{yiwRP6^X2ohpI%JzWx9to*Fi`&jQo5Dh z+0awOW7d40q=^krsc9RSHl-S(uD?4LRfX^}ouUXH+Ph=>j>>yF8dwPB+nF6rBMOcf zZRn*bg)1X_rak~;67!IGE&I+GL=&2y4Kg|&yrAxIN?v#3Nt0fD&E5(>q$~g5#)HL1 z14uIvhGvU^<+yGJGHY)2Jc%Y@^RBI`tV(7|$y!-nd7Q1zZz8JNiB)oxYByK9AP`S4VuG4r13SQkKQrkFkN2=ohcG6WS#_Pl`~3(w5qs6K zUTm`Zp0(&iCGF4(Bn+xmUJP2Zjq(P9HE+LaLR}c5e!gCBqqWAyyk=9q%Jt=4t)-Gv zGqIG!Ij6TzQBJ6+HJ`;X-WVT^NC-lHofFn$!{fNWHgH%%(Scee`LBf626EV&_UkXR zhc+G$Hngi0eYIw+T&3zOGnGigeKGMDUtp^(&-htN=%1p?D|fQnI`K zm%4tb>fxG624}4vVARx#M~g6f##f#o?kriw+u#w3xGdI)@F^%n<8 z5LNb!KcP>hLsAznbkl?y0^#Z%bqFzRPr0at2zQua7eFtJGT0Z~^#`8PvI8W>w|v!~ z6g6Cc=q&V2)a7&)LJ&6WY?*;CP2YHu1qac=uZ^G0>-Mx){kErVsRShj%;Bux0vllH$5Kg8PiZX~JHE+fEQU9`1tW3BArN(8}~d}!aE zTz-E?OD=pw3PsOga*vM*BG&6w+2ToA;V2d%k7A&8bTHv#kq1mNVC359!V}HjC`B8m~eT&UeRKS;?8TzA@hXp*QI!JQIc}POyFO5#a z5*Ud6?oiGDcTkce`e-KZd3SvDwxNQsu=Z^u7QsMZTM&fiDK~3wECNmF+tv8$+vr({ zWp7%6axJgS%)fq+=u+ZBWA!!8`|)eUX3?&=OVM_m z8}1-uqnBZY#R%a-O*aTZC=fD^r!Ea%ntD9J(Z+-`**D)>F0?EWaq*P?8(mXuURGGs>+<%%!rfL|)g^c8D^7}yFWzFSM2^Uv1tmyxL_b*D| z01>oUXtZC!MHOw{eOiZ{d z4JJdcIqz~eLzySQD)2{AQe`)%rVL{!<5VzK0}7>?lC+j+W{)85$$0Flt+Vd>JAS#w59C_x`n^i*s;hAeA9^kPx-4tmu-92&e+XSc z2ed{SpjYw~Pq?Mrr@q`zT;D!isk}QInla`r+mpV(G(2r*K~a8@ETmMaNuFV`{mRet zl;qvb1|aD7bX<{iqHOs`3upNsZ}HfowQt}HI=$bmNEp9gf17_hW9IUYEtd#7`5W5* zybPCUcB&2l*8@c?^m4tIqo5RrvhAz zn&lp^C(g!S#m^?{;~Q%)lih5|%cu?DN7+R=d;0_)ngH`A)rCBJ?Rif}vnHV8&A?S; z_2wF}s5-JJ#?5+)dsso;^ywnC8S`g!Cv^WCom8fH`3k+S9M0#DuU=s}AHf8x_B{di zvv5o--9F&3jTyhj=A6wMyt#QXbq(zu{U;?-w`~i`gg2vnP-eUdwa~k{{TU7jV3zyH z{tp0r2!nd0oaK-a)iVO^8fS;`IlZ)J8Ucgi9gN{$E-|2q?DicGZ!&6 z0RH~%IwMqi7ZnMNuce&0=U)#WiR4ZBq?@YaOL-yb(dC*@^8CFhM3hZcP6Qn2l~KkI zlJa%aw4V%7+#Mxhj4+40Xa@?v;;ZIsg$Bn0$EO&JWjrAtKj%ZRt8_TGX~z2X1)Z@W z9>f*U<55=73Bk!7J5CDPRTJ9awkMU%3;+FF*bFAXvbF2}$wr5U#Plnd^wOm_xdp+u 
zMPj|BOfyqcRJ`WLG&qZ*8?m;meQxp)OJCej^n5lWC|SbVA0=xnE2e}Sz*v?(S*#P) z)e3czG|1?DH3gCU3)}y63I;ZK&F6)YyX(ia@~1d>e{Z4 zL80c{Tr*_i+O-`u-w4H~B2s)~r{>-9NcNkauoh8|xLsX+dr#|$4r1<{y^(~iwd#wc z&ieYFKR0fiJ$t_z>^eWHd`?)u?~%i8ZI2RgEeN2~ z?i1M6-QB`9u>7W7y7ca3z_cEXoV7UdLu#3@D=86}UA1CLgsB&gWgKXT70J}$o< zv((bP_VgT1gh4_`;3p6c5rX(Z1c`wYG3WH!Zp%Eo!jPM;O&ulW0qeu0gdjl>;jmNm zhBC;BOnj9)>9xp|F=+(0cLEAX!)&tSark@!?K;A`yqbmoo5V$wAqtK#lTb@2Akaa@ zQAT8Av@25j;=DO!KlyJ#h(eJ!>@d7_>yX2cQ>3a2gLy9>l>_zWzRBvz+{-GFm-e?PyBmXlhvy$*^CwjRWOr99ZgGi<4(8F|qv;QVA7nHIHT~NY7UZ6!bW3pMmxS$& zhUIRF3b#0(y^SRF=&M{1tjP@ym{+CMMkP4lN!EbeovomIBVI5nI0vqgs(4Kg7RK_y z1yI+M<*#qmpY@-#Tw^4%oL#r|ilC?0WJMrQ1b9y#b?+>65B3QW2KxpF*LS+VyJC@~ z&b_lfIM_$)CNO4ox?L5)`=txrLVQA8LwrMmW~lWQK5&DJQPTfjN0`KO4F}22k@0Yn z{l7=#NJ11~RCdC{f+d1sxYG>uOBp?KI@l@Wn2nF^5P8V9V%%oXxtv~~J~kKc#_sd! zEGg6}#1ZGYrMMdetU5d5wZ5@_w1c?Al zok=}O)%6~WxpKbxP}%3M)#tCo9P8Cd3z>3BufZXR>U()K$_v6#0yjbHKFIe|Tp!*mK*j)WfFaw3@O^Qge4!)* z6ENRqe>&g%e_aNXkH+osRu|T5nJ2Q}kj>o( zh6Ark{G&&{(h48FbG%b*oFd-&tPjO16tR<@1!@+{Y(o_g;&k(&Ka0+y8_izX=Ss^CV7*@7kKmaCQ=)Ts0x$Dq#(6S5!^;v zVoS0Xob7b9yfICpl{^>gB+n!|@$*wHt+D*56B=@FH7WFLYrkm`PTvHqoTLC!$>#6x zwtOd&TEDe^vAAO_#GCb>-+>5^QpL>0r=^+54P;Z=^+J!PoxIc%7%6~H3Y=5uo9peJ z>r?o}QKM(L(02}kos;L~mFJ5VS|ZO2rR0pICsFBkQLFH~>!@`(6a;FURB{s}fM8HNBzsh73<1rHW8_0Y z;R%sV*~i+*K$%jsA;J&`EeSx3zhfl>3JX;rIFw}2v+e&cU?W4Bc(RR>X{*yuf z4P`&qNyu)V20!c%R`_S4Td7Qf{*=m-sU$R-MoNbsE*&r2O<3i)#MEXMUk5Ddu;dzT~A=xs)) z`-y2?g7Bt(A(}Da`tI?1(FX|fhz#9KPiCVVg%|hk9vwU!UQc` zoOfTM%tLmnIQT=H`<*#PP@kg#E zR7U#duUg*lJQf7GiCp(yn|*esHFq7hVSlEkTw7k?HH`*JH`A!VBt*N`c)fA4w;HgdaFaYIEhbMW+ww<8`|4ko=fSIUj=N q*k(%DK`ztJ$k;%xi4%McqPr#48Cm+fi`9G;Hb=5Fd9CIRj*bA_O7BSk literal 3596 zcmV+n4)gJMPew8T0RR9101gZQ3jhEB02xpK01dqW0RR9100000000000000000000 z0000SR0d!Gh5!nnP@4<^HUcCASPL!y1Rw>3X9t2J8=4iPqMWkEBKs?B%5X2*u5{4a z88>?3pjQwJ0R7S!7A_6}0IUE2uz&&!D6jwu05=%^P)hhGm+k$OEy>+cda(=&-4v=D z-G##lFb0dV%p`P&-udUZ`S*bd29MH)HV6a#G9|4($`fqLj*ZP>E}~dPVs@0{#?@AuN_+IIpd|5mRqM^{CjYk+ 
zxZalY#CN;}AO$R2h3bU@O0JZYSyxpO2wRKRSGSd02S-0OEse~=Yc@|@eT7otiTgny z*d~FRe|-Nk%Mod7Zdlo#D9BYcDl#(T$pS8-RH{h9y=V&^@i2##8J_fyuTOKu4h$c* zH*_+5@aFF+{;S&>|2JSCZRWQHMmN9%3)XA4NZ-*-e;_8V>v{-U+j#?+MWtUg^D>!| zWyx2jQJaP7Xm-tkc~M@MZ@sX3x!e3B;iL0!?#%Z5WS0NV&wW;aZq5SBXz7FJC@S@^ z(a+8RYaMmp{^y|%v|I}ywG$XPMyep_1`>gw3yDY&kVpg;iA-RSk_lQ!SsLk$d}!or zltIvj(Fj3=(FQ>YV<7}RFph$tKz1eQ2XY`mg}jJh0LbeIJn~I~4)QI68lP4K!dG|w zm8cJ@7g+~5Wc-pb#l5ZhqU&ad7EMkin@V1tVHF&DL8IDdy17o~mM}qzT;aNTL`TQ2 zc3)*>2WwJ6c3Q2!vYU6f!Bq=f2Upt<4Z5SPMh^IhuA^=wYp*tZ(#JVkbKALgG^{gh zS(rv;+ipi(GOZnKhz4IrA=O}D3V^66a$UG-tZUQ{-E{4pB+d(q*ChX+wR3cHpYB5DibKHP~_zns99k4_?ay3!iy_~+DZ$l_x1EoEY{I=y=h{t8mgN;8&NNN>uZ4>Ys-*6&RRS--BVIW5eZ|OYrU5 zy;lwobKxfHcIVCC#=WcL7eE5t>cc<~`RaE8g2&BXED|7F`$ZIoCuolI^N;|4()C<3 zCuack)sHQV07p{u^{=}gTY86~2p^*R2wq1b7Jm@~i2@(j3Mh~w!Fxr7%@ZS8LNR}si3?$6BfAHV4f;x*Ou3ii9@FLW3q<-VW= zA_!urmuHX}BVc1PRXr4UWLqeu%TiRiakV5@R*ASma#YGMeb|~{>$_GLUj>G&z?2KP z+AqP*MmmZg{jT}H@yQGOx_0_c^NinmxPe_+)jCaU&{#^vQjtK{{G}DZOi9(MZk<9> zD@P(~l2sHtGb`%Wt317Wv5@vuhvv&GfsHvdRgFgjU_DceGt#nHM%&XmQsG-+wE?l$ z&CN|swrmgaqBeiwvE%)8>d1AugFR&OFq5y-HMT-leT|{svVj)j6t5xPoNreX4D#tG zeV495*5v7EMiDng>T1_#)y|`YBmL1-bB2!;y)XrqUom`lt)s;vx`wXHr`mFzr!&qe z6DM)3)pa)gqVG&03~KN92Ut%J6Qk6_$pz?(pH+ zROu#N=c~ZU`g~$*XNr8%MQ0-m#j}(4hZN?ry3*zO&-HXR^?qwd!oXZu7z|lDlQh&7 z{!qV%Ai}@!`Zc@2mX7de18L)Ei-(`1)IW$0+ zI86`+2o{%;LaeKr0JeZ-g;?HcDkp1a!;OdL9ljm?`&h;Fg#fj&(M7kUovdB_uTZUG zAV4Ro#Bl?ZR> z6rLxTdwPESXx5A$zefnRo_(wCG8vSs!iH-P1ff&4+Qq0vsEe1-iwqLlTOCk}Bn^ry zETy!BYds{57cRU$E?X*MDFsc^v_M2+7Vg3DYe2)8m zI-8TiP2zGAxhV&^$(*DlP73c}G7oUyFkbROUJ55MiJQU&p(Z>#syslNxbp!SPEPPj(6Zr7Y9_oNj-p1vY^_(qgp(`!FScV#V|s&U9IdF9uZr`5qi9j^0rdf| z2S)LT_~-hJ7tCq-8E@#$JitfG+~Hze8m<`D7ajC)mU!TP5UY%IhvWpez@2aj>vs{n_i4*Lkrf$b_f~3!g?`=u2_`aq^H5nn_TzS`dnwc8p$3N3-mt(#b}T z$|ffTms#n*)6zjes8J)0RRup6gsn8t9&uAniuBb++_N$#IJKtTnhWf zjKHNyf z!A(gN28>LTi_!j*3llJS1rc>`L(HWi(V`eev^YAt#~k9l%{x&n4Keq;jUt60ptRL2 zC{Cb!>{3CDC?+^Y923>dYc9Plz>}yLeuuyrl;C)v3P?<+E1mxyBPPO0z^&Fyze}4- 
zyGu8>1CGtxgD0bT`G;*qm^;Kfm>fjV?CR|`C>rwg#^pSRM1x+i8zYDwOb=q%kDRin zIaXRc%B2-4Ld+e^8S$=RrMn?gub(;eOj3YP_}WnL24mjCKL`X_P;)BW+>D!K)y%A&qP`8kelaay0DVNW-HRt@* z+sU*E^HX1b8n7|d<6a$J$vXSsftj(45%}OrV!UBO+yX;foM8dEp<&)tykTO$ zZsb2pjopk|lgl;`1^WpL zVLxUgtS2&=JH7f4kJ>{_)A)L@6<>1^ysrUREo=axV%@jb>%ZX%rmv>YHrQbg=;f+U zuR%g+r4BL?mzz6{IF&dpSB~^hu3VZ^L5B^X5+Wy*iuDl@dQs_ThYh!*R6GIlPA>`% zFA~FmA&SDuM)%Kd*)9*8v>3Oeo7^1^IUV@@{Q*sr`%&wYPxGMEoa7nnxtVr;mm*=v zy3Cfy|Fz|@1qB7NF97zex1c9S7nh9$LU|VEwP^ANB2lF`kL*9Eg9} zr4OQM+{j^WjpjU85}&%r`9%Ez8I8T!bsvR^{wwUKS?~Km?z&QrQXl0T%z&-@-RL-L zfo6L!r&SLOwm$QAKv4e9d$&VMDYFJu8OIC=yZzvwMq97Dbus(c>i-?HPgWSEF| zv*?hwTH!^zSRik|B3Ki1>8MYw9<^#h95#*{b4LJCwWeojUFR<2uvWTnb$)&yAahhB!u0=GA`Tc0 z+zP@M$nZbuEo+-KS-ZhWW~Y&?locqHYFM!o3+iQpS+&8j61Zj4Mrn#zuotk~S20)L zly5c{fzD~S-L0p=f|8Y!SD{jsY6ZQQKRdP6QJ1;}3on9*B8e;)tZL;jYb+WhxCCnb zT;>v}kkeTMNL-$H#jG@9K@D0sEjfzx*qRe-4VHdWg!I>QZvD6^)Jz_JUd>uKBnKw( zqXwuJZ8p;7qzBfutNG~TT~(46bv@8hEZnH9^|(ML "#666666", + default: () => "#808080", }, inherit: { type: Boolean, diff --git a/EdgeCraftRAG/ui/vue/src/i18n/en.ts b/EdgeCraftRAG/ui/vue/src/i18n/en.ts index 39be2d81aa..8eac170d8a 100644 --- a/EdgeCraftRAG/ui/vue/src/i18n/en.ts +++ b/EdgeCraftRAG/ui/vue/src/i18n/en.ts @@ -6,21 +6,227 @@ export default { common: { update: "Update", edit: "Edit", + add: "Add", delete: "Delete", active: "Activate", deactivate: "Deactivate", cancel: "Cancel", confirm: "Confirm", save: "Save", - }, - setting: { - systemStatus: "System Status", - pipelines: "Pipelines", - create: "Create Pipeline", - config: "Configuration Log", + back: "Back", + next: "Next", + submit: "Submit", + prompt: "Prompt", + import: "Import", + chatbot: "Chatbot", jump: "Go to chatbot", + success: "Success", + error: "Error", + clear: "Clear", + uploadTip: "Click or drag file to this area to upload", + }, + system: { + title: "System Status", + cpu: "CPU Usage", + gpu: "GPU Usage", + disk: "Disk Usage", + memory: "Memory Usage", used: "Used", notUsed: "Not Used", + info: "System 
Information", + kernel: "Kernel", + processor: "KerProcessornel", + os: "OS", + time: "Current time", + }, + quickStart: { + title: "Quick Start", + first: "Step 1", + second: "Step 2", + step1: "Create Pipeline", + step1Tip: + "Build your RAG pipeline with customized settings to maximize the potential of AI information processing capability.", + step2: "Use the Chatbot", + step2Tip: + "Start engaging with the intelligent chatbot, which supports file uploads and information retrieval to assist you in completing tasks more efficiently.", + create: "Go Create", + }, + pipeline: { + pipelines: "Pipelines", + create: "Create Pipeline", + edit: "Edit Pipeline", + detail: "Pipeline Details", import: "Import Pipeline", + activated: "Activated", + inactive: "Inactive", + isActive: "Activated", + pipelineFormatTip: "Supports JSON format, with file size not exceeding 10M.", + importSuccTip: "Files upload successful!", + importErrTip: "Files upload failed!", + name: "Name", + id: "ID", + status: "Status", + operation: "Operation", + deactivateTip: "Are you sure deactivate this pipeline?", + activeTip: "Are you sure activate this pipeline?", + deleteTip: "Are you sure delete this pipeline?", + notActivatedTip: "There is no available pipeline. 
Please create or activate it first.", + validErr: "Form validation failed !", + config: { + basic: "Basic", + nodeParser: "Node Parser", + nodeParserType: "Node parser type", + chunkSize: "Chunk size", + chunkOverlap: "Chunk overlap", + windowSize: "Window Size", + indexer: "Indexer", + indexerType: "Indexer Type", + embedding: "Embedding Model", + embeddingDevice: "Embedding run device", + retriever: "Retriever", + retrieverType: "Retriever Type", + topk: "Search top k", + postProcessor: "PostProcessor", + postProcessorType: "PostProcessor Type", + rerank: "Rerank Model", + rerankDevice: "Rerank run device", + generator: "Generator", + generatorType: "Generator Type", + llm: "LLM Inference Type", + language: "Large Language Model", + llmDevice: "LLM run device", + weights: "Weights", + local: "Local", + vllm: "Vllm", + }, + valid: { + nameValid1: "Please input name", + nameValid2: "Name should be between 2 and 30 characters", + nodeParserType: "Please select Node Parser Type", + chunkSizeValid1: "Please select Chunk Size", + chunkSizeValid2: "The value of Chunk Size cannot be less than Chunk Overlap", + chunkOverlapValid1: "Please select Chunk Overlap", + chunkOverlapValid2: "The value of Chunk Overlap cannot be greater than Chunk Size", + windowSize: "Please select Chunk Window Size", + indexerType: "Please select Indexer Type", + embedding: "Please select Embedding Model", + embeddingDevice: "Please select Embedding run device", + retrieverType: "Please select Retriever Type", + topk: "Please select Top k", + postProcessorType: "Please select PostProcessor Type", + rerank: "Please select Rerank Model", + rerankDevice: "Please select Rerank run device", + generatorType: "please select Generator Type", + language: "please select Large Language Model", + llmDevice: "please select LLM run device", + weights: "please select Weights", + }, + desc: { + name: "The name identifier of the pipeline", + nodeParserType: "Node parsing type when you use RAG", + chunkSize: 
"Size of each chunk for processing", + chunkOverlap: "Overlap size between chunks", + windowSize: "The number of sentences on each side of a sentence to capture", + indexerType: "The type of index structure responsible for building based on the parsed nodes", + embedding: "Embed the text data to represent it and build a vector index", + embeddingDevice: "The device used by the Embedding Model", + retrieverType: + "The retrieval type used when retrieving relevant nodes from the index according to the user's query", + topk: "The number of top k results to return", + postProcessorType: "Select postprocessors for post-processing of the context", + rerank: "Rerank Model", + rerankDevice: "Rerank run device", + generatorType: "Local inference generator or vllm generator", + language: "The large model used for generating dialogues", + llmDevice: "The device used by the LLM", + weights: "Model weight", + reranker: "The model for reranking.", + metadataReplace: "Used to replace the node content with a field from the node metadata.", + vectorsimilarity: "retrieval according to vector similarity", + autoMerge: "This retriever will try to merge context into parent context.", + bm25: "A BM25 retriever that uses the BM25 algorithm to retrieve nodes.", + faissVector: "Embeddings are stored within a Faiss index.", + vector: "Vector Store Index.", + simple: "Parse text with a preference for complete sentences.", + hierarchical: "Splits a document into a recursive hierarchy Nodes using a NodeParser.", + sentencewindow: + "Sentence window node parser. Splits a document into Nodes, with each node being a sentence. 
Each node contains a window from the surrounding sentences in the metadata.", + unstructured: "UnstructedNodeParser is a component that processes unstructured data.", + }, + }, + generation: { + title: "Generation Configuration", + retriever: "Retriever Configuration", + config: { + top_n: "Rerank top n", + temperature: "Temperature", + top_p: "Top-p (nucleus sampling)", + top_k: "Top-k", + penalty: "Repetition Penalty", + maxToken: "Max Token Number", + }, + desc: { + top_n: "Number of rerank results", + temperature: "Higher values produce more diverse outputs", + top_p: + "Sample from the smallest possible set of tokens whose cumulative probability exceeds top_p. Set to 1 to disable and sample from all tokens.", + top_k: `Sample from a shortlist of top-k tokens — 0 to + disable and sample from all tokens.`, + penalty: "Penalize repetition — 1.0 to disable.", + maxToken: "Set Max Output Token.", + }, + }, + chat: { + title: "Chat", + tip1: "Hi, I'm EC RAG ", + tip2: "How can I help you today?", + tip3: "Choosing the right knowledge base can help AI answer questions more accurately", + tip4: "Please enter your question...", + new: "New Chat", + rag: "EC RAG", + setting: "Pipeline Setting", + clear: "Clear Message", + }, + knowledge: { + title: "Knowledge Base", + total: "Total files: ", + upload: "Upload File", + create: "Create Knowledge Base", + edit: "Edit Knowledge Base", + deleteTip: "Are you sure delete this knowledge base?", + activeTip: "Are you sure activate this knowledge base?", + uploadTip: "Supports PDF, Word, TXT,Doc,Html,PPT formats, with a single file size not exceeding 200M", + notFileTip: "The knowledge base is empty. 
Go upload your files.", + name: "Name", + des: "Description", + activated: "Activated", + nameValid1: "Please input knowledge base name", + nameValid2: "Name should be between 2 and 30 characters", + desValid: "Please input knowledge base description", + activeValid: "Please select whether to activate", + uploadValid: "Single file size not exceeding 50M.", + deleteFileTip: "Are you sure delete this file?", + }, + request: { + pipeline: { + createSucc: "Pipeline created successfully !", + updateSucc: "Pipeline update successfully !", + deleteSucc: "Pipeline deleted successfully !", + switchSucc: "Pipeline state switch successful !", + }, + chatbot: { + updateSucc: "Configuration update successful !", + }, + knowledge: { + uploadSucc: "Document uploaded and parsed successfully !", + deleteFileSucc: "File deleted successfully !", + createSucc: "Knowledge Base created successfully !", + updateSucc: "Knowledge Base update successfully !", + deleteSucc: "Knowledge Base deleted successfully !", + }, + }, + error: { + notFoundTip: "Uh oh! 
It seems like you're lost", + back: "Go Home", }, }; diff --git a/EdgeCraftRAG/ui/vue/src/i18n/index.ts b/EdgeCraftRAG/ui/vue/src/i18n/index.ts index 6d2fab0c57..ac93f5bf47 100644 --- a/EdgeCraftRAG/ui/vue/src/i18n/index.ts +++ b/EdgeCraftRAG/ui/vue/src/i18n/index.ts @@ -9,13 +9,13 @@ import en from "./en"; import zh from "./zh"; const messages = { - "en-US": { ...en, ...enLocale }, - "zh-CN": { ...zh, ...zhLocale }, + en_US: { ...en, ...enLocale }, + zh_CN: { ...zh, ...zhLocale }, }; const i18n = createI18n({ - locale: Local.get("lang") || "en-US", - fallbackLocale: "en-US", + locale: Local.get("themeInfo")?.lang || "en_US", + fallbackLocale: "en_US", messages, }); diff --git a/EdgeCraftRAG/ui/vue/src/i18n/zh.ts b/EdgeCraftRAG/ui/vue/src/i18n/zh.ts index 39be2d81aa..a96515b7a3 100644 --- a/EdgeCraftRAG/ui/vue/src/i18n/zh.ts +++ b/EdgeCraftRAG/ui/vue/src/i18n/zh.ts @@ -4,23 +4,225 @@ export default { headerTitle: "Edge Craft RAG based Q&A Chatbot", common: { - update: "Update", - edit: "Edit", - delete: "Delete", - active: "Activate", - deactivate: "Deactivate", - cancel: "Cancel", - confirm: "Confirm", - save: "Save", - }, - setting: { - systemStatus: "System Status", - pipelines: "Pipelines", - create: "Create Pipeline", - config: "Configuration Log", - jump: "Go to chatbot", - used: "Used", - notUsed: "Not Used", - import: "Import Pipeline", + update: "更新", + edit: "编辑", + add: "新增", + delete: "删除", + active: "启用", + deactivate: "停用", + cancel: "取消", + confirm: "确认", + save: "保存", + back: "返回", + next: "下一步", + submit: "提交", + prompt: "提示", + import: "导入", + chatbot: "对话", + jump: "前往对话", + success: "成功", + error: "错误", + clear: "清除", + uploadTip: "点击或将文件拖到此区域进行上传", + }, + system: { + title: "系统状态", + cpu: "CPU使用率", + gpu: "GPU使用率", + disk: "磁盘使用率", + memory: "内存使用率", + used: "已使用", + notUsed: "未使用", + info: "系统信息", + kernel: "内核", + processor: "处理器", + os: "操作系统", + time: "当前时间", + }, + quickStart: { + title: "快速开始", + first: "步骤1", + second: "步骤2", + step1: 
"创建 Pipeline", + step1Tip: "定制您的 RAG 流程,释放 AI 信息处理的最大能力。", + step2: "前往对话", + step2Tip: "开始与智能聊天机器人互动,它支持文件上传和信息检索,帮助您更高效地完成任务。", + create: "去创建", + }, + pipeline: { + pipelines: "Pipeline", + create: "创建 Pipeline", + edit: "编辑 Pipeline", + detail: "Pipeline 详情", + import: "导入Pipeline", + isActive: "状态", + activated: "已启用", + inactive: "已停用", + pipelineFormatTip: "仅支持JSON格式,文件大小不超过10M", + importSuccTip: "Pipeline 配置导入成功!", + importErrTip: "Pipeline 配置导入失败!", + name: "名称", + id: "ID", + status: "状态", + operation: "操作", + deactivateTip: "您确定要停用该Pipeline吗?", + activeTip: "您确定要启用该Pipeline吗?", + deleteTip: "您确定要删除此Pipeline吗 ?此操作不可恢复。", + notActivatedTip: "当前无可用Pipeline,请先配置或激活。", + validErr: "表单验证失败!", + + config: { + basic: "基础", + nodeParser: "节点解析器", + nodeParserType: "节点解析器类型", + chunkSize: "分块大小", + chunkOverlap: "分块重叠部分大小", + windowSize: "句子上下文窗口大小", + indexer: "索引器", + indexerType: "索引器类型", + embedding: "嵌入模型", + embeddingDevice: "模型运行设备", + retriever: "检索器", + retrieverType: "检索器类型", + topk: "检索 top k", + postProcessor: "节点后处理器", + postProcessorType: "节点后处理器类型", + rerank: "重排模型", + rerankDevice: "模型运行设备", + generator: "生成器", + generatorType: "生成器类型", + llm: "推理类型", + language: "语言大模型", + llmDevice: "运行设备", + weights: "权重", + local: "本地", + vllm: "Vllm", + }, + valid: { + nameValid1: "请输入名称", + nameValid2: "请输入 2 到 30 个字符的名称", + nodeParserType: "请选择节点解析器类型", + chunkSizeValid1: "请选择分块大小", + chunkSizeValid2: "分块大小的值不能小于分块重叠值", + chunkOverlapValid1: "请选择分块重叠值", + chunkOverlapValid2: "分块重叠值不能大于分块大小的值", + windowSize: "请选择句子上下文窗口大小", + indexerType: "请选择索引器类型", + embedding: "请选择嵌入模型", + embeddingDevice: "请选择嵌入模型运行设备", + retrieverType: "请选择检索器类型", + topk: "请选择Top k", + postProcessorType: "请选择后处理器类型", + rerank: "请选择重排模型", + rerankDevice: "请选择重排模型运行设备", + generatorType: "请选择生成器类型", + language: "请选择大语言模型", + llmDevice: "请选择大语言模型运行设备", + weights: "请选择模型权重", + }, + desc: { + name: "Pipeline的名称标识,用于区分不同工作流", + nodeParserType: "RAG 处理时的文本拆分策略,支持简单句子、层次结构等解析方式", + chunkSize: 
"文本处理时的单块数据大小", + chunkOverlap: "相邻数据块的重叠部分大小,确保跨块语义连续性", + windowSize: "每个节点捕获的上下文句子窗口大小,用于增强语义完整性", + indexerType: "基于解析节点构建的索引结构类型", + embedding: "将文本转换为向量表示的过程", + embeddingDevice: "执行嵌入模型推理的硬件设备(CPU/GPU)", + retrieverType: "根据用户查询从索引中检索节点的算法类型", + topk: "检索时返回的最相关结果数量", + postProcessorType: "对检索结果进行后处理的组件类型", + rerank: "对检索结果进行二次排序的模型,提升答案相关性", + rerankDevice: "执行重排模型推理的硬件设备(CPU/GPU)", + generatorType: "回答生成方式的类型(本地部署模型或 vllm 高效推理框架)", + language: "用于生成自然语言回答的大模型(如 LLaMA、ChatGLM)", + llmDevice: "大语言模型推理的硬件设备(需匹配模型规模要求)", + weights: "大模型的权重", + reranker: "重排序的模型", + metadataReplace: "用来将节点元数据中的字段替换节点内容", + vectorsimilarity: "根据向量相似性进行检索", + autoMerge: "该检索器会尝试将上下文合并到父级上下文中", + bm25: "使用BM25算法检索节点的BM25检索器", + faissVector: "嵌入存储在Faiss索引中。", + vector: "矢量存储索引", + simple: "解析文本,优先选择完整的句子。", + hierarchical: "使用借点解析将文档分割成递归层次节点", + sentencewindow: "将文档分割成节点,每个节点代表一个句子。每个节点包含一个来自元数据中周围句子的窗口", + unstructured: "一个处理非结构化数据的组件", + }, + }, + generation: { + title: "生成器配置", + retriever: "检索器配置", + config: { + top_n: "Rerank top n", + temperature: "Temperature", + top_p: "Top-p (nucleus sampling)", + top_k: "Top-k", + penalty: "Repetition Penalty", + maxToken: "Max Token Number", + }, + desc: { + top_n: "重排后结果的数量", + temperature: "数值越高,输出越多样化", + top_p: "从累积概率超过 top_p 的最小标记集中采样,设为1则禁用并从所有标记取样。", + top_k: "从概率前k的 Token 中采样", + penalty: "抑制重复的系数,设为1.0表示禁用", + maxToken: "生成回答的最大Token数量", + }, + }, + + chat: { + title: "对话", + tip1: "您好,我是 EC RAG", + tip2: "请问需要什么帮助?", + tip3: "合理选择知识库有助于提升人工智能在问答任务中的准确性。", + tip4: "有什么问题?请在这里输入...", + new: "开启新对话", + rag: "EC RAG", + setting: "Pipeline 配置", + clear: "清除消息", + }, + knowledge: { + title: "知识库", + total: "文档总数: ", + upload: "上 传", + create: "新建知识库", + edit: "编辑知识库", + deleteTip: "您确定要删除此知识库吗?此操作不可恢复。", + activeTip: "您确定要激活此知识库吗?", + uploadTip: "支持 PDF、Word、TXT、Doc、HTML、PPT 格式,单个文件大小不超过 200M。", + notFileTip: "您还没有上传任何文件,点击“上传”按钮开始添加内容吧~", + name: "名称", + des: "描述", + activated: "激活状态", + nameValid1: "请输入知识库名称", + nameValid2: "请输入 2 
到 30 个字符的名称", + desValid: "请输入知识库描述", + activeValid: "请选择是否启用该功能。", + uploadValid: "单个文件大小不得超过 50MB", + deleteFileTip: "您确定要删除此文档吗?此操作不可恢复。", + }, + request: { + pipeline: { + createSucc: "Pipeline 创建成功!", + updateSucc: "Pipeline 更新成功!", + deleteSucc: "Pipeline 删除成功!", + switchSucc: "Pipeline 状态切换成功!", + }, + chatbot: { + updateSucc: "配置更新成功!", + }, + knowledge: { + uploadSucc: "文档上传成功,内容已解析完毕。", + deleteFileSucc: "文档删除成功!", + createSucc: "知识库创建成功!", + updateSucc: "知识库更新成功!", + deleteSucc: " 知识库删除成功!", + }, + }, + error: { + notFoundTip: "Oops 好像走错地方啦~", + back: "首页", }, }; diff --git a/EdgeCraftRAG/ui/vue/src/layout/Header.vue b/EdgeCraftRAG/ui/vue/src/layout/Header.vue index c8cd68d69a..9b42e62194 100644 --- a/EdgeCraftRAG/ui/vue/src/layout/Header.vue +++ b/EdgeCraftRAG/ui/vue/src/layout/Header.vue @@ -4,10 +4,35 @@

    {{ $t("headerTitle") }}

    -
    -
    - Sun - Moon +
    + +
    +
    + +
    +
    + +
    +
    +
    + Sun + Moon +
    @@ -17,12 +42,20 @@ import DarkIcon from "@/assets/svgs/dark-icon.svg"; import headerLog from "@/assets/svgs/header-log.svg"; import LightIcon from "@/assets/svgs/light-icon.svg"; +import SvgIcon from "@/components/SvgIcon.vue"; import { themeAppStore } from "@/store/theme"; +import { useI18n } from "vue-i18n"; +const { locale } = useI18n(); const themeStore = themeAppStore(); const emit = defineEmits(["change-theme"]); const isDark = ref(false); +const currentLanguage = computed(() => locale.value); +const handleLanguageChange = ({ key }: { key: string }) => { + locale.value = key; + themeStore.toggleLanguage(key); +}; const handleThemeChange = () => { isDark.value = !isDark.value; const theme = isDark.value ? "dark" : "light"; @@ -36,9 +69,20 @@ onMounted(() => { diff --git a/EdgeCraftRAG/ui/vue/src/layout/Main.vue b/EdgeCraftRAG/ui/vue/src/layout/Main.vue index 8fde16cd9c..8c2147cc3f 100644 --- a/EdgeCraftRAG/ui/vue/src/layout/Main.vue +++ b/EdgeCraftRAG/ui/vue/src/layout/Main.vue @@ -3,13 +3,42 @@
    - + - \ No newline at end of file + diff --git a/EdgeCraftRAG/ui/vue/src/main.ts b/EdgeCraftRAG/ui/vue/src/main.ts index 180b48b4df..25e2b930cf 100644 --- a/EdgeCraftRAG/ui/vue/src/main.ts +++ b/EdgeCraftRAG/ui/vue/src/main.ts @@ -16,7 +16,7 @@ import { Local } from "@/utils/storage"; // setting dayjs language const setDayjsLocale = (locale: string) => { - if (locale === "en-US") { + if (locale === "en_US") { dayjs.locale("en"); } else { dayjs.locale("zh-cn"); diff --git a/EdgeCraftRAG/ui/vue/src/store/theme.ts b/EdgeCraftRAG/ui/vue/src/store/theme.ts index 9f99b7c531..072d41d975 100644 --- a/EdgeCraftRAG/ui/vue/src/store/theme.ts +++ b/EdgeCraftRAG/ui/vue/src/store/theme.ts @@ -6,6 +6,7 @@ import { defineStore } from "pinia"; export const themeAppStore = defineStore("theme", { state: () => ({ theme: "light", + lang: "en_US", }), persist: { key: "themeInfo", @@ -15,5 +16,8 @@ export const themeAppStore = defineStore("theme", { toggleTheme(type: string) { this.theme = type; }, + toggleLanguage(lang: string) { + this.lang = lang; + }, }, }); diff --git a/EdgeCraftRAG/ui/vue/src/theme/ant.less b/EdgeCraftRAG/ui/vue/src/theme/ant.less index 89f82ed7bf..c2427cb257 100644 --- a/EdgeCraftRAG/ui/vue/src/theme/ant.less +++ b/EdgeCraftRAG/ui/vue/src/theme/ant.less @@ -154,3 +154,6 @@ margin: 12px auto; } } +.intel-tooltip { + max-width: 60vw; +} diff --git a/EdgeCraftRAG/ui/vue/src/theme/common.less b/EdgeCraftRAG/ui/vue/src/theme/common.less index 5fd7f82488..5aa87f8500 100644 --- a/EdgeCraftRAG/ui/vue/src/theme/common.less +++ b/EdgeCraftRAG/ui/vue/src/theme/common.less @@ -11,11 +11,82 @@ align-items: center; justify-content: space-between; } +.flex-column { + display: flex; + flex-direction: column; +} +.flex-end { + display: flex; + align-items: center; + justify-content: flex-end; +} .vertical-center { display: flex; align-items: center; justify-content: center; } +.vertical-between { + display: flex; + flex-direction: column; + justify-content: space-between; + 
align-items: center; +} +.single-ellipsis { + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; +} +.multi-ellipsis { + display: -webkit-box; + -webkit-line-clamp: 2; + -webkit-box-orient: vertical; + overflow: hidden; + text-overflow: ellipsis; +} +.icon-button { + width: 24px; + height: 24px; + font-size: 18px; + border-radius: 4px; + border: 1px solid var(--font-tip-color); + .vertical-center; + &.primary { + i { + color: var(--color-primary) !important; + } + &:hover { + background-color: var(--color-primaryBg); + border: 1px solid var(--color-primary); + } + } + &.success { + i { + color: var(--color-success) !important; + } + &:hover { + background-color: var(--color-successBg); + border: 1px solid var(--color-success); + } + } + &.warning { + i { + color: var(--color-warning) !important; + } + &:hover { + background-color: var(--color-warningBg); + border: 1px solid var(--color-warning); + } + } + &.error { + i { + color: var(--color-error) !important; + } + &:hover { + background-color: var(--color-errorBg); + border: 1px solid var(--color-error); + } + } +} .special-button-primary { background: linear-gradient( to bottom, @@ -106,6 +177,12 @@ padding: 16px 24px 24px !important; } } +.rename-dialog { + overflow: hidden; + .intel-modal-body { + padding: 16px 24px 0 !important; + } +} .centered-model { .ant-modal-confirm-btns { text-align: center; @@ -177,3 +254,6 @@ } .loopStyle(100); +.intel-dropdown { + z-index: 9999; +} diff --git a/EdgeCraftRAG/ui/vue/src/theme/layout.less b/EdgeCraftRAG/ui/vue/src/theme/layout.less index d27f45344d..3f8d58d5c0 100644 --- a/EdgeCraftRAG/ui/vue/src/theme/layout.less +++ b/EdgeCraftRAG/ui/vue/src/theme/layout.less @@ -82,18 +82,18 @@ div[aria-hidden="true"] { // 滚动条凹槽的颜色,还可以设置边框属性 &::-webkit-scrollbar-track-piece { - background-color: var(--bg-content-color); + background-color: var(--bg-scrollbar); } // 滚动条的宽度 &::-webkit-scrollbar { - width: 9px; - height: 9px; + width: 7px; + height: 7px; } // 滚动条的设置 
&::-webkit-scrollbar-thumb { - background-color: var(--bg-scrollbar); + background-color: var(--color-scrollbar); background-clip: padding-box; min-height: 28px; border-radius: 3px; diff --git a/EdgeCraftRAG/ui/vue/src/theme/markdown.less b/EdgeCraftRAG/ui/vue/src/theme/markdown.less index 918650ff33..2a497ecac3 100644 --- a/EdgeCraftRAG/ui/vue/src/theme/markdown.less +++ b/EdgeCraftRAG/ui/vue/src/theme/markdown.less @@ -12,13 +12,16 @@ } code, tt { - background-color: var(--color-primaryBg); + background-color: var(--message-bg); border-radius: 6px; font-size: 85%; margin: 0; padding: 0.2em 0.4em; white-space: break-spaces; } + pre { + margin-bottom: 0; + } h1, h2, h3, @@ -69,4 +72,79 @@ img[align="left"] { padding-right: 20px; } + table { + width: 100%; + margin-top: 16px; + border: 1px solid var(--border-main-color); + border-collapse: collapse; + color: var(--font-main-color); + } + table th { + background-color: var(--table-th-bg); + font-weight: 600; + text-align: left; + } + + table td, + table th { + border: 1px solid var(--border-main-color); + padding: 6px 13px; + margin: 0; + } + table td { + background-color: var(--table-td-bg); + } + table td > :last-child { + margin-bottom: 0; + } + + table tr { + font-size: 14px; + border-top: 1px solid var(--border-main-color); + } + + blockquote { + color: var(--blockquote-color); + border-left: 3px solid var(--color-scrollbar); + margin: 8px 0px; + padding: 0px 10px; + } + .intel-highlighter { + width: 100%; + border-radius: 4px; + overflow: hidden; + margin-top: 16px; + margin-bottom: 12px; + .header-wrap { + align-items: center; + background-color: var(--code-header-bg); + color: var(--code-header-font); + display: flex; + align-items: center; + justify-content: space-between; + font-size: 14px; + height: 32px; + padding: 0 14px; + .copy-icon { + display: block; + color: var(--font-main-color); + cursor: pointer; + &:hover { + color: var(--color-primary); + } + } + .success-icon { + display: none; + color: 
var(--color-success); + } + } + .content-wrap { + overflow-x: auto; + background: var(--code-content-bg); + padding: 16px; + font-size: 13px; + margin-top: 0; + color: var(--font-main-color); + } + } } diff --git a/EdgeCraftRAG/ui/vue/src/theme/variables.less b/EdgeCraftRAG/ui/vue/src/theme/variables.less index e8914623af..42c05d8feb 100644 --- a/EdgeCraftRAG/ui/vue/src/theme/variables.less +++ b/EdgeCraftRAG/ui/vue/src/theme/variables.less @@ -3,43 +3,71 @@ sans-serif; --header-font-family: "IntelOneDisplayNormal"; --color-white: #ffffff; + --color-fuzzy-white: rgba(255, 255, 255, 0.7); --color-primary: #00377c; --color-primary-hover: #0054ae; + --color-primary-tip: #3b82f6; + --color-primary-second: #1677ff; --color-primaryBg: #e0eaff; + --color-second-primaryBg: #d4e1fd; + --message-bg: var(--color-second-primaryBg); --color-error: #ce0000; --color-error-hover: #ff5d52; + --color-errorBg: #ffa3a3; --color-info: #aaaaaa; + --color-infoBg: #ffffff; --color-success: #179958; - --color-warning: #faad14; - --color-big-icon: #111111; --color-successBg: #d6ffe8; + --color-second-successBg: #f0fdf4; + --color-warning: #faad14; + --color-second-warning: #854d0e; --color-warningBg: #feefd0; - --color-errorBg: #ffa3a3; - --color-infoBg: #ffffff; + --color-second-warningBg: #fefce8; + --color-big-icon: #111111; --bg-main-color: #f5f5f5; --bg-card-color: #f9f9f9; + --bg-second-card-bg: var(--color-white); --bg-loading-color: rgba(0, 0, 0, 0.45); --bg-content-color: var(--color-white); --font-main-color: #333333; --font-text-color: #595959; --font-info-color: #808080; --font-tip-color: #999999; - --bg-scrollbar: #dddddd; + --bg-scrollbar: #f5f5f5; + --color-scrollbar: #d5d5d5; --bg-scrollbar-hover: #bbbbbb; --bg-box-shadow: rgba(0, 0, 0, 0.05); + --bg-primary-shadow: rgba(0, 55, 124, 0.3); --bg-gradient-shadow: 0px 4px 6px -4px rgba(0, 0, 0, 0.1), 0px 10px 15px -3px rgba(0, 0, 0, 0.1); --menu-bg: var(--bg-main-color); + --input-bg: var(--color-white); --color-switch-theme: 
#e5e7eb; - + --think-done-icon: #356bfd; + --think-done-bg: linear-gradient(180deg, #f3f5fc 30%, #ffffff 100%); + --font-think-color: #5e5e5e; + --face-icon-bg: #a6a6a6; + --bg-switch: var(--color-primaryBg); //边框 --border-main-color: #e5e7eb; + --border-fuzzy-color: rgba(255, 255, 255, 0.1); + --border-warning: var(--color-warningBg); + --border-success: var(--color-successBg); + --border-primary: var(--color-second-primaryBg); //黑色按钮 --bg-black-color: #434343; --bg-black-hover-color: #595959; --bg-black-active-color: #262626; --border-black-color: #434343; + + //md显示 + --code-header-bg: #dddddd; + --code-header-font: #2c2c36; + --code-content-bg: #f0f0f0; + --table-th-bg: #dddddd; + --table-td-bg: #f0f0f0; + --blockquote-color: var(--font-info-color); } [data-theme="dark"] { @@ -51,26 +79,46 @@ --font-text-color: #e9e9e9; --font-info-color: #aeaeae; --font-tip-color: #aeaeae; - --bg-scrollbar: #dddddd; - --bg-scrollbar-hover: #bbbbbb; + --bg-scrollbar: #1e1e1e; + --color-scrollbar: #565656; + --bg-scrollbar-hover: #666666; --color-primary: #0054ae; --color-primary-hover: #1668dc; - --color-primaryBg: #95b5fa; - --bg-box-shadow: rgba(109, 153, 233, 0.05); + --color-primaryBg: #e0eaff; + --color-primary-second: #1677ff; + --color-second-primaryBg: #d4e1fd; + --bg-box-shadow: rgba(255, 255, 255, 0.1); --bg-gradient-shadow: 0px 4px 6px -4px rgba(255, 255, 255, 0.1), 0px 5px 8px 1px rgba(255, 255, 255, 0.1); --menu-bg: #3e3e3e; --color-big-icon: #ffffff; + --bg-second-card-bg: #111111; --color-switch-theme: var(--color-primary-hover); + --think-done-bg: linear-gradient(180deg, #32313a 30%, #2d2d2d 100%); + --font-think-color: #e0ecffcc; + --bg-switch: var(--bg-card-color); + --input-bg: var(--menu-bg); + --message-bg: var(--bg-card-color); //边框 - --border-main-color: #2b2b2b; + --border-main-color: #3b3b3b; + --border-warning: #f8e9ca; + --border-success: #d7f8e8; + --border-primary: #d5daf8; //黑色按钮 --bg-black-color: #434343; --bg-black-hover-color: #595959; 
--bg-black-active-color: #262626; --border-black-color: #434343; + + //md显示 + --code-header-bg: #585a73; + --code-header-font: #fafafc; + --code-content-bg: #2c2c36; + --table-th-bg: #585a73; + --table-td-bg: #2c2c36; + --blockquote-color: var(--bg-scrollbar-hover); } @use "ant-design-vue/es/style/themes/default.less"; diff --git a/EdgeCraftRAG/ui/vue/src/utils/common.ts b/EdgeCraftRAG/ui/vue/src/utils/common.ts index efdf142be9..e78100a06b 100644 --- a/EdgeCraftRAG/ui/vue/src/utils/common.ts +++ b/EdgeCraftRAG/ui/vue/src/utils/common.ts @@ -3,6 +3,7 @@ import { inject } from "vue"; import { customNotification } from "./notification"; +import { Local } from "./storage"; export const useNotification = () => { const customNotificationInjected = inject("customNotification"); @@ -19,3 +20,28 @@ export const formatDecimals = (num: number, decimalPlaces: number = 2) => { const factor = Math.pow(10, decimalPlaces); return Math.round(num * factor) / factor; }; + +export const formatCapitalize = (string: string, start: number = 0, length: number = 1) => { + const end = start + length; + const part1 = string.slice(0, start); + const part2 = string.slice(start, end).toUpperCase(); + const part3 = string.slice(end); + return part1 + part2 + part3; +}; + +export const getChatSessionId = (): string => { + const STORAGE_KEY = "chat_session_id"; + + const storedSessionId = Local.get(STORAGE_KEY); + if (storedSessionId) { + return storedSessionId; + } + const newSessionId = self.crypto?.randomUUID?.() || generateFallbackId(); + + Local.set(STORAGE_KEY, newSessionId); + return newSessionId; +}; + +const generateFallbackId = (): string => { + return `${Date.now()}_${Math.random().toString(36).substring(2, 9)}`; +}; diff --git a/EdgeCraftRAG/ui/vue/src/utils/customRenderer.ts b/EdgeCraftRAG/ui/vue/src/utils/customRenderer.ts new file mode 100644 index 0000000000..5a19ade40a --- /dev/null +++ b/EdgeCraftRAG/ui/vue/src/utils/customRenderer.ts @@ -0,0 +1,128 @@ +// Copyright (C) 2025 
Intel Corporation +// SPDX-License-Identifier: Apache-2.0 + +import { marked } from "marked"; +import hljs from "highlight.js"; +import { formatCapitalize } from "./common"; +import ClipboardJS from "clipboard"; +import { message } from "ant-design-vue"; + +interface CodeRenderParams { + text: string; + lang?: string; +} + +class ClipboardManager { + private clipboard: ClipboardJS | null = null; + private observer: MutationObserver | null = null; + + constructor() { + this.autoInit(); + } + + private autoInit() { + if (typeof document === "undefined") return; + const init = () => { + this.init(".copy-btn"); + this.setupMutationObserver(); + }; + + if (document.readyState === "complete") { + init(); + } else { + document.addEventListener("DOMContentLoaded", init); + } + } + + private init(selector: string) { + this.destroy(); + + this.clipboard = new ClipboardJS(selector, { container: document.body }); + + this.clipboard.on("success", (e) => this.handleSuccess(e)); + this.clipboard.on("error", (e) => this.handleError(e)); + } + + private setupMutationObserver() { + this.observer = new MutationObserver((mutations) => { + const hasNewButtons = mutations.some((mutation) => + Array.from(mutation.addedNodes).some( + (node) => node instanceof HTMLElement && (node.matches(".copy-btn") || node.querySelector(".copy-btn")), + ), + ); + if (hasNewButtons) this.init(".copy-btn"); + }); + + this.observer.observe(document.body, { + childList: true, + subtree: true, + }); + } + + destroy() { + this.clipboard?.destroy(); + this.observer?.disconnect(); + this.clipboard = null; + this.observer = null; + } + + private handleSuccess(e: ClipboardJS.Event) { + e.clearSelection(); + message.success("Copy Successful !"); + const button = e.trigger as HTMLElement; + const copyIcon = button.querySelector(".copy-icon") as HTMLElement; + const successIcon = button.querySelector(".success-icon") as HTMLElement; + + copyIcon.style.display = "none"; + successIcon.style.display = "block"; + + let 
timeout = null; + if (timeout) clearTimeout(timeout); + + timeout = setTimeout(() => { + copyIcon.style.display = "block"; + successIcon.style.display = "none"; + }, 2000); + } + + private handleError(e: ClipboardJS.Event) { + message.error("Copy Failure !"); + } +} + +export const clipboardManager = new ClipboardManager(); + +const createCustomRenderer = () => { + const renderer = new marked.Renderer(); + + renderer.link = ({ href, title, text }) => { + return `${text}`; + }; + + renderer.code = ({ text, lang }: CodeRenderParams) => { + const language = hljs.getLanguage(lang || "") ? lang : "plaintext"; + const codeTitle = formatCapitalize(language || "Code"); + const codeHtml = hljs.highlight(text, { + language: language || "plaintext", + }).value; + const uniqueId = `code-${Date.now()}-${Math.random().toString(16).slice(2)}`; + + return ` +
    +
    + ${codeTitle} + + + + +
    +
    ${codeHtml}
    +
    + `; + }; + + return renderer; +}; + +const CustomRenderer = createCustomRenderer(); +export default CustomRenderer; diff --git a/EdgeCraftRAG/ui/vue/src/utils/mitt.ts b/EdgeCraftRAG/ui/vue/src/utils/mitt.ts new file mode 100644 index 0000000000..f512797f56 --- /dev/null +++ b/EdgeCraftRAG/ui/vue/src/utils/mitt.ts @@ -0,0 +1,8 @@ +// Copyright (C) 2025 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 + +import mitt from "mitt"; + +const eventBus = mitt(); + +export default eventBus; diff --git a/EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot.vue b/EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot.vue deleted file mode 100644 index 34464506bd..0000000000 --- a/EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot.vue +++ /dev/null @@ -1,275 +0,0 @@ - - - - diff --git a/EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot/Chat.vue b/EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot/Chat.vue new file mode 100644 index 0000000000..8fe8bc6f83 --- /dev/null +++ b/EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot/Chat.vue @@ -0,0 +1,376 @@ + + + + diff --git a/EdgeCraftRAG/ui/vue/src/views/chatbot/components/ConfigDrawer.vue b/EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot/ConfigDrawer.vue similarity index 75% rename from EdgeCraftRAG/ui/vue/src/views/chatbot/components/ConfigDrawer.vue rename to EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot/ConfigDrawer.vue index 635058ed6e..732818c1c1 100644 --- a/EdgeCraftRAG/ui/vue/src/views/chatbot/components/ConfigDrawer.vue +++ b/EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot/ConfigDrawer.vue @@ -1,7 +1,7 @@ @@ -118,7 +131,7 @@ import { InfoCircleFilled } from "@ant-design/icons-vue"; import { FormInstance } from "ant-design-vue"; import { reactive, ref } from "vue"; -import { ConfigType } from "../type"; +import { ConfigType } from "../../type"; const props = defineProps({ drawerData: { @@ -162,7 +175,7 @@ const rules = reactive({ const sliderMarks = 
reactive({ top_n: { 1: "1", - 10: "10", + 30: "30", }, temperature: { 0: "0", @@ -178,7 +191,7 @@ const sliderMarks = reactive({ }, max_tokens: { 1: "1", - 8192: "8192", + 10240: "10240", }, }); const handleClose = () => { diff --git a/EdgeCraftRAG/ui/vue/src/views/chatbot/components/MessageItem.vue b/EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot/MessageItem.vue similarity index 77% rename from EdgeCraftRAG/ui/vue/src/views/chatbot/components/MessageItem.vue rename to EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot/MessageItem.vue index e1a828c165..03b64c53b0 100644 --- a/EdgeCraftRAG/ui/vue/src/views/chatbot/components/MessageItem.vue +++ b/EdgeCraftRAG/ui/vue/src/views/chatbot/components/Chatbot/MessageItem.vue @@ -1,9 +1,13 @@