
Merge pull request #33 from beclab/feat/bertv3-embedding
feat: add log for MODEL_INTERNET env
bleachzou3 authored Oct 15, 2024
2 parents f3ad9ca + 7fb174b commit ffcfc25
Showing 3 changed files with 4 additions and 4 deletions.
2 changes: 1 addition & 1 deletion Dockerfile.r4userembedding
@@ -22,7 +22,7 @@ RUN cargo build --release
FROM ubuntu:jammy

# Import from builder.
-ENV MODEL_INTERNET = "true"
+ENV MODEL_INTERNET="true"

WORKDIR /userembedding

2 changes: 1 addition & 1 deletion Dockerfile.r4userembeddingwithmodel
@@ -23,7 +23,7 @@ RUN /userembedding/target/release/downloadmodel
FROM ubuntu:jammy

# Import from builder.
-ENV MODEL_INTERNET = "false"
+ENV MODEL_INTERNET="false"


WORKDIR /userembedding
4 changes: 2 additions & 2 deletions user-embedding/src/bertcommon.rs
@@ -95,10 +95,10 @@ async fn calculate_userembedding() -> AnyhowResult<Tensor, AnyhowError> {
let mut cumulative_tensor: Tensor = option_cumulative_tensor.unwrap();
let default_model: String = model_related_info.hugging_face_model_name.to_string();
let default_revision: String = model_related_info.hugging_face_model_revision.to_string();
let MODEL_INTERNET: String = env::var("MY_ENV_VAR").unwrap_or("false".to_string());
let MODEL_INTERNET: String = env::var("MODEL_INTERNET").unwrap_or("false".to_string());
let mut model_option: Option<BertModel> = None;
let mut model_tokenizer: Option<Tokenizer> = None;

+log::info!("MODEL_INTERNET {}", MODEL_INTERNET);
if MODEL_INTERNET == "true" {
log::debug!("use internet model");
let (model, mut tokenizer, _) = embedding_common::build_model_and_tokenizer_from_internet(
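For context, the three hunks work together: the Dockerfiles export MODEL_INTERNET using the ENV MODEL_INTERNET="true" form (with the legacy space-separated form, the variable would not hold the bare string true, so the equality check in bertcommon.rs could never match), and bertcommon.rs now reads MODEL_INTERNET instead of the stray MY_ENV_VAR name and logs its value before choosing where to load the model from. Below is a minimal sketch of the resulting pattern, assuming the log facade already used in this file; select_model_source and build_model_and_tokenizer_from_local are hypothetical names standing in for the real calculate_userembedding flow and its local-model branch.

use std::env;

fn select_model_source() {
    // MODEL_INTERNET is set by the Dockerfiles above; fall back to "false"
    // when the variable is absent (for example, when running outside the container).
    let model_internet = env::var("MODEL_INTERNET").unwrap_or_else(|_| "false".to_string());
    log::info!("MODEL_INTERNET {}", model_internet);

    if model_internet == "true" {
        log::debug!("use internet model");
        // build_model_and_tokenizer_from_internet(...) as in the real code
    } else {
        log::debug!("use local model");
        // build_model_and_tokenizer_from_local(...) is a hypothetical local loader
    }
}

In calculate_userembedding, the chosen loader then fills the model_option and model_tokenizer values visible in the hunk above.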
