Skip to content

Commit

Permalink
Alpha release test (#164)
Browse files Browse the repository at this point in the history
This PR includes the latest fixes identified while testing the
end-to-end pipeline.
  • Loading branch information
PhilippeMoussalli authored May 23, 2023
1 parent 628cbb0 commit 228a394
Show file tree
Hide file tree
Showing 3 changed files with 3 additions and 6 deletions.
2 changes: 1 addition & 1 deletion components/segment_images/src/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def segment(batch, model, processor):
outputs, target_sizes=batch["image_sizes"]
)
# turn into RGB images
segmentations = [convert_to_rgb(seg.numpy()) for seg in segmentations]
segmentations = [convert_to_rgb(seg.cpu().numpy()) for seg in segmentations]

return segmentations

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
configure_logging()
logger = logging.getLogger(__name__)


interior_styles = [
"art deco",
"bauhaus",
Expand Down Expand Up @@ -113,9 +112,6 @@ def load(self) -> dd.DataFrame:

df = dd.from_pandas(pandas_df, npartitions=1)

# TODO: remove
df = dd.from_pandas(df.head(), npartitions=1)

return df


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,13 +48,14 @@ def transform(
write_columns.extend([f"{subset_name}_{field}" for field in subset.fields])
# Get schema
subset_schema = {
f"{subset_name}_{field.name}": field.type.value
f"{subset_name}_{field.name}": field.type.name
for field in subset.fields.values()
}

schema.update(subset_schema)

dataframe_hub = dataframe[write_columns]

dd.to_parquet(dataframe_hub, path=f"{repo_path}/data", schema=schema)

return dataframe
Expand Down

0 comments on commit 228a394

Please sign in to comment.