Skip to content

Commit

Permalink
Wrap sequence-parallel imports in try/except for older Paddle versions
Browse files Browse the repository at this point in the history
  • Loading branch information
DesmonDay committed Mar 26, 2024
1 parent 6b5099a commit 696f305
Show file tree
Hide file tree
Showing 5 changed files with 52 additions and 34 deletions.
24 changes: 14 additions & 10 deletions paddlenlp/transformers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,16 +29,20 @@
from .feature_extraction_utils import BatchFeature, FeatureExtractionMixin
from .image_processing_utils import ImageProcessingMixin
from .attention_utils import create_bigbird_rand_mask_idx_list
from paddle.distributed.fleet.utils.sequence_parallel_utils import (
GatherOp,
ScatterOp,
AllGatherOp,
ReduceScatterOp,
ColumnSequenceParallelLinear,
RowSequenceParallelLinear,
mark_as_sequence_parallel_parameter,
register_sequence_parallel_allreduce_hooks,
)

# Sequence-parallel utilities are only present in newer Paddle releases, so
# this import is best-effort: on older Paddle the module is absent and the
# names below are simply not defined.
try:
    from paddle.distributed.fleet.utils.sequence_parallel_utils import (
        GatherOp,
        ScatterOp,
        AllGatherOp,
        ReduceScatterOp,
        ColumnSequenceParallelLinear,
        RowSequenceParallelLinear,
        mark_as_sequence_parallel_parameter,
        register_sequence_parallel_allreduce_hooks,
    )
except ImportError:
    # Catch only ImportError: a bare `except:` would also swallow unrelated
    # failures (including KeyboardInterrupt/SystemExit) and hide real bugs.
    # NOTE(review): on failure these names stay undefined; code that uses
    # them presumably runs only when sequence parallelism is enabled.
    pass

Check warning on line 45 in paddlenlp/transformers/__init__.py

View check run for this annotation

Codecov / codecov/patch

paddlenlp/transformers/__init__.py#L44-L45

Added lines #L44 - L45 were not covered by tests
from .export import export_model

# isort: split
Expand Down
18 changes: 11 additions & 7 deletions paddlenlp/transformers/gpt/modeling.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,13 +29,17 @@
from paddle.distributed import fleet
from paddle.distributed.fleet.meta_parallel import get_rng_state_tracker
from paddle.distributed.fleet.utils import recompute
from paddle.distributed.fleet.utils.sequence_parallel_utils import (
ColumnSequenceParallelLinear,
GatherOp,
RowSequenceParallelLinear,
ScatterOp,
mark_as_sequence_parallel_parameter,
)

# Best-effort import: sequence-parallel utilities exist only in newer Paddle
# releases; on older Paddle these names are left undefined.
try:
    from paddle.distributed.fleet.utils.sequence_parallel_utils import (
        ColumnSequenceParallelLinear,
        GatherOp,
        RowSequenceParallelLinear,
        ScatterOp,
        mark_as_sequence_parallel_parameter,
    )
except ImportError:
    # Catch only ImportError — a bare `except:` would also swallow
    # KeyboardInterrupt/SystemExit and mask unrelated errors.
    pass

Check warning on line 42 in paddlenlp/transformers/gpt/modeling.py

View check run for this annotation

Codecov / codecov/patch

paddlenlp/transformers/gpt/modeling.py#L41-L42

Added lines #L41 - L42 were not covered by tests
from paddle.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss

from ...utils.converter import StateDictNameMapping
Expand Down
10 changes: 7 additions & 3 deletions paddlenlp/transformers/gpt/modeling_pp.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,13 @@
SharedLayerDesc,
)
from paddle.distributed.fleet.utils import recompute
from paddle.distributed.fleet.utils.sequence_parallel_utils import (
mark_as_sequence_parallel_parameter,
)

# Best-effort import: this helper exists only in newer Paddle releases; on
# older Paddle the name is left undefined.
try:
    from paddle.distributed.fleet.utils.sequence_parallel_utils import (
        mark_as_sequence_parallel_parameter,
    )
except ImportError:
    # Catch only ImportError — a bare `except:` would also swallow
    # KeyboardInterrupt/SystemExit and mask unrelated errors.
    pass

Check warning on line 29 in paddlenlp/transformers/gpt/modeling_pp.py

View check run for this annotation

Codecov / codecov/patch

paddlenlp/transformers/gpt/modeling_pp.py#L28-L29

Added lines #L28 - L29 were not covered by tests

from paddlenlp.transformers.model_utils import PipelinePretrainedModel

Expand Down
17 changes: 10 additions & 7 deletions paddlenlp/transformers/llama/modeling.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,13 +44,16 @@ def swiglu(x, y=None):
return F.silu(x) * y


from paddle.distributed.fleet.utils.sequence_parallel_utils import (
ColumnSequenceParallelLinear,
GatherOp,
RowSequenceParallelLinear,
ScatterOp,
mark_as_sequence_parallel_parameter,
)
# Best-effort import: sequence-parallel utilities exist only in newer Paddle
# releases; on older Paddle these names are left undefined.
try:
    from paddle.distributed.fleet.utils.sequence_parallel_utils import (
        ColumnSequenceParallelLinear,
        GatherOp,
        RowSequenceParallelLinear,
        ScatterOp,
        mark_as_sequence_parallel_parameter,
    )
except ImportError:
    # Catch only ImportError — a bare `except:` would also swallow
    # KeyboardInterrupt/SystemExit and mask unrelated errors.
    pass

Check warning on line 56 in paddlenlp/transformers/llama/modeling.py

View check run for this annotation

Codecov / codecov/patch

paddlenlp/transformers/llama/modeling.py#L55-L56

Added lines #L55 - L56 were not covered by tests
from paddle.utils import try_import

from paddlenlp.transformers.conversion_utils import (
Expand Down
17 changes: 10 additions & 7 deletions paddlenlp/transformers/mixtral/modeling.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,16 @@
except ImportError:
fused_rotary_position_embedding = None

from paddle.distributed.fleet.utils.sequence_parallel_utils import (
ColumnSequenceParallelLinear,
GatherOp,
RowSequenceParallelLinear,
ScatterOp,
mark_as_sequence_parallel_parameter,
)
# Best-effort import: sequence-parallel utilities exist only in newer Paddle
# releases; on older Paddle these names are left undefined. Matches the
# `except ImportError` style used for the fused rotary-embedding import just
# above in this file.
try:
    from paddle.distributed.fleet.utils.sequence_parallel_utils import (
        ColumnSequenceParallelLinear,
        GatherOp,
        RowSequenceParallelLinear,
        ScatterOp,
        mark_as_sequence_parallel_parameter,
    )
except ImportError:
    # Catch only ImportError — a bare `except:` would also swallow
    # KeyboardInterrupt/SystemExit and mask unrelated errors.
    pass

Check warning on line 45 in paddlenlp/transformers/mixtral/modeling.py

View check run for this annotation

Codecov / codecov/patch

paddlenlp/transformers/mixtral/modeling.py#L44-L45

Added lines #L44 - L45 were not covered by tests

from paddlenlp.transformers.conversion_utils import (
StateDictNameMapping,
Expand Down

0 comments on commit 696f305

Please sign in to comment.