Revert "add return_tensor parameter for feature extraction (#19257)" (#…
Browse files Browse the repository at this point in the history
…19680)

This reverts commit 35bd089.
sgugger authored Oct 17, 2022
1 parent bf0addc commit f2ecb9e
Showing 2 changed files with 3 additions and 29 deletions.
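
With the revert in place, FeatureExtractionPipeline.postprocess always converts the model output to plain Python lists (e.g. model_outputs[0].tolist() on the PyTorch path shown in the diff below), and the return_tensors=True call path exercised by the removed tests is gone. The snippet below is an illustration, not part of this commit: a minimal sketch of how a caller can still obtain a tensor by converting the output manually, reusing the tiny test model from the removed tests.

import torch
from transformers import pipeline

# Post-revert, the feature-extraction pipeline returns nested lists, not framework tensors.
feature_extractor = pipeline(
    task="feature-extraction", model="hf-internal-testing/tiny-random-distilbert", framework="pt"
)
outputs = feature_extractor("This is a test")  # nested lists, roughly [batch, seq_len, hidden_size]
features = torch.tensor(outputs)  # rebuild a tensor manually if one is needed
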
14 changes: 3 additions & 11 deletions src/transformers/pipelines/feature_extraction.py
@@ -31,8 +31,6 @@ class FeatureExtractionPipeline(Pipeline):
             If no framework is specified, will default to the one currently installed. If no framework is specified and
             both frameworks are installed, will default to the framework of the `model`, or to PyTorch if no model is
             provided.
-        return_tensor (`bool`, *optional*):
-            If `True`, returns a tensor according to the specified framework, otherwise returns a list.
         task (`str`, defaults to `""`):
             A task-identifier for the pipeline.
         args_parser ([`~pipelines.ArgumentHandler`], *optional*):
@@ -42,7 +40,7 @@ class FeatureExtractionPipeline(Pipeline):
             the associated CUDA device id.
     """
 
-    def _sanitize_parameters(self, truncation=None, tokenize_kwargs=None, return_tensors=None, **kwargs):
+    def _sanitize_parameters(self, truncation=None, tokenize_kwargs=None, **kwargs):
         if tokenize_kwargs is None:
             tokenize_kwargs = {}
 
@@ -55,11 +53,7 @@ def _sanitize_parameters(self, truncation=None, tokenize_kwargs=None, return_ten
 
         preprocess_params = tokenize_kwargs
 
-        postprocess_params = {}
-        if return_tensors is not None:
-            postprocess_params["return_tensors"] = return_tensors
-
-        return preprocess_params, {}, postprocess_params
+        return preprocess_params, {}, {}
 
     def preprocess(self, inputs, **tokenize_kwargs) -> Dict[str, GenericTensor]:
         return_tensors = self.framework
@@ -70,10 +64,8 @@ def _forward(self, model_inputs):
         model_outputs = self.model(**model_inputs)
         return model_outputs
 
-    def postprocess(self, model_outputs, return_tensors=False):
+    def postprocess(self, model_outputs):
         # [0] is the first available tensor, logits or last_hidden_state.
-        if return_tensors:
-            return model_outputs[0]
         if self.framework == "pt":
             return model_outputs[0].tolist()
         elif self.framework == "tf":
18 changes: 0 additions & 18 deletions tests/pipelines/test_pipelines_feature_extraction.py
@@ -15,8 +15,6 @@
 import unittest
 
 import numpy as np
-import tensorflow as tf
-import torch
 
 from transformers import (
     FEATURE_EXTRACTOR_MAPPING,
@@ -135,22 +133,6 @@ def test_tokenization_small_model_tf(self):
             tokenize_kwargs=tokenize_kwargs,
         )
 
-    @require_torch
-    def test_return_tensors_pt(self):
-        feature_extractor = pipeline(
-            task="feature-extraction", model="hf-internal-testing/tiny-random-distilbert", framework="pt"
-        )
-        outputs = feature_extractor("This is a test" * 100, return_tensors=True)
-        self.assertTrue(torch.is_tensor(outputs))
-
-    @require_tf
-    def test_return_tensors_tf(self):
-        feature_extractor = pipeline(
-            task="feature-extraction", model="hf-internal-testing/tiny-random-distilbert", framework="tf"
-        )
-        outputs = feature_extractor("This is a test" * 100, return_tensors=True)
-        self.assertTrue(tf.is_tensor(outputs))
-
     def get_shape(self, input_, shape=None):
         if shape is None:
             shape = []
