Skip to content

Commit

Permalink
remove set tag
Browse files Browse the repository at this point in the history
  • Loading branch information
Siyuan Liu committed Nov 13, 2023
1 parent 4acf02d commit 495f844
Show file tree
Hide file tree
Showing 5 changed files with 3 additions and 18 deletions.
6 changes: 0 additions & 6 deletions torch_xla/csrc/init_python_bindings.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1974,12 +1974,6 @@ void InitXlaModuleBindings(py::module m) {
xtensor->MarkDynamicDimension(dim);
});

// Python binding: attach a free-form tag string to the XLA tensor backing
// `input`. The tag is forwarded to the tensor's current IR node.
m.def("_xla_set_tag", [](const at::Tensor& input, const std::string& tag) {
  // Count under this binding's own name. The original label "XlaMarkDynamic"
  // was a copy-paste from the _xla_mark_dynamic binding and would have
  // inflated that metric instead of tracking SetTag usage.
  TORCH_LAZY_COUNTER("XlaSetTag", 1);
  XLATensorPtr xtensor = bridge::GetXlaTensor(input);
  xtensor->SetTag(tag);
});

// -------------Dynamo Integration API Start-------------------------
/*
Return tensor ids and at::tensors for all DeviceData nodes that are needed
Expand Down
2 changes: 0 additions & 2 deletions torch_xla/csrc/ir.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -178,8 +178,6 @@ std::string XlaNode::ToString() const {
for (const auto dim : dynamic_dims_) {
ss << dim;
}
ss << ", "
<< "tags: " << experimental_tag_;
return ss.str();
}

Expand Down
7 changes: 3 additions & 4 deletions torch_xla/csrc/ir.h
Original file line number Diff line number Diff line change
Expand Up @@ -138,13 +138,12 @@ class XlaNode : public torch::lazy::Node {

std::string ToString() const override;

// Record `dim` as a dynamically-sized dimension of this node's output shape.
void MarkDynamicDimension(uint32_t dim) { dynamic_dims_.push_back(dim); }
// Attach a free-form experimental tag to this node, replacing any prior tag.
void SetTag(const std::string& tag) { experimental_tag_ = tag; }
// Read-only accessor for the tag set via SetTag(); empty if never set.
const std::string& experimental_tag() const { return experimental_tag_; }
// Record `dim` as a dynamically-sized dimension of this node's output shape.
void MarkDynamicDimension(uint32_t dim) {
dynamic_dims_.push_back(dim);
}
// Accessor for the dimension indices previously marked dynamic.
const std::vector<uint32_t>& dynamic_dims() const { return dynamic_dims_; }

protected:
std::string experimental_tag_;
std::vector<uint32_t> dynamic_dims_;

private:
Expand Down
5 changes: 0 additions & 5 deletions torch_xla/csrc/tensor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -899,9 +899,4 @@ void XLATensor::MarkDynamicDimension(uint32_t dim) {
xla_node->MarkDynamicDimension(dim);
}

// Attach a free-form tag string to the IR node backing this tensor.
//
// The original code dereferenced the dynamic_cast result unconditionally:
// if the tensor has no current IR value (CurrentIrValue() holds a null node)
// or the node is not an XlaNode, that is a null-pointer dereference. Guard
// and make the call a no-op in those cases instead.
void XLATensor::SetTag(const std::string& tag) {
  auto* xla_node = dynamic_cast<XlaNode*>(CurrentIrValue().node.get());
  if (xla_node != nullptr) {
    xla_node->SetTag(tag);
  }
}

} // namespace torch_xla
1 change: 0 additions & 1 deletion torch_xla/csrc/tensor.h
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,6 @@ class XLATensor : public torch::lazy::LazyTensor {
void SetScalarType(c10::optional<at::ScalarType> logical_element_type);

void MarkDynamicDimension(uint32_t dim);
void SetTag(const std::string& tag);
// We don't use the upstream shape to provide xla::shape.
runtime::util::MaybeRef<xla::Shape> shape() const;

Expand Down

0 comments on commit 495f844

Please sign in to comment.