Revert D12852205: [pytorch][PR] [jit] Add str() builtin

Differential Revision:
D12852205

Original commit changeset: 3e0e9218afdf

fbshipit-source-id: 114b4873504109394fe9d489200d39764ecc638e
David Riazati authored on 2018-11-01 12:46:44 -07:00; committed by Facebook Github Bot
parent e2e560d9c8
commit 99ce499bfe
8 changed files with 0 additions and 54 deletions
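
For context, the reverted change (D12852205) exposed a str() builtin in TorchScript: the compiler mapped str to a cast to StringType, lowered it to a new prim::ToString node, and a registered operator stringified the popped value at runtime. Below is a rough sketch of the usage this revert removes, mirroring the deleted test_str_cast test; with the revert applied, scripting such a function should fail to compile since str is no longer a known builtin.

import torch

@torch.jit.script
def foo(a, b, c, d):
    # type: (Tensor, int, bool, float) -> Tensor
    # Each str(...) call compiled to prim::ToString before this revert;
    # after the revert, `str` is not resolvable inside TorchScript.
    print(str(a))
    print(str(b))
    print(str(c))
    print(str(d))
    return a

foo(torch.tensor(0), 2, True, 2.5)  # pre-revert, prints the stringified arguments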


@@ -43,7 +43,6 @@ namespace c10 {
  _(prim, Store) \
  _(prim, Undefined) \
  _(prim, Starred) \
  _(prim, ToString) \
  _(prim, TupleConstruct) \
  _(prim, TupleUnpack) \
  _(prim, TupleIndex) \


@@ -1,5 +0,0 @@
0
[ Variable[CPULongType]{} ]
2
1
2.5


@@ -1,14 +0,0 @@
graph(%a : Dynamic
      %b : int
      %c : bool
      %d : float) {
  %4 : string = prim::ToString(%a)
   = prim::Print(%4)
  %5 : string = prim::ToString(%b)
   = prim::Print(%5)
  %6 : string = prim::ToString(%c)
   = prim::Print(%6)
  %7 : string = prim::ToString(%d)
   = prim::Print(%7)
  return (%a);
}


@@ -8071,18 +8071,6 @@ a")
            return e

        self.checkScript(foo, (torch.rand(3), torch.rand(3)))

    def test_str_cast(self):
        def foo(a, b, c, d):
            # type: (Tensor, int, bool, float) -> Tensor
            print(str(a))
            print(str(b))
            print(str(c))
            print(str(d))
            return a

        self.checkScript(foo, (torch.tensor(0), 2, True, 2.5), capture_output=True)
        self.assertExpectedGraph(torch.jit.script(foo).graph)


class MnistNet(nn.Module):
    def __init__(self):


@@ -1302,12 +1302,6 @@ Node* Graph::createStringToFloat(Value* value) {
  return result;
}

Node* Graph::createToString(Value* value) {
  auto* result = create(prim::ToString, {value});
  result->output()->setType(StringType::get());
  return result;
}

Node* Graph::createClone(Node * n, std::function<Value*(Value*)> value_map, bool copy_blocks) {
  //n can be from a different graph
  Node * r = n->allocNewInstance(this);


@@ -846,8 +846,6 @@ public:
  TORCH_API Node* createIntToFloat(Value* value);
  TORCH_API Node* createFloatToInt(Value* value);
  TORCH_API Node* createStringToFloat(Value* value);
  TORCH_API Node* createToString(Value* value);
  Node* createPythonOp(
      THPObjectPtr&& pyobj,
      const std::string& cconv,


@@ -20,7 +20,6 @@
#include <ostream>
#include <stdexcept>
#include <string>
#include <sstream>
#include <typeinfo>
#include <unordered_map>
#include <unordered_set>
@@ -185,16 +184,6 @@ RegisterOperators reg({
            return 0;
          };
        }),
    Operator(
        prim::ToString,
        [](Node* node) -> Operation {
          return [](Stack& stack) {
            std::stringstream ss;
            ss << pop(stack);
            push(stack, ss.str());
            return 0;
          };
        }),
    Operator(
        prim::TensorDevice,
        [](const Node* node) -> Operation {


@@ -83,8 +83,6 @@ static Value* typeCast(const SourceRange& loc, Value* value, TypePtr dst) {
    n = graph.createIntToFloat(value);
  } else if(dst->isSubtypeOf(FloatType::get()) && orig->isSubtypeOf(StringType::get())) {
    n = graph.createStringToFloat(value);
  } else if (dst->isSubtypeOf(StringType::get())) {
    n = graph.createToString(value);
  } else {
    throw ErrorReport(loc) << "Cannot cast type '" << orig->str() << "' to type '"
                           << dst->str() << "'.";
@@ -326,7 +324,6 @@ struct Environment {
{"float", std::make_shared<CastValue>(FloatType::get())},
{"int", std::make_shared<CastValue>(IntType::get())},
{"bool", std::make_shared<CastValue>(BoolType::get())},
{"str", std::make_shared<CastValue>(StringType::get())},
// todo(zach): remove when we can correctly export torch.full via ONNX
// or we have implicit conversion that can convert numbers to tensors
{"_to_tensor", std::make_shared<CastValue>(DynamicType::get()) },