1 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
2 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
3 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
4 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
5 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
6 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
7 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
8 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
9 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
10 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
11 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
12 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
13 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
14 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
15 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
16 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
17 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
18 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
19 | |
20 | // YOU ARE IN THE WRONG PLACE! TURN BACK NOW! |
21 | |
22 | // This code was a temporary hack to enable embedding arbitrary C++ structures |
23 | // into Tensors. THIS IS UNSAFE AND IS NOT SUPPORTED. IF YOU USE THIS CODE, |
24 | // IT __WILL__ BREAK. |
25 | |
26 | // This code has been superseded by custom classes: |
27 | // https://pytorch.org/tutorials/advanced/torch_script_custom_classes.html |
28 | |
29 | // Please use custom classes and **DO NOT ADD MORE CALLSITES TO THINGS DEFINED |
30 | // IN THIS FILE**. |
31 | |
32 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
33 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
34 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
35 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
36 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
37 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
38 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
39 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
40 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
41 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
42 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
43 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
44 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
45 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
46 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
47 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
48 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
49 | // STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP STOP |
50 | |
51 | #include <ATen/TracerMode.h> |
52 | #include <ATen/core/Tensor.h> |
53 | |
54 | #ifndef AT_PER_OPERATOR_HEADERS |
55 | #include <ATen/Functions.h> |
56 | #else |
57 | #include <ATen/ops/empty.h> |
58 | #endif |
59 | |
60 | namespace at { |
61 | namespace cpp_custom_type_hack { |
62 | |
63 | template <typename T> |
64 | [[deprecated( |
65 | "Use custom classes instead: " |
66 | "https://pytorch.org/tutorials/advanced/torch_script_custom_classes.html" )]] bool |
67 | isa(const Tensor& packed) { |
68 | return (packed.scalar_type() == kByte) && |
69 | (packed.storage().data_ptr().get_deleter() == |
70 | caffe2::TypeMeta::Make<T>().deleteFn()); |
71 | } |
72 | |
73 | template <typename T> |
74 | [[deprecated( |
75 | "Use custom classes instead: " |
76 | "https://pytorch.org/tutorials/advanced/torch_script_custom_classes.html" )]] T& |
77 | cast(const Tensor& packed) { |
78 | TORCH_CHECK( |
79 | packed.scalar_type() == kByte, "Expected temporary cpp type wrapper" ); |
80 | TORCH_CHECK( |
81 | packed.storage().data_ptr().get_deleter() == |
82 | caffe2::TypeMeta::Make<T>().deleteFn(), |
83 | "Expected temporary cpp type wrapper of type " , |
84 | caffe2::TypeMeta::TypeName<T>()); |
85 | return *reinterpret_cast<T*>(packed.storage().data_ptr().get()); |
86 | } |
87 | |
template <typename T>
[[deprecated(
    "Use custom classes instead: "
    "https://pytorch.org/tutorials/advanced/torch_script_custom_classes.html")]] Tensor
create(std::unique_ptr<T> ptr, TensorOptions options) {
  // Transfers ownership of `ptr` into a freshly allocated CPU byte Tensor.
  // The wrapped instance can later be recognized with isa<T>() and recovered
  // with cast<T>(); its registered deleter frees it when the tensor's storage
  // is destroyed, so no memory is leaked.

  // None of this should trace, so turn off Tracer dispatching
  at::AutoDispatchBelowADInplaceOrView guard; // TODO: remove
  at::tracer::impl::NoTracerDispatchMode tracer_guard;

  // We store this instance away in a Tensor and register a deleter function
  // so that we do not leak memory. On the other side, we pull out the storage's
  // data_ptr and get the right typed pointer.
  void* raw_ptr = ptr.release();
  // The raw object pointer is passed both as the data pointer and as the
  // deleter context, so the deleter receives the object itself to free.
  // NOTE(review): presumably DataPtr owns raw_ptr (RAII) from here on, so the
  // deleter would still run if at::empty below threw — confirm in c10 docs.
  at::DataPtr at_ptr(
      raw_ptr, raw_ptr, caffe2::TypeMeta::Make<T>().deleteFn(), at::kCPU);

  // The tensor's size doesn't really matter for correctness, but we make it
  // match sizeof(T) since callers are likely to inspect this hack from Python.
  auto retval = at::empty({sizeof(T)}, options.device(kCPU).dtype(at::kByte));
  // Replace the empty tensor's freshly allocated buffer with our wrapped
  // pointer. NOTE(review): the "_noswap" variant appears to install the new
  // DataPtr without returning the old one — verify against the Storage API.
  retval.storage().set_data_ptr_noswap(std::move(at_ptr));
  return retval;
}
110 | |
111 | } // namespace cpp_custom_type_hack |
112 | } // namespace at |
113 | |