1 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved. |
2 | |
3 | Licensed under the Apache License, Version 2.0 (the "License"); |
4 | you may not use this file except in compliance with the License. |
5 | You may obtain a copy of the License at |
6 | |
7 | http://www.apache.org/licenses/LICENSE-2.0 |
8 | |
9 | Unless required by applicable law or agreed to in writing, software |
10 | distributed under the License is distributed on an "AS IS" BASIS, |
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
12 | See the License for the specific language governing permissions and |
13 | limitations under the License. |
14 | ==============================================================================*/ |
15 | #include <string> |
16 | |
17 | #include "tensorflow/lite/c/builtin_op_data.h" |
18 | #include "tensorflow/lite/c/common.h" |
19 | #include "tensorflow/lite/core/subgraph.h" |
20 | #include "tensorflow/lite/experimental/resource/lookup_interfaces.h" |
21 | #include "tensorflow/lite/kernels/kernel_util.h" |
22 | |
23 | namespace tflite { |
24 | namespace ops { |
25 | namespace builtin { |
26 | namespace hashtable { |
27 | |
// The current hash table op returns a key of the hash table resource objects,
// shared by the context. Later, this implementation might be updated by sharing
// the actual reference of hash table objects in the tensor buffer.

// Index of the op's single output tensor, which carries the int32 handle
// identifying the hash table resource.
static constexpr int kResourceHandleTensor = 0;
33 | |
34 | TfLiteStatus PrepareHashtable(TfLiteContext* context, TfLiteNode* node) { |
35 | TF_LITE_ENSURE_EQ(context, NumInputs(node), 0); |
36 | TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); |
37 | |
38 | TF_LITE_ENSURE(context, node->builtin_data != nullptr); |
39 | const auto* params = |
40 | reinterpret_cast<const TfLiteHashtableParams*>(node->builtin_data); |
41 | |
42 | TF_LITE_ENSURE(context, (params->key_dtype == kTfLiteInt64 && |
43 | params->value_dtype == kTfLiteString) || |
44 | (params->key_dtype == kTfLiteString && |
45 | params->value_dtype == kTfLiteInt64)); |
46 | |
47 | TfLiteTensor* resource_handle_tensor; |
48 | TF_LITE_ENSURE_OK(context, GetOutputSafe(context, node, kResourceHandleTensor, |
49 | &resource_handle_tensor)); |
50 | TF_LITE_ENSURE_EQ(context, resource_handle_tensor->type, kTfLiteResource); |
51 | size_t bytesRequired = sizeof(int32_t); |
52 | |
53 | // Realloc space for an integer handle value. |
54 | TfLiteTensorRealloc(bytesRequired, resource_handle_tensor); |
55 | resource_handle_tensor->bytes = bytesRequired; |
56 | |
57 | TfLiteIntArray* outputSize = TfLiteIntArrayCreate(1); |
58 | outputSize->data[0] = 1; |
59 | if (resource_handle_tensor->dims) |
60 | TfLiteIntArrayFree(resource_handle_tensor->dims); |
61 | resource_handle_tensor->dims = outputSize; |
62 | |
63 | return kTfLiteOk; |
64 | } |
65 | |
66 | TfLiteStatus EvalHashtable(TfLiteContext* context, TfLiteNode* node) { |
67 | TF_LITE_ENSURE(context, node->builtin_data != nullptr); |
68 | const auto* params = |
69 | reinterpret_cast<const TfLiteHashtableParams*>(node->builtin_data); |
70 | |
71 | const int32_t resource_id = params->table_id; |
72 | |
73 | TfLiteTensor* resource_handle_tensor; |
74 | TF_LITE_ENSURE_OK(context, GetOutputSafe(context, node, kResourceHandleTensor, |
75 | &resource_handle_tensor)); |
76 | *resource_handle_tensor->data.i32 = resource_id; |
77 | |
78 | Subgraph* subgraph = reinterpret_cast<Subgraph*>(context->impl_); |
79 | auto& resources = subgraph->resources(); |
80 | resource::CreateHashtableResourceIfNotAvailable( |
81 | &resources, resource_id, params->key_dtype, params->value_dtype); |
82 | return kTfLiteOk; |
83 | } |
84 | |
85 | } // namespace hashtable |
86 | |
87 | TfLiteRegistration* Register_HASHTABLE() { |
88 | static TfLiteRegistration r = {nullptr, nullptr, hashtable::PrepareHashtable, |
89 | hashtable::EvalHashtable}; |
90 | return &r; |
91 | } |
92 | |
93 | } // namespace builtin |
94 | } // namespace ops |
95 | } // namespace tflite |
96 | |