 // - use different overloads of Run method

 int main(int argc, char **argv) {
-
   using namespace tensorflow;
   using namespace tensorflow::ops;

@@ -66,76 +65,66 @@ int main(int argc, char **argv) {
     // std::vector<Tensor>* outputs) const;
     //
     //
-    // which takes FeedType (alias of std::unordered_map<Output, Input::Initializer, OutputHash>
-    // as the first argument.
-    // Note - In std::unordered_map OutputHash is optional
-    // So we just need to supply a map whose key of type "Output" and the
-    // value that respect Initializer
+    // which takes FeedType (an alias of std::unordered_map<Output,
+    // Input::Initializer, OutputHash>) as the first argument.
+    //
+    // Note - in std::unordered_map the hasher (OutputHash) is optional, so
+    // we just need to supply a map whose keys are of type Output and whose
+    // values are convertible to Input::Initializer.
     //
     // {a, 2} & {b, 3} would satisfy this requirement, since 'a' & 'b' are
     // of type Output.
-
-    auto status = session.Run({
-      {
-        {a, 2},
-        {b, 3}
-      } }, {c}, &outputs);
-
+
+    auto status = session.Run({{{a, 2}, {b, 3}}}, {c}, &outputs);
+
     TF_CHECK_OK(status);
-
+
     // we know that it will be a scalar
     // we can also get the underlying data by calling flat
     std::cout << "Underlying Scalar value -> " << outputs[0].flat<int>()
               << std::endl;
   }
-
+
   {
     // This is yet another example that makes use of Placeholder, however
     // this time we want one of the placeholders to have a default value.
     //
     // In other words, it does not need to be specified during the session
     // execution. If you give a new value it will be used, otherwise the
     // default value is used.
-
+
     ClientSession session(scope);
-
+
     // create an input
     auto defaultAInput = Input(8);
-
+
     // we will use Placeholder as the type for our variables
     auto a = PlaceholderWithDefault(scope, defaultAInput, PartialTensorShape());
     auto b = Placeholder(scope, DT_INT32);
-
+
     // define the add operation that takes
     // the placeholders a and b as inputs
     auto c = Add(scope, a, b);
-
+
     std::vector<Tensor> outputs;
-
+
     // In this Run we are not specifying 'a'
     // so its default value i.e. 8 will be used
-    auto status = session.Run({
-      {
-        {b, 3}
-      } }, {c}, &outputs);
-
+    auto status = session.Run({{{b, 3}}}, {c}, &outputs);
+
     TF_CHECK_OK(status);
-
-    std::cout << "Underlying Scalar value (using default placeholder value [8]) -> " << outputs[0].flat<int>()
-              << std::endl;
-
+
+    std::cout
+        << "Underlying Scalar value (using default placeholder value [8]) -> "
+        << outputs[0].flat<int>() << std::endl;
+
     // here we do specify a value for placeholder 'a' i.e. 9
-    status = session.Run({
-      {
-        {a, 9},
-        {b, 3}
-      } }, {c}, &outputs);
-
+    status = session.Run({{{a, 9}, {b, 3}}}, {c}, &outputs);
+
     TF_CHECK_OK(status);
-
-    std::cout << "Underlying Scalar value (after supplying new value [9]) -> " << outputs[0].flat<int>()
-              << std::endl;
-
+
+    std::cout << "Underlying Scalar value (after supplying new value [9]) -> "
+              << outputs[0].flat<int>() << std::endl;
   }

   return 0;
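
For reference, a minimal standalone sketch of what the brace-initialized feed above expands to: an explicit ClientSession::FeedType map (the std::unordered_map<Output, Input::Initializer, OutputHash> alias mentioned in the comments), built up with emplace and passed to the same Run overload. The include paths, the root Scope, and the use of Tensor::scalar to read the result are assumptions here; they are not shown in this diff.

// Sketch only: builds the FeedType map explicitly instead of relying on
// nested brace initialization. Assumes the usual TensorFlow C++ client
// headers and a root scope (not visible in the hunks above).
#include <iostream>
#include <vector>

#include "tensorflow/cc/client/client_session.h"
#include "tensorflow/cc/ops/standard_ops.h"
#include "tensorflow/core/framework/tensor.h"

int main() {
  using namespace tensorflow;
  using namespace tensorflow::ops;

  Scope root = Scope::NewRootScope();
  auto a = Placeholder(root, DT_INT32);
  auto b = Placeholder(root, DT_INT32);
  auto c = Add(root, a, b);

  // FeedType is an alias of
  // std::unordered_map<Output, Input::Initializer, OutputHash>, so the
  // placeholders act as keys and plain ints convert to Input::Initializer.
  ClientSession::FeedType feed;
  feed.emplace(a, Input::Initializer(2));
  feed.emplace(b, Input::Initializer(3));

  ClientSession session(root);
  std::vector<Tensor> outputs;
  TF_CHECK_OK(session.Run(feed, {c}, &outputs));

  // The fetched tensor holds a single int, so scalar<int>()() extracts it.
  std::cout << outputs[0].scalar<int>()() << std::endl;  // expected: 5
  return 0;
}

Passing the braced feed list directly to Run, as the committed code does, is shorthand for constructing this map inline.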
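Similarly, a sketch of the PlaceholderWithDefault pattern from the second block: when 'a' is absent from the feed map its default Input(8) is used, and feeding 'a' overrides it. Again, the includes, the root scope, and the scalar<int>() read are assumptions rather than code taken from this commit.

// Sketch only: PlaceholderWithDefault falls back to its default Input value
// unless that node is fed in a particular Run call. Header paths and scope
// setup are assumed, not taken from the diff above.
#include <iostream>
#include <vector>

#include "tensorflow/cc/client/client_session.h"
#include "tensorflow/cc/ops/standard_ops.h"
#include "tensorflow/core/framework/tensor.h"

int main() {
  using namespace tensorflow;
  using namespace tensorflow::ops;

  Scope root = Scope::NewRootScope();
  auto a = PlaceholderWithDefault(root, Input(8), PartialTensorShape());
  auto b = Placeholder(root, DT_INT32);
  auto c = Add(root, a, b);

  ClientSession session(root);
  std::vector<Tensor> outputs;

  // 'a' is not fed here, so its default value 8 is used: 8 + 3 = 11.
  TF_CHECK_OK(session.Run({{b, 3}}, {c}, &outputs));
  std::cout << outputs[0].scalar<int>()() << std::endl;

  // Feeding 'a' overrides the default: 9 + 3 = 12.
  TF_CHECK_OK(session.Run({{a, 9}, {b, 3}}, {c}, &outputs));
  std::cout << outputs[0].scalar<int>()() << std::endl;
  return 0;
}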