
Commit 33869af

add parser onnx and change converters

1 parent adaa1da · commit 33869af

File tree

11 files changed: +223 additions, -9 deletions

app/AlexNet/CMakeLists.txt

Lines changed: 0 additions & 6 deletions
This file was deleted.

app/CMakeLists.txt

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 add_subdirectory(ReaderImage)
 add_subdirectory(Accuracy)
-add_subdirectory(AlexNet)
+add_subdirectory(Converters)
 add_subdirectory(AccuracyImgNet)
 add_subdirectory(Graph)

app/Converters/CMakeLists.txt

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+add_executable(Reader_weights reader_weights_sample.cpp)
+
+target_link_libraries(Reader_weights PUBLIC perf_lib layers_lib reader_lib)
+
+add_definitions(-DMODEL_PATH_H5="${CMAKE_SOURCE_DIR}/docs/jsons/model_data_alexnet_1.json")
+add_definitions(-DMODEL_PATH_GOOGLENET_ONNX="${CMAKE_SOURCE_DIR}/docs/jsons/googlenet_onnx_model.json")

app/Converters/parser.py

Lines changed: 69 additions & 0 deletions
@@ -0,0 +1,69 @@
+import json
+import os
+
+import tensorflow as tf
+from tensorflow.keras.initializers import GlorotUniform as OriginalGlorotUniform, Zeros as OriginalZeros
+from tensorflow.keras.models import load_model
+
+class CustomGlorotUniform(OriginalGlorotUniform):
+    def __init__(self, seed=None, **kwargs):
+        kwargs.pop('dtype', None)  # Remove the unexpected dtype keyword if present
+        super().__init__(seed=seed, **kwargs)
+
+class CustomZeros(OriginalZeros):
+    def __init__(self, **kwargs):
+        kwargs.pop('dtype', None)  # Remove the unexpected dtype keyword if present
+        super().__init__(**kwargs)
+
+# Paths to the model and the output JSON file
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+MODEL_PATH = os.path.join(BASE_DIR, 'docs', 'models', 'AlexNet-model.h5')
+MODEL_DATA_PATH = os.path.join(BASE_DIR, 'docs', 'jsons', 'model_data_alexnet_1.json')
+
+# Load the model
+model = load_model(MODEL_PATH, custom_objects={'GlorotUniform': CustomGlorotUniform, 'Zeros': CustomZeros})
+
+# Collect the model weights and the layer ordering
+layer_info = []
+for layer in model.layers:
+    layer_name = layer.name
+    layer_type = type(layer).__name__  # Layer type (e.g. Conv2D, Dense, Activation, ...)
+    layer_config = layer.get_config()
+
+    # Extract layer parameters
+    layer_padding = None
+    layer_activation = None
+
+    if isinstance(layer, tf.keras.layers.Conv2D):
+        layer_padding = layer_config.get('padding', None)  # Read the Conv2D padding
+        layer_activation = layer_config.get('activation', None)  # Read the activation function
+
+    # Store the layer information: type, name, padding and weights
+    layer_data = {
+        'index': len(layer_info),  # Ordinal index of the layer
+        'name': layer_name,
+        'type': layer_type
+    }
+
+    if layer_padding is not None:
+        layer_data['padding'] = layer_padding
+
+    layer_data['weights'] = [w.tolist() for w in layer.get_weights()]
+
+    layer_info.append(layer_data)
+
+    # If the activation is fused into the layer, append it as a separate layer
+    if layer_activation and not isinstance(layer, tf.keras.layers.Activation):
+        activation_layer = {
+            'index': len(layer_info),
+            'name': f"activation_{layer_name}",
+            'type': layer_activation,
+            'weights': []
+        }
+        layer_info.append(activation_layer)
+
+# Save the data to the JSON file
+with open(MODEL_DATA_PATH, 'w') as f:
+    json.dump(layer_info, f, indent=2)
+
+print(f"Model data saved to {MODEL_DATA_PATH}")
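For reference, a minimal sketch (not part of the commit) of how the JSON emitted by parser.py can be inspected. The field names ('index', 'name', 'type', 'padding', 'weights') mirror the dictionaries built above; the path is illustrative and should point at wherever the script wrote its output:

import json

# Illustrative path; adjust to the actual output location of parser.py.
with open('docs/jsons/model_data_alexnet_1.json') as f:
    layers = json.load(f)

# Each entry carries 'index', 'name', 'type', an optional 'padding', and 'weights'.
for layer in layers:
    print(f"{layer['index']:3d}  {layer['type']:<12}  {layer['name']:<24}  "
          f"padding={layer.get('padding', '-')}  weight arrays={len(layer['weights'])}")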

app/Converters/parser_onnx.py

Lines changed: 114 additions & 0 deletions
@@ -0,0 +1,114 @@
+import json
+import onnx
+import os
+from onnx import helper, numpy_helper
+
+
+def onnx_to_json(model_path, output_json_path):
+    # Load the model
+    model = onnx.load(model_path)
+
+    # Validate the model
+    onnx.checker.check_model(model)
+
+    # Dictionary that holds all of the extracted information
+    model_info = {
+        "model_metadata": {
+            "ir_version": model.ir_version,
+            "opset_version": model.opset_import[0].version,
+            "producer_name": model.producer_name,
+            "producer_version": model.producer_version
+        },
+        "graph": {
+            "name": model.graph.name,
+            "inputs": [],
+            "outputs": [],
+            "nodes": [],
+            "initializers": []
+        }
+    }
+
+    # Process the input tensors
+    for input in model.graph.input:
+        tensor_type = input.type.tensor_type
+        model_info["graph"]["inputs"].append({
+            "name": input.name,
+            "elem_type": tensor_type.elem_type,
+            "shape": [dim.dim_value if dim.HasField("dim_value") else dim.dim_param
+                      for dim in tensor_type.shape.dim]
+        })
+
+    # Process the output tensors
+    for output in model.graph.output:
+        tensor_type = output.type.tensor_type
+        model_info["graph"]["outputs"].append({
+            "name": output.name,
+            "elem_type": tensor_type.elem_type,
+            "shape": [dim.dim_value if dim.HasField("dim_value") else dim.dim_param
+                      for dim in tensor_type.shape.dim]
+        })
+
+    # Process the nodes (operations)
+    for node in model.graph.node:
+        node_info = {
+            "name": node.name,
+            "op_type": node.op_type,
+            "inputs": list(node.input),    # Convert to list
+            "outputs": list(node.output),  # Convert to list
+            "attributes": []
+        }
+
+        for attr in node.attribute:
+            attr_value = helper.get_attribute_value(attr)
+            # Handle the different attribute types
+            if isinstance(attr_value, bytes):
+                attr_value = attr_value.decode('utf-8', errors='ignore')
+            elif hasattr(attr_value, 'tolist'):
+                attr_value = attr_value.tolist()
+            elif str(type(attr_value)).endswith("RepeatedScalarContainer'>"):
+                attr_value = list(attr_value)
+
+            node_info["attributes"].append({
+                "name": attr.name,
+                "value": attr_value
+            })
+
+        model_info["graph"]["nodes"].append(node_info)
+
+    # Process the initializers (weights)
+    for initializer in model.graph.initializer:
+        # Convert the weight values to a plain list
+        weights = numpy_helper.to_array(initializer).tolist()
+
+        model_info["graph"]["initializers"].append({
+            "name": initializer.name,
+            "data_type": initializer.data_type,
+            "dims": list(initializer.dims),
+            "values": weights  # Note: for large models this can use a lot of memory!
+        })
+
+    # Process the metadata
+    if model.metadata_props:
+        model_info["metadata"] = {}
+        for prop in model.metadata_props:
+            model_info["metadata"][prop.key] = prop.value
+
+    # Custom JSON encoder to handle any remaining non-serializable objects
+    class CustomEncoder(json.JSONEncoder):
+        def default(self, obj):
+            if hasattr(obj, 'tolist'):
+                return obj.tolist()
+            elif str(type(obj)).endswith("RepeatedScalarContainer'>"):
+                return list(obj)
+            return super().default(obj)
+
+    # Save to the JSON file
+    with open(output_json_path, 'w') as f:
+        json.dump(model_info, f, indent=2, cls=CustomEncoder)
+
+    print(f"Model successfully saved to {output_json_path}")
+
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+MODEL_PATH = os.path.join(BASE_DIR, 'docs', 'models', 'GoogLeNet.onnx')
+MODEL_DATA_PATH = os.path.join(BASE_DIR, 'docs', 'jsons', 'googlenet_onnx_model.json')
+onnx_to_json(MODEL_PATH, MODEL_DATA_PATH)
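
Similarly, a small illustrative sketch (not part of the commit) that reads googlenet_onnx_model.json back and summarizes the graph, using the keys of the model_info dictionary built in onnx_to_json:

import json
from collections import Counter

# Illustrative path; adjust to the file produced by parser_onnx.py.
with open('docs/jsons/googlenet_onnx_model.json') as f:
    model_info = json.load(f)

graph = model_info['graph']
print("graph:", graph['name'])
print("inputs:", [(t['name'], t['shape']) for t in graph['inputs']])
print("outputs:", [(t['name'], t['shape']) for t in graph['outputs']])
print("nodes:", len(graph['nodes']), "initializers:", len(graph['initializers']))

# Count the operator types used in the graph.
ops = Counter(node['op_type'] for node in graph['nodes'])
for op, count in ops.most_common(5):
    print(f"  {op}: {count}")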

app/Converters/reader_weights_sample.cpp

Lines changed: 28 additions & 0 deletions
@@ -0,0 +1,28 @@
+#include <iostream>
+
+#include "Weights_Reader/reader_weights.hpp"
+
+int main() {
+  std::string json_file = MODEL_PATH_H5;
+  json model_data = read_json(json_file);
+
+  for (const auto& layer_data : model_data) {
+    int layer_index = layer_data["index"];
+    std::string layer_name = layer_data["name"];
+    std::string layer_type = layer_data["type"];
+
+    std::cout << "Layer " << layer_index << " (" << layer_type << ", "
+              << layer_name << "):" << std::endl;
+
+    try {
+      Tensor tensor =
+          create_tensor_from_json(layer_data["weights"], Type::kFloat);
+      // std::cout << tensor << std::endl;
+    } catch (const std::exception& e) {
+      std::cerr << "Error processing layer " << layer_name << ": " << e.what()
+                << std::endl;
+    }
+  }
+
+  return 0;
+}

app/Converters/requirements.txt

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+tensorflow==2.19.0
+onnx==1.14.1

app/Graph/CMakeLists.txt

Lines changed: 2 additions & 1 deletion
@@ -70,5 +70,6 @@ file(DOWNLOAD
 )

 add_definitions(-DIMAGE1_PATH="${CMAKE_SOURCE_DIR}/docs/input/")
-add_definitions(-DMODEL_PATH="${CMAKE_SOURCE_DIR}/docs/model_data_alexnet_1.json")
+add_definitions(-DMODEL_PATH_H5="${CMAKE_SOURCE_DIR}/docs/jsons/model_data_alexnet_1.json")
+add_definitions(-DMODEL_PATH_GOOGLENET_ONNX="${CMAKE_SOURCE_DIR}/docs/jsons/googlenet_onnx_model.json")
 add_definitions(-DMNIST_PATH="${CMAKE_SOURCE_DIR}/docs/mnist/mnist/test")

app/Graph/build.cpp

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ void build_graph(Tensor& input, Tensor& output, bool comments,
   ImplType impl2 = parallel ? kSTL : kDefault;
   std::vector<std::shared_ptr<Layer>> layers;

-  std::string json_file = MODEL_PATH;
+  std::string json_file = MODEL_PATH_H5;
   json model_data = read_json(json_file);

   if (comments) std::cout << "Loaded model data from JSON." << std::endl;

docs/models/AlexNet-model.h5

1.52 MB
Binary file not shown.

docs/models/GoogLeNet.onnx

25.3 MB
Binary file not shown.
