Skip to content

Commit c47d368

Browse files
authored
add float16 encoding (#22)
Stores the encoding in each Layer. Also adds a dims field per Layer to store the tensor dimensions.
1 parent b326b15 commit c47d368

File tree

1 file changed

+9
-1
lines changed

1 file changed

+9
-1
lines changed

proto/net.proto

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,14 @@ message Weights {
3939
optional float min_val = 1;
4040
optional float max_val = 2;
4141
optional bytes params = 3;
42+
enum Encoding {
43+
UNKNOWN_ENCODING = 0;
44+
LINEAR16 = 1;
45+
FLOAT16 = 2;
46+
BFLOAT16 = 3;
47+
}
48+
optional Encoding encoding = 4;
49+
repeated uint32 dims = 5;
4250
}
4351

4452
message ConvBlock {
@@ -362,7 +370,7 @@ message Format {
362370
UNKNOWN = 0;
363371
LINEAR16 = 1;
364372
}
365-
373+
// Any encoding specified in a Layer overrides this.
366374
optional Encoding weights_encoding = 1;
367375
// If network_format is missing, it's assumed to have
368376
// INPUT_CLASSICAL_112_PLANE / OUTPUT_CLASSICAL / NETWORK_CLASSICAL format.

0 commit comments

Comments
 (0)