Add TransformerParameters proto
PiperOrigin-RevId: 529213840
commit a09e39d431
parent e428bdb7e8
@@ -93,3 +93,8 @@ mediapipe_proto_library(
         "//mediapipe/framework:calculator_proto",
     ],
 )
+
+mediapipe_proto_library(
+    name = "transformer_params_proto",
+    srcs = ["transformer_params.proto"],
+)
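For context, a target that consumes the new message would typically depend on one of the libraries generated by `mediapipe_proto_library`. The sketch below assumes the usual `_cc_proto` naming convention for the generated C++ proto target; the consumer target itself is hypothetical and not part of this change.

    # Hypothetical consumer in the same BUILD file; only ":transformer_params_cc_proto"
    # (the conventionally generated C++ proto target) is implied by this change.
    cc_library(
        name = "transformer_params_consumer",
        srcs = ["transformer_params_consumer.cc"],
        deps = [":transformer_params_cc_proto"],
    )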
@@ -0,0 +1,46 @@
+/* Copyright 2023 The MediaPipe Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+syntax = "proto3";
+
+package mediapipe.tasks.components.processors.proto;
+
+option java_package = "com.google.mediapipe.tasks.components.processors.proto";
+option java_outer_classname = "TransformerParametersProto";
+
+// The parameters of the transformer (https://arxiv.org/pdf/1706.03762.pdf)
+message TransformerParameters {
+  // Batch size of tensors.
+  int32 batch_size = 1;
+
+  // Maximum sequence length of the input/output tensor.
+  int32 max_seq_length = 2;
+
+  // Embedding dimension (or model dimension), `d_model` in the paper.
+  // `d_k` == `d_v` == `d_model`/`h`.
+  int32 embedding_dim = 3;
+
+  // Hidden dimension used in the feedforward layer, `d_ff` in the paper.
+  int32 hidden_dimension = 4;
+
+  // Head dimension, `d_k` or `d_v` in the paper.
+  int32 head_dimension = 5;
+
+  // Number of heads, `h` in the paper.
+  int32 num_heads = 6;
+
+  // Number of stacked transformers, `N` in the paper.
+  int32 num_stacks = 7;
+}
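To make the field relationships concrete, here is an illustrative `TransformerParameters` message in proto text format. The model dimensions follow the "base" configuration from the referenced paper; the batch size and sequence length are arbitrary example values, and none of these are defaults defined by the proto.

    # Illustrative values only, not proto defaults. Model dimensions follow the
    # "base" configuration from the paper; batch size and sequence length are
    # arbitrary example values.
    batch_size: 1
    max_seq_length: 512
    embedding_dim: 512       # d_model
    hidden_dimension: 2048   # d_ff
    head_dimension: 64       # d_k = d_v = d_model / num_heads = 512 / 8
    num_heads: 8             # h
    num_stacks: 6            # N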