# mediapipe-rs/mediapipe/graphs/youtube8m/yt8m_dataset_model_inference.pbtxt
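#
# MediaPipe graph that reads a YouTube-8M SequenceExample from a tfrecord file,
# dequantizes the stored RGB and audio features, and runs YT8M model inference
# to produce a top-k annotation summary.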

input_side_packet: "desired_segment_size"
input_side_packet: "record_index"
input_side_packet: "tfrecord_path"
output_side_packet: "yt8m_id"
output_stream: "annotation_summary"
node {
calculator: "StringToInt32Calculator"
input_side_packet: "record_index"
output_side_packet: "record_index_int"
}
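
# Converts the "desired_segment_size" string side packet to an int32.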
node {
calculator: "StringToInt32Calculator"
input_side_packet: "desired_segment_size"
output_side_packet: "desired_segment_size_int"
}
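
# Reads the SequenceExample at the given record index from the tfrecord file.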
node {
calculator: "TFRecordReaderCalculator"
input_side_packet: "TFRECORD_PATH:tfrecord_path"
input_side_packet: "RECORD_INDEX:record_index_int"
output_side_packet: "SEQUENCE_EXAMPLE:yt8m_sequence_example"
}
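
# Unpacks the video id, segment size, lapped-buffer options, and the quantized
# RGB and audio feature streams from the YT8M SequenceExample.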
node {
calculator: "UnpackYt8mSequenceExampleCalculator"
input_side_packet: "YT8M_SEQUENCE_EXAMPLE:yt8m_sequence_example"
input_side_packet: "DESIRED_SEGMENT_SIZE:desired_segment_size_int"
output_side_packet: "YT8M_ID:yt8m_id"
output_side_packet: "SEGMENT_SIZE:segment_size"
output_side_packet: "LAPPED_TENSOR_BUFFER_CALCULATOR_OPTIONS:lapped_tensor_buffer_calculator_options"
output_stream: "QUANTIZED_RGB_FEATURE:quantized_rgb_feature"
output_stream: "QUANTIZED_AUDIO_FEATURE:quantized_audio_feature"
}
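
# Dequantizes the uint8-encoded RGB features back to floats in [-2, 2].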
node {
calculator: "DequantizeByteArrayCalculator"
input_stream: "ENCODED:quantized_rgb_feature"
output_stream: "FLOAT_VECTOR:rgb_feature_vector"
node_options: {
[type.googleapis.com/mediapipe.DequantizeByteArrayCalculatorOptions]: {
max_quantized_value: 2
min_quantized_value: -2
}
}
}
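
# Dequantizes the uint8-encoded audio features back to floats in [-2, 2].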
node {
calculator: "DequantizeByteArrayCalculator"
input_stream: "ENCODED:quantized_audio_feature"
output_stream: "FLOAT_VECTOR:audio_feature_vector"
node_options: {
[type.googleapis.com/mediapipe.DequantizeByteArrayCalculatorOptions]: {
max_quantized_value: 2
min_quantized_value: -2
}
}
}
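
# Concatenates the RGB and audio features into a single per-frame feature vector.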
node {
calculator: "ConcatenateFloatVectorCalculator"
input_stream: "rgb_feature_vector"
input_stream: "audio_feature_vector"
output_stream: "feature_vector"
}
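
# Converts the concatenated float vector into a tensor.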
node {
calculator: "VectorFloatToTensorCalculator"
input_stream: "feature_vector"
output_stream: "feature_tensor"
}
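
# Buffers the per-frame feature tensors into overlapping segments, using the
# options unpacked from the SequenceExample.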
node {
calculator: "LappedTensorBufferCalculator"
input_stream: "feature_tensor"
input_side_packet: "CALCULATOR_OPTIONS:lapped_tensor_buffer_calculator_options"
output_stream: "lapped_feature_tensor"
}
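
# Emits the segment size side packet as a stream packet at timestamp 0.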
node {
calculator: "SidePacketToStreamCalculator"
input_side_packet: "segment_size"
output_stream: "AT_ZERO:segment_size_int_stream"
}
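
# Converts the segment size integer into a tensor.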
node {
calculator: "VectorIntToTensorCalculator"
input_stream: "SINGLE_INT:segment_size_int_stream"
output_stream: "TENSOR_OUT:segment_size_tensor"
}
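
# Clones the segment-size tensor so a copy is paired with every lapped feature
# tensor packet.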
node {
calculator: "PacketClonerCalculator"
input_stream: "segment_size_tensor"
input_stream: "lapped_feature_tensor"
output_stream: "synced_segment_size_tensor"
}
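
# Loads the TensorFlow SavedModel from /tmp/mediapipe/saved_model and shares
# the session as a side packet.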
node {
calculator: "TensorFlowSessionFromSavedModelCalculator"
output_side_packet: "SESSION:session"
node_options: {
[type.googleapis.com/mediapipe.TensorFlowSessionFromSavedModelCalculatorOptions]: {
saved_model_path: "/tmp/mediapipe/saved_model"
}
}
}
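
# Runs the YT8M model on the number of frames and the RGB/audio feature
# tensors, batching up to 32 packets per inference call.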
node {
  calculator: "TensorFlowInferenceCalculator"
  input_side_packet: "SESSION:session"
  input_stream: "NUM_FRAMES:synced_segment_size_tensor"
  input_stream: "RGB_AND_AUDIO:lapped_feature_tensor"
  output_stream: "PREDICTIONS:prediction_tensor"
  node_options: {
    [type.googleapis.com/mediapipe.TensorFlowInferenceCalculatorOptions]: {
      batch_size: 32
    }
  }
}
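
# Converts the prediction tensor into a vector of floats.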
node {
calculator: "TensorToVectorFloatCalculator"
input_stream: "prediction_tensor"
output_stream: "prediction_vector"
}
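
# Keeps the top 9 predictions and maps them to labels from the label map to
# produce the annotation summary.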
node {
calculator: "TopKScoresCalculator"
input_stream: "SCORES:prediction_vector"
output_stream: "SUMMARY:annotation_summary"
node_options: {
[type.googleapis.com/mediapipe.TopKScoresCalculatorOptions]: {
top_k: 9
label_map_path: "mediapipe/graphs/youtube8m/label_map.txt"
}
}
}