├── .gitmodules ├── tensorflow_serving ├── experimental │ ├── tensorflow │ │ └── ops │ │ │ └── remote_predict │ │ │ ├── WORKSPACE │ │ │ ├── __init__.py │ │ │ └── kernels │ │ │ ├── remote_predict_op_kernel.cc │ │ │ └── prediction_service_grpc_test.cc │ └── example │ │ └── BUILD ├── core │ ├── README.md │ ├── test_util │ │ ├── fake_loader_source_adapter.proto │ │ ├── mock_log_collector.h │ │ ├── mock_storage_path_target.h │ │ ├── mock_loader.h │ │ ├── fake_log_collector.h │ │ ├── mock_server_request_logger.h │ │ ├── mock_prediction_stream_logger.h │ │ ├── fake_storage_path_source_adapter.cc │ │ └── availability_test_util.h │ ├── aspired_version_policy.cc │ ├── prefix_storage_path_source_adapter.cc │ ├── storage_path_test.cc │ ├── manager_wrapper.cc │ ├── storage_path.h │ ├── static_manager.cc │ ├── servable_data_test.cc │ └── aspired_versions_manager_builder.cc ├── servables │ ├── tensorflow │ │ ├── testdata │ │ │ ├── bad_model_config.txt │ │ │ ├── saved_model_counter │ │ │ │ └── 00000123 │ │ │ │ │ ├── variables │ │ │ │ │ ├── variables.data-00000-of-00001 │ │ │ │ │ └── variables.index │ │ │ │ │ └── saved_model.pb │ │ │ ├── saved_model_half_plus_three │ │ │ │ └── 00000123 │ │ │ │ │ ├── assets │ │ │ │ │ └── foo.txt │ │ │ │ │ ├── variables │ │ │ │ │ ├── variables.data-00000-of-00001 │ │ │ │ │ └── variables.index │ │ │ │ │ └── saved_model.pb │ │ │ ├── saved_model_half_plus_two_cpu │ │ │ │ └── 00000123 │ │ │ │ │ ├── assets │ │ │ │ │ └── foo.txt │ │ │ │ │ ├── variables │ │ │ │ │ ├── variables.data-00000-of-00001 │ │ │ │ │ └── variables.index │ │ │ │ │ └── saved_model.pb │ │ │ ├── saved_model_half_plus_two_mkl │ │ │ │ └── 00000123 │ │ │ │ │ ├── assets │ │ │ │ │ └── foo.txt │ │ │ │ │ ├── saved_model.pb │ │ │ │ │ └── variables │ │ │ │ │ ├── variables.index │ │ │ │ │ └── variables.data-00000-of-00001 │ │ │ ├── saved_model_half_plus_two_mlmd │ │ │ │ └── 00000123 │ │ │ │ │ ├── assets │ │ │ │ │ └── foo.txt │ │ │ │ │ ├── assets.extra │ │ │ │ │ └── mlmd_uuid │ │ │ │ │ ├── variables │ │ │ │ │ ├── variables.data-00000-of-00001 │ │ │ │ │ └── variables.index │ │ │ │ │ └── saved_model.pb │ │ │ ├── saved_model_half_plus_two_2_versions │ │ │ │ ├── 00000123 │ │ │ │ │ ├── assets │ │ │ │ │ │ └── foo.txt │ │ │ │ │ ├── variables │ │ │ │ │ │ ├── variables.data-00000-of-00001 │ │ │ │ │ │ └── variables.index │ │ │ │ │ └── saved_model.pb │ │ │ │ └── 00000124 │ │ │ │ │ ├── assets │ │ │ │ │ └── foo.txt │ │ │ │ │ ├── variables │ │ │ │ │ ├── variables.data-00000-of-00001 │ │ │ │ │ └── variables.index │ │ │ │ │ └── saved_model.pb │ │ │ ├── saved_model_half_plus_two_gpu_trt │ │ │ │ └── 00000123 │ │ │ │ │ ├── assets │ │ │ │ │ └── foo.txt │ │ │ │ │ ├── variables │ │ │ │ │ ├── variables.data-00000-of-00001 │ │ │ │ │ └── variables.index │ │ │ │ │ └── saved_model.pb │ │ │ ├── saved_model_half_plus_two_tf2_cpu │ │ │ │ └── 00000123 │ │ │ │ │ ├── assets │ │ │ │ │ └── foo.txt │ │ │ │ │ ├── fingerprint.pb │ │ │ │ │ ├── saved_model.pb │ │ │ │ │ └── variables │ │ │ │ │ ├── variables.index │ │ │ │ │ └── variables.data-00000-of-00001 │ │ │ ├── saved_model_half_plus_two_gpu │ │ │ │ └── 00000123 │ │ │ │ │ ├── variables │ │ │ │ │ ├── variables.data-00000-of-00001 │ │ │ │ │ └── variables.index │ │ │ │ │ └── saved_model.pb │ │ │ ├── saved_model_half_plus_two_cpu_with_empty_saved_model_config │ │ │ │ └── 00000123 │ │ │ │ │ ├── assets.extra │ │ │ │ │ └── saved_model_config.pb │ │ │ │ │ ├── variables │ │ │ │ │ ├── variables.data-00000-of-00001 │ │ │ │ │ └── variables.index │ │ │ │ │ └── saved_model.pb │ │ │ ├── monitoring_config.txt │ │ │ ├── 
saved_model_half_plus_two_cpu_with_saved_model_config │ │ │ │ └── 00000123 │ │ │ │ │ ├── variables │ │ │ │ │ ├── variables.data-00000-of-00001 │ │ │ │ │ └── variables.index │ │ │ │ │ ├── saved_model.pb │ │ │ │ │ └── assets.extra │ │ │ │ │ └── saved_model_config.pb │ │ │ ├── bad_half_plus_two │ │ │ │ └── 00000123 │ │ │ │ │ ├── export │ │ │ │ │ ├── checkpoint │ │ │ │ │ └── export.meta │ │ │ ├── batching_config.txt │ │ │ ├── tf_text_regression │ │ │ │ └── 01 │ │ │ │ │ ├── fingerprint.pb │ │ │ │ │ ├── saved_model.pb │ │ │ │ │ ├── keras_metadata.pb │ │ │ │ │ └── variables │ │ │ │ │ ├── variables.index │ │ │ │ │ └── variables.data-00000-of-00001 │ │ │ ├── parse_example_tflite │ │ │ │ └── 00000123 │ │ │ │ │ └── model.tflite │ │ │ ├── mobilenet_v1_quant_tflite │ │ │ │ └── 00000123 │ │ │ │ │ └── model.tflite │ │ │ ├── saved_model_half_plus_two_tflite │ │ │ │ └── 00000123 │ │ │ │ │ └── model.tflite │ │ │ ├── saved_model_half_plus_two_tflite_with_sigdef │ │ │ │ └── 00000123 │ │ │ │ │ └── model.tflite │ │ │ ├── good_model_config.txt │ │ │ ├── mobilenet_v1_quant_tflite.README │ │ │ ├── tf_text_regression.README │ │ │ └── parse_example_tflite.README │ │ ├── saved_model_bundle_source_adapter.proto │ │ ├── test_util │ │ │ ├── fake_thread_pool_factory.proto │ │ │ ├── BUILD │ │ │ └── fake_thread_pool_factory.cc │ │ ├── thread_pool_factory_config.proto │ │ ├── file_acl.h │ │ ├── run_options.h │ │ ├── oss │ │ │ ├── file_acl.cc │ │ │ ├── file_acl.h │ │ │ ├── run_options.h │ │ │ ├── resource_estimator.cc │ │ │ └── BUILD │ │ ├── resource_estimator.h │ │ ├── saved_model_config.proto │ │ ├── servable_test.cc │ │ ├── remote_op_config_rewriter.proto │ │ ├── predict_response_tensor_serialization_option.h │ │ ├── serving_session.cc │ │ ├── machine_learning_metadata.h │ │ ├── thread_pool_factory.cc │ │ ├── tfrt_multi_inference.h │ │ ├── saved_model_config.h │ │ ├── tfrt_get_model_metadata_impl.h │ │ ├── saved_model_warmup.h │ │ ├── get_model_metadata_impl.h │ │ └── tfrt_saved_model_warmup.h │ └── hashmap │ │ ├── hashmap_source_adapter.proto │ │ └── BUILD ├── g3doc │ ├── images │ │ ├── tb_profile_zoom.png │ │ ├── tb_profile_overview.png │ │ ├── tb_profile_setup_dialog.png │ │ └── predict_sequence_diagram.png │ └── _toc.yaml ├── batching │ ├── testdata │ │ ├── matrix_half_plus_two │ │ │ └── 1 │ │ │ │ └── saved_model.pb │ │ └── BUILD │ ├── test_util │ │ └── BUILD │ ├── threadsafe_status.cc │ └── threadsafe_status_test.cc ├── session_bundle │ ├── testdata │ │ └── half_plus_two │ │ │ └── 00000123 │ │ │ ├── export.meta │ │ │ └── export-00000-of-00001 │ ├── manifest_proto.h │ ├── session_bundle.h │ ├── oss │ │ ├── session_bundle.h │ │ └── BUILD │ └── saved_model_config.h ├── tools │ ├── docker │ │ └── README.md │ └── pip_package │ │ └── BUILD ├── util │ ├── net_http │ │ ├── client │ │ │ └── test_client │ │ │ │ ├── public │ │ │ │ ├── README.md │ │ │ │ └── BUILD │ │ │ │ ├── internal │ │ │ │ ├── README.md │ │ │ │ └── BUILD │ │ │ │ └── testing │ │ │ │ └── BUILD │ │ ├── README.md │ │ ├── compression │ │ │ ├── README.md │ │ │ └── BUILD │ │ ├── internal │ │ │ ├── testing │ │ │ │ ├── BUILD │ │ │ │ └── net_logging_example.cc │ │ │ └── BUILD │ │ ├── server │ │ │ ├── testing │ │ │ │ └── BUILD │ │ │ └── public │ │ │ │ └── BUILD │ │ ├── socket │ │ │ └── testing │ │ │ │ └── BUILD │ │ └── public │ │ │ └── BUILD │ ├── class_registration_test.proto │ ├── test_util │ │ ├── BUILD │ │ └── mock_file_probing_env.h │ ├── hash.cc │ ├── inline_executor.cc │ ├── hash.h │ ├── inline_executor_test.cc │ ├── threadpool_executor.cc │ ├── status_util.h │ 
├── class_registration_util.cc │ ├── executor.h │ ├── class_registration_util.h │ ├── inline_executor.h │ ├── status_util.cc │ ├── file_probing_env.cc │ ├── oss_or_google.h │ ├── retrier.h │ ├── prometheus_exporter.h │ └── threadpool_executor.h ├── model_servers │ ├── test_util │ │ ├── storage_path_error_injecting_source_adapter.proto │ │ └── storage_path_error_injecting_source_adapter.h │ ├── version.cc │ ├── model_platform_types.h │ ├── prediction_service_util.cc │ ├── grpc_status_util.h │ ├── profiler_client.py │ ├── platform_config_util.h │ ├── grpc_status_util.cc │ ├── http_server.h │ ├── prediction_service_util.h │ ├── platform_config_util.cc │ ├── tensorflow_model_server_test_client.py │ └── device_runner_init_stub.cc ├── config │ ├── log_collector_config.proto │ ├── ssl_config.proto │ ├── monitoring_config.proto │ ├── platform_config.proto │ └── logging_config.proto ├── sources │ └── storage_path │ │ ├── static_storage_path_source.proto │ │ └── static_storage_path_source.cc ├── tensorflow_version.bzl ├── apis │ ├── internal │ │ ├── BUILD │ │ └── serialized_input.proto │ ├── status.proto │ ├── model_management.proto │ ├── logging.proto │ ├── regression.proto │ ├── model_service.proto │ ├── get_model_metadata.proto │ ├── prediction_service.proto │ ├── model.proto │ ├── classification.proto │ └── prediction_log.proto ├── BUILD ├── example │ ├── BUILD │ └── resnet_k8s.yaml ├── repo.bzl └── test_util │ └── BUILD ├── third_party ├── tf_text │ └── BUILD ├── tensorflow │ ├── BUILD │ └── tensorflow.patch ├── tf_decision_forests │ ├── BUILD │ └── tf_decision_forests.patch ├── darts_clone │ └── BUILD ├── rapidjson │ └── BUILD ├── boost │ └── BUILD └── icu │ └── README ├── .gitignore ├── AUTHORS ├── tools └── gen_status_stamp.sh └── CONTRIBUTING.md /.gitmodules: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tensorflow_serving/experimental/tensorflow/ops/remote_predict/WORKSPACE: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /third_party/tf_text/BUILD: -------------------------------------------------------------------------------- 1 | # Empty BUILD so this is treated like a package. 2 | -------------------------------------------------------------------------------- /third_party/tensorflow/BUILD: -------------------------------------------------------------------------------- 1 | # Empty BUILD so this is treated like a package. 2 | -------------------------------------------------------------------------------- /tensorflow_serving/core/README.md: -------------------------------------------------------------------------------- 1 | Directory for non-application-specific modules. 2 | -------------------------------------------------------------------------------- /third_party/tf_decision_forests/BUILD: -------------------------------------------------------------------------------- 1 | # Empty BUILD so this is treated like a package. 
2 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/bad_model_config.txt: -------------------------------------------------------------------------------- 1 | improperly formatted file -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_counter/00000123/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_three/00000123/assets/foo.txt: -------------------------------------------------------------------------------- 1 | asset-file-contents -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu/00000123/assets/foo.txt: -------------------------------------------------------------------------------- 1 | asset-file-contents -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_mkl/00000123/assets/foo.txt: -------------------------------------------------------------------------------- 1 | asset-file-contents -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_mlmd/00000123/assets/foo.txt: -------------------------------------------------------------------------------- 1 | asset-file-contents -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_2_versions/00000123/assets/foo.txt: -------------------------------------------------------------------------------- 1 | asset-file-contents -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_2_versions/00000124/assets/foo.txt: -------------------------------------------------------------------------------- 1 | asset-file-contents -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_gpu_trt/00000123/assets/foo.txt: -------------------------------------------------------------------------------- 1 | asset-file-contents -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_mlmd/00000123/assets.extra/mlmd_uuid: -------------------------------------------------------------------------------- 1 | test_mlmd_uuid -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_tf2_cpu/00000123/assets/foo.txt: -------------------------------------------------------------------------------- 1 | asset-file-contents -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_three/00000123/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- 1 | ?@@@@ 
-------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_gpu/00000123/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- 1 | ?@@@ -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_2_versions/00000123/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- 1 | ?@@@ -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_2_versions/00000124/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- 1 | ?@@@ -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu/00000123/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- 1 | ??@@@@@ -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_mlmd/00000123/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- 1 | ??@@@@@ -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu_with_empty_saved_model_config/00000123/assets.extra/saved_model_config.pb: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_gpu_trt/00000123/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- 1 | ??@@@@@ -------------------------------------------------------------------------------- /tensorflow_serving/g3doc/images/tb_profile_zoom.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/g3doc/images/tb_profile_zoom.png -------------------------------------------------------------------------------- /tensorflow_serving/g3doc/images/tb_profile_overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/g3doc/images/tb_profile_overview.png -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/monitoring_config.txt: -------------------------------------------------------------------------------- 1 | prometheus_config: { 2 | enable: true, 3 | path: "/monitoring/prometheus/metrics" 4 | } 5 | -------------------------------------------------------------------------------- /tensorflow_serving/g3doc/images/tb_profile_setup_dialog.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/g3doc/images/tb_profile_setup_dialog.png 
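The monitoring_config.txt testdata above is the text-format MonitoringConfig used in tests to expose Prometheus metrics at /monitoring/prometheus/metrics. As a minimal sketch (not part of this repo), the snippet below scrapes that endpoint from Python, assuming a tensorflow_model_server was started with --monitoring_config_file pointing at such a config and with the default REST port 8501; the host and port are assumptions.

# Minimal sketch: read the Prometheus metrics exposed by a running model server.
# The URL assumes the default REST port and the path from monitoring_config.txt.
import urllib.request

METRICS_URL = "http://localhost:8501/monitoring/prometheus/metrics"

def fetch_metrics(url: str = METRICS_URL) -> str:
    """Returns the raw Prometheus text exposition from the server."""
    with urllib.request.urlopen(url) as response:
        return response.read().decode("utf-8")

if __name__ == "__main__":
    # Print the first few metric lines (request counts, latencies, ...).
    for line in fetch_metrics().splitlines()[:20]:
        print(line)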
-------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu_with_saved_model_config/00000123/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- 1 | ??@@@@@ -------------------------------------------------------------------------------- /tensorflow_serving/g3doc/images/predict_sequence_diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/g3doc/images/predict_sequence_diagram.png -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu_with_empty_saved_model_config/00000123/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- 1 | ??@@@@@ -------------------------------------------------------------------------------- /tensorflow_serving/batching/testdata/matrix_half_plus_two/1/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/batching/testdata/matrix_half_plus_two/1/saved_model.pb -------------------------------------------------------------------------------- /tensorflow_serving/session_bundle/testdata/half_plus_two/00000123/export.meta: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/session_bundle/testdata/half_plus_two/00000123/export.meta -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/bad_half_plus_two/00000123/export: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/bad_half_plus_two/00000123/export -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/batching_config.txt: -------------------------------------------------------------------------------- 1 | max_batch_size { value: 128 } 2 | batch_timeout_micros { value: 0 } 3 | max_enqueued_batches { value: 1000000 } 4 | num_batch_threads { value: 8 } 5 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/bad_half_plus_two/00000123/checkpoint: -------------------------------------------------------------------------------- 1 | model_checkpoint_path: "/tmp/bad_half_plus_two/00000123/export" 2 | all_model_checkpoint_paths: "/tmp/bad_half_plus_two/00000123/export" 3 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/bad_half_plus_two/00000123/export.meta: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/bad_half_plus_two/00000123/export.meta -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/tf_text_regression/01/fingerprint.pb: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/tf_text_regression/01/fingerprint.pb -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/tf_text_regression/01/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/tf_text_regression/01/saved_model.pb -------------------------------------------------------------------------------- /tensorflow_serving/session_bundle/testdata/half_plus_two/00000123/export-00000-of-00001: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/session_bundle/testdata/half_plus_two/00000123/export-00000-of-00001 -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/tf_text_regression/01/keras_metadata.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/tf_text_regression/01/keras_metadata.pb -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/parse_example_tflite/00000123/model.tflite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/parse_example_tflite/00000123/model.tflite -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_counter/00000123/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_counter/00000123/saved_model.pb -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/mobilenet_v1_quant_tflite/00000123/model.tflite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/mobilenet_v1_quant_tflite/00000123/model.tflite -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/tf_text_regression/01/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/tf_text_regression/01/variables/variables.index -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_three/00000123/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_three/00000123/saved_model.pb 
-------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu/00000123/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu/00000123/saved_model.pb -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_gpu/00000123/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_gpu/00000123/saved_model.pb -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_mkl/00000123/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_mkl/00000123/saved_model.pb -------------------------------------------------------------------------------- /tensorflow_serving/tools/docker/README.md: -------------------------------------------------------------------------------- 1 | Files for using the [Docker](http://www.docker.com) container system. 2 | Please see [Docker instructions](https://github.com/tensorflow/serving/blob/master/tensorflow_serving/g3doc/docker.md) 3 | for more info. 4 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_counter/00000123/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_counter/00000123/variables/variables.index -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_mlmd/00000123/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_mlmd/00000123/saved_model.pb -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_tflite/00000123/model.tflite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_tflite/00000123/model.tflite -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_gpu_trt/00000123/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_gpu_trt/00000123/saved_model.pb -------------------------------------------------------------------------------- 
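The saved_model_half_plus_two_* directories above are the canonical test servables: each computes y = 0.5 * x + 2 (the half_plus_three variant adds 3 instead). As a hedged sketch, the snippet below queries such a model over the REST API, assuming a model server is already running locally on the default REST port 8501 and serving the model under the name half_plus_two; the host, port, and model name are assumptions, not something this testdata provides by itself.

# Minimal sketch: call the REST predict endpoint of a locally running server
# that is serving one of the half_plus_two testdata models listed above.
import json
import urllib.request

def predict_half_plus_two(instances):
    """Sends a JSON predict request and returns the list of predictions."""
    body = json.dumps({"instances": instances}).encode("utf-8")
    request = urllib.request.Request(
        "http://localhost:8501/v1/models/half_plus_two:predict",
        data=body,
        headers={"Content-Type": "application/json"},
    )
    with urllib.request.urlopen(request) as response:
        return json.loads(response.read())["predictions"]

if __name__ == "__main__":
    # half_plus_two computes 0.5 * x + 2, so this should print [2.5, 3.0, 4.5].
    print(predict_half_plus_two([1.0, 2.0, 5.0]))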
/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_tf2_cpu/00000123/fingerprint.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_tf2_cpu/00000123/fingerprint.pb -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_tf2_cpu/00000123/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_tf2_cpu/00000123/saved_model.pb -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_2_versions/00000123/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_2_versions/00000123/saved_model.pb -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_2_versions/00000124/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_2_versions/00000124/saved_model.pb -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_three/00000123/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_three/00000123/variables/variables.index -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/tf_text_regression/01/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/tf_text_regression/01/variables/variables.data-00000-of-00001 -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu/00000123/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu/00000123/variables/variables.index -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_gpu/00000123/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_gpu/00000123/variables/variables.index -------------------------------------------------------------------------------- 
/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_mkl/00000123/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_mkl/00000123/variables/variables.index -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_mlmd/00000123/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_mlmd/00000123/variables/variables.index -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_gpu_trt/00000123/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_gpu_trt/00000123/variables/variables.index -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_tf2_cpu/00000123/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_tf2_cpu/00000123/variables/variables.index -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_tflite_with_sigdef/00000123/model.tflite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_tflite_with_sigdef/00000123/model.tflite -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_2_versions/00000123/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_2_versions/00000123/variables/variables.index -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_2_versions/00000124/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_2_versions/00000124/variables/variables.index -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu_with_saved_model_config/00000123/saved_model.pb: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu_with_saved_model_config/00000123/saved_model.pb -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_mkl/00000123/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_mkl/00000123/variables/variables.data-00000-of-00001 -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu_with_empty_saved_model_config/00000123/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu_with_empty_saved_model_config/00000123/saved_model.pb -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_tf2_cpu/00000123/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_tf2_cpu/00000123/variables/variables.data-00000-of-00001 -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu_with_saved_model_config/00000123/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu_with_saved_model_config/00000123/variables/variables.index -------------------------------------------------------------------------------- /tensorflow_serving/core/test_util/fake_loader_source_adapter.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving.test_util; 4 | 5 | // Config proto for FakeLoaderSourceAdapter. 6 | message FakeLoaderSourceAdapterConfig { 7 | // FakeLoaderSourceAdapter's 'suffix' ctor parameter. 
8 | string suffix = 1; 9 | } 10 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu_with_empty_saved_model_config/00000123/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tensorflow/serving/HEAD/tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu_with_empty_saved_model_config/00000123/variables/variables.index -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | /bazel-bin 3 | /bazel-ci_build-cache 4 | /bazel-genfiles 5 | /bazel-out 6 | /bazel-serving 7 | /bazel-tensorflow 8 | /bazel-tensorflow_serving 9 | /bazel-testlogs 10 | /bazel-tf 11 | /bazel-workspace 12 | /third_party/py/numpy/numpy_include 13 | /util/python/python_include 14 | /util/python/python_lib 15 | -------------------------------------------------------------------------------- /third_party/darts_clone/BUILD: -------------------------------------------------------------------------------- 1 | """Darts-clone is a clone of Darts (Double-ARray Trie System).""" 2 | 3 | package(default_visibility = ["//visibility:public"]) 4 | 5 | licenses(["notice"]) 6 | 7 | exports_files(["LICENSE"]) 8 | 9 | cc_library( 10 | name = "darts_clone", 11 | hdrs = [ 12 | "include/darts.h", 13 | ], 14 | ) 15 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/client/test_client/public/README.md: -------------------------------------------------------------------------------- 1 | The client library is still under development, and has yet to be finalized. 2 | 3 | It should be primarily used for writing tests for users of the 4 | ServerRequestInterface and HTTPServerInterface APIs to verify basic 5 | functionality, and the current state should be considered experimental. 6 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/client/test_client/internal/README.md: -------------------------------------------------------------------------------- 1 | The client library is still under development, and has yet to be finalized. 2 | 3 | It should be primarily used for writing tests for users of the 4 | ServerRequestInterface and HTTPServerInterface APIs to verify basic 5 | functionality, and the current state should be considered experimental. 6 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | # This is the official list of TensorFlow Serving authors for copyright purposes. 2 | # This file is distinct from the CONTRIBUTORS files. 3 | # See the latter for an explanation. 4 | 5 | # Names should be added to this file as: 6 | # Name or Organization 7 | # The email address is not required for organizations. 8 | 9 | Google Inc. 10 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/README.md: -------------------------------------------------------------------------------- 1 | A (truly) lightweight OSS HTTP Server 2 | ===================================== 3 | 4 | Design and implementation started in April 2018, within the TF serving code base. 5 | 6 | APIs are subject to change. 
7 | 8 | Questions? 9 | ---------- 10 | 11 | If you have any questions, please send them to [web|awk]@google.com 12 | -------------------------------------------------------------------------------- /tensorflow_serving/model_servers/test_util/storage_path_error_injecting_source_adapter.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving.test_util; 4 | 5 | // Config proto for StoragePathErrorInjectingSourceAdapter. 6 | message StoragePathErrorInjectingSourceAdapterConfig { 7 | // The error message the adapter emits. 8 | string error_message = 1; 9 | } 10 | -------------------------------------------------------------------------------- /third_party/rapidjson/BUILD: -------------------------------------------------------------------------------- 1 | # RapidJSON (rapidjson.org) library. 2 | # from https://github.com/Tencent/rapidjson 3 | 4 | package( 5 | default_visibility = ["//visibility:public"], 6 | ) 7 | 8 | licenses(["notice"]) # BSD/MIT. 9 | 10 | cc_library( 11 | name = "rapidjson", 12 | hdrs = glob(["include/rapidjson/**/*.h"]), 13 | includes = ["include"], 14 | ) 15 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/saved_model_half_plus_two_cpu_with_saved_model_config/00000123/assets.extra/saved_model_config.pb: -------------------------------------------------------------------------------- 1 | 2 | w 3 | P 4 | $ 5 | placeholder_model_name 6 | model_name( 7 | placeholder_model_nametarget_address#! 8 | placeholder_model_named"I 9 | G 10 | 4type.googleapis.com/tensorflow.tfrt_stub.TestConfig1 11 | test config 1 -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/good_model_config.txt: -------------------------------------------------------------------------------- 1 | model_config_list: { 2 | config: { 3 | name: "half_plus_two", 4 | base_path: "${TEST_HALF_PLUS_TWO_DIR}", 5 | model_platform: "tensorflow" 6 | }, 7 | config: { 8 | name: "half_plus_three", 9 | base_path: "${TEST_HALF_PLUS_THREE_DIR}", 10 | model_platform: "tensorflow" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /tensorflow_serving/config/log_collector_config.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | option cc_enable_arenas = true; 5 | 6 | message LogCollectorConfig { 7 | // Identifies the type of the LogCollector we will use to collect these logs. 8 | string type = 1; 9 | 10 | // The prefix to use for the filenames of the logs. 11 | string filename_prefix = 2; 12 | } 13 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/compression/README.md: -------------------------------------------------------------------------------- 1 | Compression support 2 | =================== 3 | 4 | This package provides C++ wrappers for compression libraries such as gzip, br. 5 | 6 | APIs are subject to change but usage outside net_http is expected. 7 | 8 | gzip_zlib.h 9 | --------------------- 10 | 11 | Minimum APIs and implementation to support gzip Content-Encoding via zlib. 
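The good_model_config.txt testdata above is a ModelServerConfig written in protobuf text format, with ${TEST_*_DIR} placeholders that the tests fill in before use. As a minimal sketch, the snippet below builds an equivalent config with the Python bindings shipped in the tensorflow-serving-api package and prints it in the same text format (suitable for --model_config_file); the base paths are hypothetical stand-ins for those placeholders.

# Minimal sketch: build a ModelServerConfig equivalent to good_model_config.txt.
# Requires the tensorflow-serving-api package; the base paths are hypothetical.
from google.protobuf import text_format
from tensorflow_serving.config import model_server_config_pb2

def make_config() -> model_server_config_pb2.ModelServerConfig:
    config = model_server_config_pb2.ModelServerConfig()
    for name, base_path in [
        ("half_plus_two", "/models/half_plus_two"),
        ("half_plus_three", "/models/half_plus_three"),
    ]:
        model = config.model_config_list.config.add()
        model.name = name
        model.base_path = base_path
        model.model_platform = "tensorflow"
    return config

if __name__ == "__main__":
    # Prints the text-format config, ready to pass via --model_config_file.
    print(text_format.MessageToString(make_config()))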
12 | -------------------------------------------------------------------------------- /tensorflow_serving/batching/testdata/BUILD: -------------------------------------------------------------------------------- 1 | # Description: Tensorflow Serving batching test data. 2 | 3 | package( 4 | default_visibility = ["//tensorflow_serving:internal"], 5 | features = ["-layering_check"], 6 | ) 7 | 8 | licenses(["notice"]) 9 | 10 | filegroup( 11 | name = "matrix_half_plus_two", 12 | srcs = glob( 13 | ["matrix_half_plus_two/**/*"], 14 | ), 15 | ) 16 | -------------------------------------------------------------------------------- /tensorflow_serving/sources/storage_path/static_storage_path_source.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | 5 | // Config proto for StaticStoragePathSource. 6 | message StaticStoragePathSourceConfig { 7 | // The single servable name, version number and path to supply statically. 8 | string servable_name = 1; 9 | int64 version_num = 2; 10 | string version_path = 3; 11 | } 12 | -------------------------------------------------------------------------------- /tensorflow_serving/util/class_registration_test.proto: -------------------------------------------------------------------------------- 1 | // Proto messages used by class_registration_test.cc. 2 | 3 | syntax = "proto3"; 4 | 5 | import "google/protobuf/any.proto"; 6 | 7 | package tensorflow.serving; 8 | 9 | message Config1 { 10 | string string_field = 1; 11 | } 12 | 13 | message Config2 { 14 | string string_field = 1; 15 | } 16 | 17 | message MessageWithAny { 18 | google.protobuf.Any any_field = 1; 19 | } 20 | -------------------------------------------------------------------------------- /tensorflow_serving/tensorflow_version.bzl: -------------------------------------------------------------------------------- 1 | """ 2 | Module for build utilities to distiguish different tensorflow versions. 3 | """ 4 | 5 | load("@org_tensorflow//tensorflow:tensorflow.bzl", "VERSION_MAJOR") 6 | 7 | def if_v2(a): 8 | if VERSION_MAJOR == "2": 9 | return a 10 | else: 11 | return [] 12 | 13 | def if_not_v2(a): 14 | if VERSION_MAJOR == "2": 15 | return [] 16 | else: 17 | return a 18 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/internal/testing/BUILD: -------------------------------------------------------------------------------- 1 | # Description: net_http/internal/testing 2 | 3 | load("@rules_cc//cc:cc_binary.bzl", "cc_binary") 4 | 5 | package(default_visibility = ["//visibility:private"]) 6 | 7 | licenses(["notice"]) 8 | 9 | cc_binary( 10 | name = "net_logging_example", 11 | srcs = ["net_logging_example.cc"], 12 | deps = [ 13 | "//tensorflow_serving/util/net_http/internal:net_logging", 14 | ], 15 | ) 16 | -------------------------------------------------------------------------------- /tensorflow_serving/config/ssl_config.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | option cc_enable_arenas = true; 5 | 6 | // Configuration for a secure gRPC channel 7 | message SSLConfig { 8 | // private server key for SSL 9 | string server_key = 1; 10 | // public server certificate 11 | string server_cert = 2; 12 | // custom certificate authority 13 | string custom_ca = 3; 14 | // valid client certificate required ? 
15 | bool client_verify = 4; 16 | }; 17 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/hashmap/hashmap_source_adapter.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | 5 | // Config proto for HashmapSourceAdapter. 6 | message HashmapSourceAdapterConfig { 7 | // The format used by the file containing a serialized hashmap. 8 | enum Format { 9 | // A simple kind of CSV text file of the form: 10 | // key0,value0\n 11 | // key1,value1\n 12 | // ... 13 | SIMPLE_CSV = 0; 14 | } 15 | Format format = 1; 16 | } 17 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/client/test_client/testing/BUILD: -------------------------------------------------------------------------------- 1 | # Description: net_http/client/test_client/testing 2 | 3 | load("@rules_cc//cc:cc_binary.bzl", "cc_binary") 4 | 5 | package(default_visibility = ["//visibility:private"]) 6 | 7 | licenses(["notice"]) 8 | 9 | cc_binary( 10 | name = "evhttp_echo_client", 11 | srcs = ["evhttp_echo_client.cc"], 12 | deps = [ 13 | "//tensorflow_serving/util/net_http/client/test_client/internal:evhttp_client", 14 | ], 15 | ) 16 | -------------------------------------------------------------------------------- /tensorflow_serving/apis/internal/BUILD: -------------------------------------------------------------------------------- 1 | # Internal implementation details of serving APIs. 2 | 3 | load("//tensorflow_serving:serving.bzl", "serving_proto_library") 4 | 5 | package(features = ["-layering_check"]) 6 | 7 | licenses(["notice"]) 8 | 9 | serving_proto_library( 10 | name = "serialized_input_proto", 11 | srcs = ["serialized_input.proto"], 12 | visibility = [ 13 | "//tensorflow_serving:internal", 14 | "@org_tensorflow//tensorflow_ranking/google:__pkg__", 15 | ], 16 | deps = [ 17 | ], 18 | ) 19 | -------------------------------------------------------------------------------- /third_party/tensorflow/tensorflow.patch: -------------------------------------------------------------------------------- 1 | diff --git a/tensorflow/tools/toolchains/python/python_repo.bzl b/tensorflow/tools/toolchains/python/python_repo.bzl 2 | index 47fe64d7b7b..a01a1f19c8b 100644 3 | --- a/tensorflow/tools/toolchains/python/python_repo.bzl 4 | +++ b/tensorflow/tools/toolchains/python/python_repo.bzl 5 | @@ -21,6 +21,7 @@ TF_PYTHON_VERSION = "{}" 6 | HERMETIC_PYTHON_VERSION = "{}" 7 | WHEEL_NAME = "{}" 8 | WHEEL_COLLAB = "{}" 9 | +USE_PYWRAP_RULES = "False" 10 | """ 11 | 12 | def _python_repository_impl(repository_ctx): 13 | -------------------------------------------------------------------------------- /tensorflow_serving/apis/status.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | 5 | import "tensorflow/core/protobuf/error_codes.proto"; 6 | 7 | option cc_enable_arenas = true; 8 | 9 | // Status that corresponds to Status in 10 | // third_party/tensorflow/core/lib/core/status.h. 11 | message StatusProto { 12 | // Error code. 13 | error.Code error_code = 1 [json_name = "error_code"]; 14 | 15 | // Error message. Will only be set if an error was encountered. 
16 | string error_message = 2 [json_name = "error_message"]; 17 | } 18 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/saved_model_bundle_source_adapter.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | import "tensorflow_serving/servables/tensorflow/session_bundle_config.proto"; 4 | 5 | package tensorflow.serving; 6 | 7 | // Config proto for SavedModelBundleSourceAdapter. 8 | message SavedModelBundleSourceAdapterConfig { 9 | // A SessionBundleConfig. 10 | // FOR INTERNAL USE ONLY DURING TRANSITION TO SAVED_MODEL. WILL BE DEPRECATED. 11 | // TODO(b/32248363): Replace this field with the "real" field(s). 12 | SessionBundleConfig legacy_config = 1000; 13 | } 14 | -------------------------------------------------------------------------------- /tensorflow_serving/config/monitoring_config.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | option cc_enable_arenas = true; 5 | 6 | // Configuration for Prometheus monitoring. 7 | message PrometheusConfig { 8 | // Whether to expose Prometheus metrics. 9 | bool enable = 1; 10 | 11 | // The endpoint to expose Prometheus metrics. 12 | // If not specified, PrometheusExporter::kPrometheusPath value is used. 13 | string path = 2; 14 | } 15 | 16 | // Configuration for monitoring. 17 | message MonitoringConfig { 18 | PrometheusConfig prometheus_config = 1; 19 | } 20 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/server/testing/BUILD: -------------------------------------------------------------------------------- 1 | # Description: net_http/server/testing 2 | 3 | load("@rules_cc//cc:cc_binary.bzl", "cc_binary") 4 | 5 | package(default_visibility = ["//visibility:private"]) 6 | 7 | licenses(["notice"]) 8 | 9 | cc_binary( 10 | name = "evhttp_echo_server", 11 | srcs = ["evhttp_echo_server.cc"], 12 | deps = [ 13 | "//tensorflow_serving/util/net_http/server/public:http_server", 14 | "//tensorflow_serving/util/net_http/server/public:http_server_api", 15 | "@com_google_absl//absl/memory", 16 | "@com_google_absl//absl/strings", 17 | ], 18 | ) 19 | -------------------------------------------------------------------------------- /third_party/boost/BUILD: -------------------------------------------------------------------------------- 1 | # Build Boost library. 
2 | # https://github.com/boostorg/boost 3 | 4 | package(default_visibility = ["//visibility:public"]) 5 | 6 | licenses(["notice"]) # Boost Software License 7 | 8 | cc_library( 9 | name = "boost", 10 | srcs = glob([ 11 | "libs/*/include/**/*.hpp", 12 | "libs/*/include/**/*.h", 13 | "libs/*/*/include/**/*.hpp", 14 | "libs/*/*/include/**/*.h", 15 | ]), 16 | includes = glob( 17 | [ 18 | "libs/*/include", 19 | "libs/*/*/include", 20 | ], 21 | exclude_directories = 0, 22 | ), 23 | ) 24 | -------------------------------------------------------------------------------- /tensorflow_serving/apis/model_management.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | 5 | import "tensorflow_serving/apis/status.proto"; 6 | import "tensorflow_serving/config/model_server_config.proto"; 7 | 8 | option cc_enable_arenas = true; 9 | 10 | message ReloadConfigRequest { 11 | ModelServerConfig config = 1; 12 | repeated string metric_names = 2; 13 | } 14 | 15 | message ReloadConfigResponse { 16 | StatusProto status = 1; 17 | repeated Metric metric = 2; 18 | } 19 | 20 | message Metric { 21 | string name = 1; 22 | oneof value_increase { 23 | int64 int64_value_increase = 2; 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /tensorflow_serving/util/test_util/BUILD: -------------------------------------------------------------------------------- 1 | # Description: testing utils for Tensorflow Serving utils. 2 | 3 | load("@rules_cc//cc:cc_library.bzl", "cc_library") 4 | 5 | package( 6 | default_visibility = [ 7 | "//tensorflow_serving:internal", 8 | ], 9 | features = ["-layering_check"], 10 | ) 11 | 12 | licenses(["notice"]) 13 | 14 | cc_library( 15 | name = "mock_file_probing_env", 16 | testonly = 1, 17 | hdrs = ["mock_file_probing_env.h"], 18 | deps = [ 19 | "//tensorflow_serving/util:file_probing_env", 20 | "@com_google_googletest//:gtest", 21 | "@org_tensorflow//tensorflow/core:lib", 22 | ], 23 | ) 24 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/socket/testing/BUILD: -------------------------------------------------------------------------------- 1 | # Description: net_http/socket testing utils 2 | 3 | load("@rules_cc//cc:cc_binary.bzl", "cc_binary") 4 | 5 | package(default_visibility = ["//visibility:private"]) 6 | 7 | licenses(["notice"]) 8 | 9 | cc_binary( 10 | name = "ev_print_req_server", 11 | srcs = ["ev_print_req_server.cc"], 12 | deps = [ 13 | "@com_github_libevent_libevent//:libevent", 14 | "@com_google_absl//absl/strings", 15 | ], 16 | ) 17 | 18 | cc_binary( 19 | name = "ev_fetch_client", 20 | srcs = ["ev_fetch_client.cc"], 21 | deps = [ 22 | "@com_github_libevent_libevent//:libevent", 23 | ], 24 | ) 25 | -------------------------------------------------------------------------------- /tensorflow_serving/config/platform_config.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | option cc_enable_arenas = true; 5 | 6 | import "google/protobuf/any.proto"; 7 | 8 | // Configuration for a servable platform e.g. tensorflow or other ML systems. 9 | message PlatformConfig { 10 | // The config proto for a SourceAdapter in the StoragePathSourceAdapter 11 | // registry. 
12 | google.protobuf.Any source_adapter_config = 1; 13 | }; 14 | 15 | message PlatformConfigMap { 16 | // A map from a platform name to a platform config. The platform name is used 17 | // in ModelConfig.model_platform. 18 | map<string, PlatformConfig> platform_configs = 1; 19 | }; 20 | -------------------------------------------------------------------------------- /tensorflow_serving/BUILD: -------------------------------------------------------------------------------- 1 | # Description: Tensorflow Serving. 2 | 3 | package(default_visibility = ["//tensorflow_serving:internal"]) 4 | 5 | licenses(["notice"]) 6 | 7 | exports_files(["LICENSE"]) 8 | 9 | # open source marker; do not remove 10 | 11 | package_group( 12 | name = "internal", 13 | packages = [ 14 | "//learning/gemini/deployment/disaggregation/...", 15 | "//tensorflow_serving/...", 16 | ], 17 | ) 18 | 19 | filegroup( 20 | name = "all_files", 21 | srcs = glob( 22 | ["**/*"], 23 | exclude = [ 24 | "**/METADATA", 25 | "**/OWNERS", 26 | "g3doc/sitemap.md", 27 | ], 28 | ), 29 | ) 30 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/internal/BUILD: -------------------------------------------------------------------------------- 1 | # Description: shared code for net_http 2 | 3 | load("@rules_cc//cc:cc_library.bzl", "cc_library") 4 | 5 | package(default_visibility = [ 6 | "//tensorflow_serving/util/net_http:__subpackages__", 7 | ]) 8 | 9 | licenses(["notice"]) 10 | 11 | cc_library( 12 | name = "fixed_thread_pool", 13 | testonly = 1, 14 | hdrs = ["fixed_thread_pool.h"], 15 | deps = [ 16 | "@com_google_absl//absl/base:core_headers", 17 | "@com_google_absl//absl/synchronization", 18 | ], 19 | ) 20 | 21 | cc_library( 22 | name = "net_logging", 23 | srcs = ["net_logging.cc"], 24 | hdrs = ["net_logging.h"], 25 | deps = [ 26 | "@com_google_absl//absl/base:config", 27 | "@com_google_absl//absl/base:core_headers", 28 | "@com_google_absl//absl/base:log_severity", 29 | ], 30 | ) 31 | -------------------------------------------------------------------------------- /tensorflow_serving/apis/logging.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | 5 | import "tensorflow_serving/apis/model.proto"; 6 | import "tensorflow_serving/config/logging_config.proto"; 7 | 8 | option cc_enable_arenas = true; 9 | 10 | // Metadata logged along with the request logs. 11 | message LogMetadata { 12 | ModelSpec model_spec = 1; 13 | SamplingConfig sampling_config = 2; 14 | // List of tags used to load the relevant MetaGraphDef from SavedModel. 15 | repeated string saved_model_tags = 3; 16 | int64 timestamp_secs = 4; // Seconds since epoch. 17 | string dc = 5; // Datacenter where the request was logged. 18 | string request_origin = 6; // Request origin identifier. 19 | string request_criticality = 7; // Request QoS. 20 | // An aggregation key that can be used for anonymization. 21 | int64 safe_aggregation_privacy_key = 8; 22 | } 23 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/test_util/fake_thread_pool_factory.proto: -------------------------------------------------------------------------------- 1 | /* Copyright 2020 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | syntax = "proto3"; 17 | 18 | package tensorflow.serving.test_util; 19 | 20 | // Config proto for FakeThreadPoolFactory. 21 | message FakeThreadPoolFactoryConfig {} 22 | -------------------------------------------------------------------------------- /tensorflow_serving/g3doc/_toc.yaml: -------------------------------------------------------------------------------- 1 | toc: 2 | - title: TensorFlow Serving with Docker 3 | path: /tfx/serving/docker 4 | - title: Installation 5 | path: /tfx/serving/setup 6 | - title: Serve a TensorFlow model 7 | path: /tfx/serving/serving_basic 8 | - title: Architecture 9 | path: /tfx/serving/architecture 10 | - title: Advanced model server configuration 11 | path: /tfx/serving/serving_config 12 | - title: Build a TensorFlow ModelServer 13 | path: /tfx/serving/serving_advanced 14 | - title: Use TensorFlow Serving with Kubernetes 15 | path: /tfx/serving/serving_kubernetes 16 | - title: Create a new kind of servable 17 | path: /tfx/serving/custom_servable 18 | - title: Create a module that discovers new servable paths 19 | path: /tfx/serving/custom_source 20 | - title: Serving TensorFlow models with custom ops 21 | path: /tfx/serving/custom_op 22 | - title: SignatureDefs in SavedModel for TensorFlow Serving 23 | path: /tfx/serving/signature_defs 24 | -------------------------------------------------------------------------------- /tensorflow_serving/model_servers/version.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2018 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/model_servers/version.h" 17 | 18 | const char kTFS_SCM_Revision[] = TF_MODELSERVER_VERSION_STRING; 19 | 20 | extern "C" { 21 | const char* TF_Serving_Version() { return kTFS_SCM_Revision; } 22 | } 23 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/client/test_client/internal/BUILD: -------------------------------------------------------------------------------- 1 | # Description: a lightweight http client 2 | 3 | load("@rules_cc//cc:cc_library.bzl", "cc_library") 4 | 5 | package(default_visibility = [ 6 | "//tensorflow_serving/util/net_http:__subpackages__", 7 | ]) 8 | 9 | licenses(["notice"]) 10 | 11 | cc_library( 12 | name = "evhttp_client", 13 | srcs = [ 14 | "evhttp_connection.cc", 15 | ], 16 | hdrs = [ 17 | "evhttp_connection.h", 18 | ], 19 | deps = [ 20 | "//tensorflow_serving/util/net_http/client/test_client/public:http_client_api", 21 | "//tensorflow_serving/util/net_http/internal:net_logging", 22 | "//tensorflow_serving/util/net_http/public:shared_files", 23 | "//tensorflow_serving/util/net_http/server/public:http_server_api", 24 | "@com_github_libevent_libevent//:libevent", 25 | "@com_google_absl//absl/strings", 26 | "@com_google_absl//absl/synchronization", 27 | ], 28 | ) 29 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/mobilenet_v1_quant_tflite.README: -------------------------------------------------------------------------------- 1 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================= 15 | 16 | This model is used to test model serving on TFLite session. 17 | It's extracted from http://download.tensorflow.org/models/mobilenet_v1_2018_08_02/mobilenet_v1_1.0_224_quant.tgz 18 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/tf_text_regression.README: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | 16 | This model is used to test the integration with TF Text, and is updated using 17 | this script: 18 | https://github.com/tensorflow/text/blob/master/oss_scripts/model_server/save_models.py 19 | 20 | -------------------------------------------------------------------------------- /tensorflow_serving/example/BUILD: -------------------------------------------------------------------------------- 1 | # Description: Tensorflow Serving examples. 2 | 3 | load("@rules_cc//cc:cc_binary.bzl", "cc_binary") 4 | 5 | package( 6 | default_visibility = ["//tensorflow_serving:internal"], 7 | features = ["no_layering_check"], 8 | ) 9 | 10 | licenses(["notice"]) 11 | 12 | filegroup( 13 | name = "all_files", 14 | srcs = glob( 15 | ["**/*"], 16 | exclude = [ 17 | "**/METADATA", 18 | "**/OWNERS", 19 | ], 20 | ), 21 | ) 22 | 23 | cc_binary( 24 | name = "resnet_client_cc", 25 | srcs = [ 26 | "resnet_client.cc", 27 | ], 28 | deps = [ 29 | "//tensorflow_serving/apis:prediction_service_cc_proto", 30 | "@com_github_grpc_grpc//:grpc++", 31 | "@com_google_protobuf//:protobuf_lite", 32 | "@org_tensorflow//tensorflow/core:framework", 33 | "@org_tensorflow//tensorflow/core:lib", 34 | "@org_tensorflow//tensorflow/core/platform:jpeg", 35 | ], 36 | ) 37 | -------------------------------------------------------------------------------- /tensorflow_serving/apis/regression.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | 5 | import "tensorflow_serving/apis/input.proto"; 6 | import "tensorflow_serving/apis/model.proto"; 7 | 8 | option cc_enable_arenas = true; 9 | 10 | // Regression result for a single item (tensorflow.Example). 11 | message Regression { 12 | float value = 1; 13 | } 14 | 15 | // Contains one result per input example, in the same order as the input in 16 | // RegressionRequest. 17 | message RegressionResult { 18 | repeated Regression regressions = 1; 19 | } 20 | 21 | // RPC interfaces. 22 | 23 | message RegressionRequest { 24 | // Model Specification. If version is not specified, will use the latest 25 | // (numerical) version. 26 | ModelSpec model_spec = 1; 27 | 28 | // Input data. 29 | tensorflow.serving.Input input = 2; 30 | } 31 | 32 | message RegressionResponse { 33 | // Effective Model Specification used for regression. 34 | ModelSpec model_spec = 2; 35 | 36 | RegressionResult result = 1; 37 | } 38 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/testdata/parse_example_tflite.README: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | 16 | This model is used to test TFLite parse example operations for tflite_session 17 | and is updated using 18 | ```sh 19 | bazel run -c opt parse_example_tflite_with_string 20 | cp /tmp/parse_example_tflite parse_example_tflite/00000123/model.tflite 21 | ``` 22 | 23 | -------------------------------------------------------------------------------- /tensorflow_serving/util/hash.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/util/hash.h" 17 | 18 | namespace tensorflow { 19 | namespace serving { 20 | 21 | uint64_t HashCombine(const uint64_t hash1, const uint64_t hash2) { 22 | return hash1 ^ (hash2 + 0x9e3779b97f4a7800 + (hash1 << 10) + (hash1 >> 4)); 23 | } 24 | 25 | } // namespace serving 26 | } // namespace tensorflow 27 | -------------------------------------------------------------------------------- /tensorflow_serving/apis/model_service.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | option cc_enable_arenas = true; 4 | 5 | import "tensorflow_serving/apis/get_model_status.proto"; 6 | import "tensorflow_serving/apis/model_management.proto"; 7 | 8 | package tensorflow.serving; 9 | 10 | // ModelService provides methods to query and update the state of the server, 11 | // e.g. which models/versions are being served. 12 | service ModelService { 13 | // Gets status of model. If the ModelSpec in the request does not specify 14 | // version, information about all versions of the model will be returned. If 15 | // the ModelSpec in the request does specify a version, the status of only 16 | // that version will be returned. 17 | rpc GetModelStatus(GetModelStatusRequest) returns (GetModelStatusResponse); 18 | 19 | // Reloads the set of served models. The new config supersedes the old one, 20 | // so if a model is omitted from the new config it will be unloaded and no 21 | // longer served. 22 | rpc HandleReloadConfigRequest(ReloadConfigRequest) 23 | returns (ReloadConfigResponse); 24 | } 25 | -------------------------------------------------------------------------------- /tensorflow_serving/config/logging_config.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | 5 | import "google/protobuf/any.proto"; 6 | import "tensorflow_serving/config/log_collector_config.proto"; 7 | 8 | option cc_enable_arenas = true; 9 | 10 | message SamplingConfig { 11 | // Requests will be logged uniformly at random with this probability. 12 | // Valid range: [0, 1.0]. 13 | double sampling_rate = 1; 14 | 15 | // Attributes of requests that can be optionally sampled. 
16 | // Note: Enabling more attributes will increase logging storage requirements. 17 | enum Attributes { 18 | ATTR_DEFAULT = 0x0; 19 | ATTR_REQUEST_ORIGIN = 0x1; 20 | ATTR_REQUEST_CRITICALITY = 0x2; 21 | } 22 | // Bitwise OR of above attributes 23 | int32 attributes = 2; 24 | } 25 | 26 | // Configuration for logging query/responses. 27 | message LoggingConfig { 28 | LogCollectorConfig log_collector_config = 1; 29 | SamplingConfig sampling_config = 2; 30 | // Additional logging config that can be processed by the logger. 31 | google.protobuf.Any custom_logging_config = 3; 32 | } 33 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/client/test_client/public/BUILD: -------------------------------------------------------------------------------- 1 | # Description: APIs for experimental testing of net_http server instances 2 | 3 | load("@rules_cc//cc:cc_library.bzl", "cc_library") 4 | 5 | package(default_visibility = [ 6 | ":http_client_users", 7 | "//tensorflow_serving/util/net_http:__subpackages__", 8 | ]) 9 | 10 | package_group( 11 | name = "http_client_users", 12 | packages = ["//..."], 13 | ) 14 | 15 | licenses(["notice"]) 16 | 17 | cc_library( 18 | name = "http_client_api", 19 | srcs = [], 20 | hdrs = [ 21 | "httpclient_interface.h", 22 | ], 23 | deps = [ 24 | "//tensorflow_serving/util/net_http/public:shared_files", 25 | "//tensorflow_serving/util/net_http/server/public:http_server_api", 26 | ], 27 | ) 28 | 29 | cc_library( 30 | name = "http_client", 31 | hdrs = [ 32 | "httpclient.h", 33 | ], 34 | deps = [ 35 | ":http_client_api", 36 | "//tensorflow_serving/util/net_http/client/test_client/internal:evhttp_client", 37 | "@com_google_absl//absl/memory", 38 | ], 39 | ) 40 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/server/public/BUILD: -------------------------------------------------------------------------------- 1 | # Description: a lightweight http server and related utils to support Web clients 2 | 3 | load("@rules_cc//cc:cc_library.bzl", "cc_library") 4 | 5 | package(default_visibility = [ 6 | "//tensorflow_serving:internal", 7 | "//tensorflow_serving/util/net_http:__subpackages__", 8 | "//tensorflow_serving/util/net_http/public:http_server_clients", 9 | ]) 10 | 11 | licenses(["notice"]) 12 | 13 | cc_library( 14 | name = "http_server_api", 15 | hdrs = [ 16 | "httpserver_interface.h", 17 | "server_request_interface.h", 18 | ], 19 | deps = [ 20 | "//tensorflow_serving/util/net_http/public:shared_files", 21 | "@com_google_absl//absl/strings", 22 | "@com_google_absl//absl/time", 23 | ], 24 | ) 25 | 26 | cc_library( 27 | name = "http_server", 28 | hdrs = [ 29 | "httpserver.h", 30 | ], 31 | deps = [ 32 | ":http_server_api", 33 | "//tensorflow_serving/util/net_http/server/internal:evhttp_server", 34 | "@com_google_absl//absl/memory", 35 | ], 36 | ) 37 | -------------------------------------------------------------------------------- /tensorflow_serving/util/inline_executor.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/util/inline_executor.h" 17 | 18 | #include <functional> 19 | 20 | namespace tensorflow { 21 | namespace serving { 22 | 23 | InlineExecutor::InlineExecutor() {} 24 | 25 | InlineExecutor::~InlineExecutor() {} 26 | 27 | void InlineExecutor::Schedule(std::function<void()> fn) { fn(); } 28 | 29 | } // namespace serving 30 | } // namespace tensorflow 31 | -------------------------------------------------------------------------------- /tensorflow_serving/model_servers/model_platform_types.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_MODEL_SERVERS_MODEL_PLATFORM_TYPES_H_ 17 | #define TENSORFLOW_SERVING_MODEL_SERVERS_MODEL_PLATFORM_TYPES_H_ 18 | 19 | namespace tensorflow { 20 | namespace serving { 21 | 22 | constexpr char kTensorFlowModelPlatform[] = "tensorflow"; 23 | 24 | } // namespace serving 25 | } // namespace tensorflow 26 | 27 | #endif // TENSORFLOW_SERVING_MODEL_SERVERS_MODEL_PLATFORM_TYPES_H_ 28 | -------------------------------------------------------------------------------- /tensorflow_serving/util/hash.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_UTIL_HASH_H_ 17 | #define TENSORFLOW_SERVING_UTIL_HASH_H_ 18 | 19 | #include "tensorflow/core/platform/types.h" 20 | 21 | namespace tensorflow { 22 | namespace serving { 23 | 24 | // Combines 2 hashes and returns a 3rd one.
25 | uint64_t HashCombine(uint64_t hash1, uint64_t hash2); 26 | 27 | } // namespace serving 28 | } // namespace tensorflow 29 | 30 | #endif // TENSORFLOW_SERVING_UTIL_HASH_H_ 31 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/thread_pool_factory_config.proto: -------------------------------------------------------------------------------- 1 | /* Copyright 2020 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | syntax = "proto3"; 17 | 18 | package tensorflow.serving; 19 | 20 | import "google/protobuf/any.proto"; 21 | 22 | option cc_enable_arenas = true; 23 | 24 | // Configuration for a thread pool factory. 25 | message ThreadPoolFactoryConfig { 26 | // The config proto for a ThreadPoolFactory in the ThreadPoolFactory registry. 27 | google.protobuf.Any thread_pool_factory_config = 1; 28 | } 29 | -------------------------------------------------------------------------------- /tensorflow_serving/apis/get_model_metadata.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | option cc_enable_arenas = true; 5 | 6 | import "google/protobuf/any.proto"; 7 | import "tensorflow/core/protobuf/meta_graph.proto"; 8 | import "tensorflow_serving/apis/model.proto"; 9 | 10 | // Message returned for "signature_def" field. 11 | message SignatureDefMap { 12 | map<string, SignatureDef> signature_def = 1; 13 | }; 14 | 15 | message GetModelMetadataRequest { 16 | // Model Specification indicating which model we are querying for metadata. 17 | // If version is not specified, will use the latest (numerical) version. 18 | ModelSpec model_spec = 1; 19 | // Metadata fields to get. Currently supported: "signature_def". 20 | repeated string metadata_field = 2; 21 | } 22 | 23 | message GetModelMetadataResponse { 24 | // Model Specification indicating which model this metadata belongs to. 25 | ModelSpec model_spec = 1; 26 | // Map of metadata field name to metadata field. The options for metadata 27 | // field name are listed in GetModelMetadataRequest. Currently supported: 28 | // "signature_def".
29 | map<string, google.protobuf.Any> metadata = 2; 30 | } 31 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/compression/BUILD: -------------------------------------------------------------------------------- 1 | # Description: compression support libraries 2 | 3 | load("@rules_cc//cc:cc_library.bzl", "cc_library") 4 | load("@rules_cc//cc:cc_test.bzl", "cc_test") 5 | 6 | package(default_visibility = [ 7 | "//tensorflow_serving:internal", 8 | "//tensorflow_serving/util/net_http:__subpackages__", 9 | ]) 10 | 11 | licenses(["notice"]) 12 | 13 | # C++ lib based on zlib for gzip support 14 | cc_library( 15 | name = "gzip_zlib", 16 | srcs = [ 17 | "gzip_zlib.cc", 18 | ], 19 | hdrs = [ 20 | "gzip_zlib.h", 21 | ], 22 | deps = [ 23 | "//tensorflow_serving/util/net_http/internal:net_logging", 24 | "@com_google_absl//absl/base", 25 | "@com_google_absl//absl/base:core_headers", 26 | "@com_google_absl//absl/strings", 27 | "@zlib", 28 | ], 29 | ) 30 | 31 | cc_test( 32 | name = "gzip_zlib_test", 33 | size = "large", 34 | srcs = ["gzip_zlib_test.cc"], 35 | features = ["-layering_check"], 36 | deps = [ 37 | ":gzip_zlib", 38 | "//tensorflow_serving/core/test_util:test_main", 39 | "@com_google_absl//absl/strings", 40 | ], 41 | ) 42 | -------------------------------------------------------------------------------- /tensorflow_serving/experimental/tensorflow/ops/remote_predict/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Ops and modules related to RemotePredict. 16 | 17 | @@run 18 | """ 19 | from __future__ import absolute_import 20 | from __future__ import division 21 | from __future__ import print_function 22 | from tensorflow.python.util.all_util import remove_undocumented 23 | from tensorflow_serving.experimental.tensorflow.ops.remote_predict.python.ops.remote_predict_ops import run 24 | 25 | remove_undocumented(__name__) 26 | -------------------------------------------------------------------------------- /tensorflow_serving/session_bundle/manifest_proto.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2019 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License.
14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_SESSION_BUNDLE_MANIFEST_PROTO_H_ 17 | #define TENSORFLOW_SERVING_SESSION_BUNDLE_MANIFEST_PROTO_H_ 18 | 19 | #include "tensorflow_serving/util/oss_or_google.h" 20 | 21 | #ifdef TENSORFLOW_SERVING_GOOGLE 22 | #include "tensorflow_serving/session_bundle/google/manifest.pb.h" 23 | #else 24 | #include "tensorflow_serving/session_bundle/oss/manifest.pb.h" 25 | #endif 26 | 27 | #endif // TENSORFLOW_SERVING_SESSION_BUNDLE_MANIFEST_PROTO_H_ 28 | -------------------------------------------------------------------------------- /tensorflow_serving/session_bundle/session_bundle.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2019 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_SESSION_BUNDLE_SESSION_BUNDLE_H_ 17 | #define TENSORFLOW_SERVING_SESSION_BUNDLE_SESSION_BUNDLE_H_ 18 | 19 | #include "tensorflow_serving/util/oss_or_google.h" 20 | 21 | #ifdef TENSORFLOW_SERVING_GOOGLE 22 | #include "tensorflow/contrib/session_bundle/session_bundle.h" 23 | #else 24 | #include "tensorflow_serving/session_bundle/oss/session_bundle.h" 25 | #endif 26 | 27 | #endif // TENSORFLOW_SERVING_SESSION_BUNDLE_SESSION_BUNDLE_H_ 28 | -------------------------------------------------------------------------------- /tensorflow_serving/util/inline_executor_test.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/util/inline_executor.h" 17 | 18 | #include 19 | 20 | namespace tensorflow { 21 | namespace serving { 22 | namespace { 23 | 24 | TEST(InlineExecutorTest, Executes) { 25 | InlineExecutor inline_executor; 26 | 27 | int total_calls = 0; 28 | inline_executor.Schedule([&]() { ++total_calls; }); 29 | EXPECT_EQ(1, total_calls); 30 | } 31 | 32 | } // namespace 33 | } // namespace serving 34 | } // namespace tensorflow 35 | -------------------------------------------------------------------------------- /tensorflow_serving/model_servers/prediction_service_util.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2022 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/model_servers/prediction_service_util.h" 17 | 18 | #include "grpc/grpc.h" 19 | 20 | namespace tensorflow { 21 | namespace serving { 22 | 23 | int DeadlineToTimeoutMillis(const gpr_timespec deadline) { 24 | return gpr_time_to_millis( 25 | gpr_time_sub(gpr_convert_clock_type(deadline, GPR_CLOCK_MONOTONIC), 26 | gpr_now(GPR_CLOCK_MONOTONIC))); 27 | } 28 | 29 | } // namespace serving 30 | } // namespace tensorflow 31 | -------------------------------------------------------------------------------- /tensorflow_serving/apis/prediction_service.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | option cc_enable_arenas = true; 5 | 6 | import "tensorflow_serving/apis/classification.proto"; 7 | import "tensorflow_serving/apis/get_model_metadata.proto"; 8 | import "tensorflow_serving/apis/inference.proto"; 9 | import "tensorflow_serving/apis/predict.proto"; 10 | import "tensorflow_serving/apis/regression.proto"; 11 | 12 | // open source marker; do not remove 13 | // PredictionService provides access to machine-learned models loaded by 14 | // model_servers. 15 | service PredictionService { 16 | // Classify. 17 | rpc Classify(ClassificationRequest) returns (ClassificationResponse); 18 | 19 | // Regress. 20 | rpc Regress(RegressionRequest) returns (RegressionResponse); 21 | 22 | // Predict -- provides access to loaded TensorFlow model. 23 | rpc Predict(PredictRequest) returns (PredictResponse); 24 | 25 | // MultiInference API for multi-headed models. 26 | rpc MultiInference(MultiInferenceRequest) returns (MultiInferenceResponse); 27 | 28 | // GetModelMetadata - provides access to metadata for loaded models. 
29 | rpc GetModelMetadata(GetModelMetadataRequest) 30 | returns (GetModelMetadataResponse); 31 | } 32 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/file_acl.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2023 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_FILE_ACL_H_ 17 | #define THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_FILE_ACL_H_ 18 | 19 | #include "tensorflow_serving/util/oss_or_google.h" 20 | 21 | #ifdef TENSORFLOW_SERVING_GOOGLE 22 | #include "tensorflow_serving/servables/tensorflow/google/file_acl.h" 23 | #else 24 | #include "tensorflow_serving/servables/tensorflow/oss/file_acl.h" 25 | #endif 26 | 27 | #endif // THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_FILE_ACL_H_ 28 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/run_options.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2020 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_RUN_OPTIONS_H_ 17 | #define THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_RUN_OPTIONS_H_ 18 | 19 | #include "tensorflow_serving/util/oss_or_google.h" 20 | 21 | #ifdef TENSORFLOW_SERVING_GOOGLE 22 | #include "tensorflow_serving/servables/tensorflow/google/run_options.h" 23 | #else 24 | #include "tensorflow_serving/servables/tensorflow/oss/run_options.h" 25 | #endif 26 | 27 | #endif // THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_RUN_OPTIONS_H_ 28 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/internal/testing/net_logging_example.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2019 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include 17 | #include 18 | 19 | #include "tensorflow_serving/util/net_http/internal/net_logging.h" 20 | 21 | int main(int argc, char** argv) { 22 | NET_LOG(INFO, "started!"); 23 | 24 | size_t size = 100; 25 | NET_LOG(ERROR, "read less than specified bytes : %zu", size); 26 | 27 | const char* url = "/url"; 28 | NET_LOG(WARNING, "%s: read less than specified bytes : %zu", url, size); 29 | 30 | NET_LOG(FATAL, "aborted!"); 31 | 32 | return 0; // unexpected 33 | } 34 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/oss/file_acl.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2023 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/servables/tensorflow/oss/file_acl.h" 17 | 18 | #include 19 | 20 | #include "absl/status/status.h" 21 | #include "tensorflow_serving/core/servable_id.h" 22 | 23 | namespace tensorflow { 24 | namespace serving { 25 | 26 | absl::Status RegisterModelRoot(const ServableId& servable_id, 27 | std::string_view root_path) { 28 | // Unimplemented 29 | return absl::OkStatus(); 30 | } 31 | 32 | } // namespace serving 33 | } // namespace tensorflow 34 | -------------------------------------------------------------------------------- /tensorflow_serving/model_servers/grpc_status_util.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2018 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_MODEL_SERVERS_GRPC_STATUS_UTIL_H_ 17 | #define TENSORFLOW_SERVING_MODEL_SERVERS_GRPC_STATUS_UTIL_H_ 18 | 19 | #include "grpcpp/support/status.h" 20 | #include "tensorflow/core/lib/core/status.h" 21 | 22 | namespace tensorflow { 23 | namespace serving { 24 | 25 | // Converts from tensorflow Status to GRPC Status. 26 | ::grpc::Status ToGRPCStatus(const ::tensorflow::Status& status); 27 | 28 | } // namespace serving 29 | } // namespace tensorflow 30 | 31 | #endif // TENSORFLOW_SERVING_MODEL_SERVERS_GRPC_STATUS_UTIL_H_ 32 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/resource_estimator.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2020 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_RESOURCE_ESTIMATOR_H_ 17 | #define THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_RESOURCE_ESTIMATOR_H_ 18 | 19 | #include "tensorflow_serving/util/oss_or_google.h" 20 | 21 | #ifdef TENSORFLOW_SERVING_GOOGLE 22 | #include "tensorflow_serving/servables/tensorflow/google/resource_estimator.h" 23 | #else 24 | #include "tensorflow_serving/servables/tensorflow/oss/resource_estimator.h" 25 | #endif 26 | 27 | #endif // THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_RESOURCE_ESTIMATOR_H_ 28 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/saved_model_config.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | 5 | import "tensorflow/core/grappler/optimizers/inference/batch_op_rewriter.proto"; 6 | import "tensorflow/core/tfrt/graph_executor/config.proto"; 7 | import "tensorflow_serving/servables/tensorflow/remote_op_config_rewriter.proto"; 8 | 9 | option cc_enable_arenas = true; 10 | 11 | message SessionOverrides { 12 | optional tools.RemoteOpRemapConfig remote_op_remap_config = 1; 13 | optional BatchOpRewriteConfig batch_op_rewriter_config = 2; 14 | // Disable the entire meta optimizer (off by default). 15 | optional bool disable_meta_optimizer = 3; 16 | } 17 | 18 | message SavedModelConfig { 19 | // A select set of fields from SessionOptions which, at the model level, can 20 | // be used to override SessionOptions set for the entire processes. 21 | optional SessionOverrides session_overrides = 1; 22 | 23 | // Runtime specific configuration proto. They can be used to specify 24 | // environments for runtime. 
25 | optional tensorflow.tfrt_stub.RuntimeConfigProto tfrt_runtime_config = 2; 26 | 27 | // A boolean field that indicates whether the model is critical, i.e., whether 28 | // the entire server cannot serve requests before this model has been loaded. 29 | bool critical = 3; 30 | } 31 | -------------------------------------------------------------------------------- /tensorflow_serving/apis/model.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | 5 | import "google/protobuf/wrappers.proto"; 6 | 7 | option cc_enable_arenas = true; 8 | 9 | // Metadata for an inference request such as the model name and version. 10 | message ModelSpec { 11 | // Required servable name. 12 | string name = 1; 13 | 14 | // Optional choice of which version of the model to use. 15 | // 16 | // Expected to be left unset in the common case. Should be specified when 17 | // there is a strong version consistency requirement (e.g. when the model 18 | // signature changes across versions and requests need to be 19 | // version-specific). 20 | // 21 | // When left unspecified, the system will serve the best available version. 22 | // This is typically the latest version, though during version transitions, 23 | // notably when serving on a fleet of instances, may be either the previous or 24 | // new version. 25 | oneof version_choice { 26 | // Use this specific version number. 27 | google.protobuf.Int64Value version = 2; 28 | 29 | // Use the version associated with the given label. 30 | string version_label = 4; 31 | } 32 | 33 | // A named signature to evaluate. If unspecified, the default signature will 34 | // be used. 35 | string signature_name = 3; 36 | } 37 | -------------------------------------------------------------------------------- /tensorflow_serving/util/threadpool_executor.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/util/threadpool_executor.h" 17 | 18 | #include <functional> 19 | 20 | namespace tensorflow { 21 | namespace serving { 22 | 23 | ThreadPoolExecutor::ThreadPoolExecutor(Env* const env, const string& name, 24 | int num_threads) 25 | : thread_pool_(env, name, num_threads) {} 26 | 27 | ThreadPoolExecutor::~ThreadPoolExecutor() {} 28 | 29 | void ThreadPoolExecutor::Schedule(std::function<void()> fn) { 30 | thread_pool_.Schedule(fn); 31 | } 32 | 33 | } // namespace serving 34 | } // namespace tensorflow 35 | -------------------------------------------------------------------------------- /tensorflow_serving/experimental/tensorflow/ops/remote_predict/kernels/remote_predict_op_kernel.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2020 Google Inc. All Rights Reserved.
2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | #include "tensorflow_serving/experimental/tensorflow/ops/remote_predict/kernels/remote_predict_op_kernel.h" 16 | 17 | #include "tensorflow_serving/experimental/tensorflow/ops/remote_predict/kernels/prediction_service_grpc.h" 18 | 19 | namespace tensorflow { 20 | namespace serving { 21 | namespace { 22 | 23 | REGISTER_KERNEL_BUILDER(Name("TfServingRemotePredict").Device(DEVICE_CPU), 24 | RemotePredictOp); 25 | 26 | } // namespace 27 | } // namespace serving 28 | } // namespace tensorflow 29 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/oss/file_acl.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2023 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_OSS_FILE_ACL_H_ 17 | #define THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_OSS_FILE_ACL_H_ 18 | 19 | #include "absl/status/status.h" 20 | #include "tensorflow_serving/core/servable_id.h" 21 | 22 | namespace tensorflow { 23 | namespace serving { 24 | 25 | absl::Status RegisterModelRoot(const ServableId& servable_id, 26 | std::string_view root_path); 27 | 28 | } // namespace serving 29 | } // namespace tensorflow 30 | 31 | #endif // THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_OSS_FILE_ACL_H_ 32 | -------------------------------------------------------------------------------- /tensorflow_serving/util/status_util.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2018 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | #ifndef TENSORFLOW_SERVING_UTIL_STATUS_UTIL_H_ 16 | #define TENSORFLOW_SERVING_UTIL_STATUS_UTIL_H_ 17 | 18 | #include "tensorflow/core/lib/core/status.h" 19 | #include "tensorflow_serving/apis/status.pb.h" 20 | 21 | namespace tensorflow { 22 | namespace serving { 23 | 24 | // Converts from tensorflow Status to StatusProto. 25 | StatusProto ToStatusProto(const Status& status); 26 | 27 | // Converts from StatusProto to tensorflow Status. 28 | Status FromStatusProto(const StatusProto& status_proto); 29 | 30 | } // namespace serving 31 | } // namespace tensorflow 32 | 33 | #endif // TENSORFLOW_SERVING_UTIL_STATUS_UTIL_H_ 34 | -------------------------------------------------------------------------------- /third_party/icu/README: -------------------------------------------------------------------------------- 1 | ICU currently does not have public BUILD files for Bazel, so we've created 2 | our own based upon the currently downloaded version of Bazel. This BUILD 3 | is similar to the one used in TensorFlow and TF Text. 4 | 5 | TensorFlow and TF Text also patch Bazel to include data files necessary 6 | which are not a part of the core release, but are built into the code. This 7 | patch file is a combination of the two. Below are the steps to generate: 8 | 1. Checkout ICU and update to the correct version. 9 | `cd /tmp` 10 | `git clone https://github.com/unicode-org/icu.git` 11 | `git checkout release-64-2` 12 | 2. Apply TF Text patch. 13 | `cp third_party/tensorflow_text/oss_scripts/third_party/icu/udata.patch.oss /tmp/icu` 14 | `patch -p1 < udata.patch.oss` 15 | 3. Copy the files and stash the changes. 16 | `git status` 17 | `cp icu4c/source/common/udata.cpp ..` 18 | `cp icu4c/source/common/unicode/uconfig.h ..` 19 | `git stash` 20 | 4. Apply TF patch. 21 | `cp third_party/tensorflow/third_party/icu/udata.patch.oss /tmp/icu` 22 | `patch -p1 < udate.patch.oss` 23 | 5. Combine changes. 24 | `git add . && git commit -m .` 25 | `git stash pop` 26 | < resolve differences with vim > 27 | `git add .` 28 | `git reset HEAD^` 29 | 6. Generate new patch file. 30 | `git diff > icu.data.patch` 31 | -------------------------------------------------------------------------------- /third_party/tf_decision_forests/tf_decision_forests.patch: -------------------------------------------------------------------------------- 1 | diff --git a/tensorflow_decision_forests/tensorflow/ops/inference/kernel.cc b/tensorflow_decision_forests/tensorflow/ops/inference/kernel.cc 2 | index 77d8b58..fdfd781 100644 3 | --- a/tensorflow_decision_forests/tensorflow/ops/inference/kernel.cc 4 | +++ b/tensorflow_decision_forests/tensorflow/ops/inference/kernel.cc 5 | @@ -59,6 +59,7 @@ 6 | #include "absl/strings/substitute.h" 7 | #include "tensorflow/core/framework/op_kernel.h" 8 | #include "tensorflow/core/framework/resource_mgr.h" 9 | +#include "tensorflow/core/public/version.h" 10 | #include "yggdrasil_decision_forests/dataset/data_spec.h" 11 | #include "yggdrasil_decision_forests/dataset/data_spec.pb.h" 12 | #include "yggdrasil_decision_forests/model/abstract_model.h" 13 | @@ -1350,7 +1351,11 @@ class SimpleMLInferenceOp : public OpKernel { 14 | if (!lookup_status.ok()) { 15 | return tf::Status( 16 | lookup_status.code(), 17 | +#if TF_GRAPH_DEF_VERSION < 1467 18 | absl::StrCat(lookup_status.error_message(), 19 | +#else 20 | + absl::StrCat(lookup_status.message(), 21 | +#endif 22 | ". 
This error caused the simpleML model not to be " 23 | "available for inference. This error is likely due to " 24 | "the \"LoadModel*\" not having been run before.")); 25 | -------------------------------------------------------------------------------- /tensorflow_serving/model_servers/profiler_client.py: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | # ============================================================================== 15 | """Simple client to send profiling request to ModelServer.""" 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | from __future__ import print_function 20 | 21 | import tensorflow as tf 22 | 23 | from tensorflow.python.profiler import profiler_client 24 | 25 | 26 | def main(argv): 27 | server = argv[1] if len(argv) > 1 else 'localhost:8500' 28 | logdir = argv[2] if len(argv) > 2 else '/tmp' 29 | duration_ms = argv[3] if len(argv) > 3 else 2000 30 | profiler_client.trace(server, logdir, duration_ms) 31 | 32 | 33 | if __name__ == '__main__': 34 | tf.compat.v1.app.run() 35 | -------------------------------------------------------------------------------- /tensorflow_serving/util/net_http/public/BUILD: -------------------------------------------------------------------------------- 1 | # Description: shared files 2 | 3 | load("@rules_cc//cc:cc_library.bzl", "cc_library") 4 | 5 | package(default_visibility = [ 6 | ":http_server_clients", 7 | "//tensorflow_serving:internal", 8 | "//tensorflow_serving/util/net_http:__subpackages__", 9 | ]) 10 | 11 | # TODO(wenboz): Remove this visibility list as net_http is now open-sourced and 12 | # available as a standalone library in third_party/net_http. 13 | package_group( 14 | name = "http_server_clients", 15 | packages = [ 16 | "//...", 17 | "//net/floo/flooefi/...", 18 | "//platforms/accel_ssw/common/lib/...", 19 | "//platforms/gsys/...", 20 | "//platforms/redfish/...", 21 | "//platforms/storage/flash/warthog/firmware/...", 22 | "//platforms/syshealth/shdc/...", 23 | "//prodkernel/networking/tcpdirect/buffer_mgmt_daemon/...", 24 | "//security/crypta/commands/ecclesia/...", 25 | "//security/loas/l2/internal/service/notar/...", 26 | "//third_party/ebpf_transport_monitoring/...", 27 | ], 28 | ) 29 | 30 | licenses(["notice"]) 31 | 32 | # C++ lib based on zlib for gzip support 33 | cc_library( 34 | name = "shared_files", 35 | srcs = [ 36 | "header_names.cc", 37 | ], 38 | hdrs = [ 39 | "header_names.h", 40 | "response_code_enum.h", 41 | ], 42 | ) 43 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/servable_test.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2023 Google Inc. All Rights Reserved. 
2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/servables/tensorflow/servable.h" 17 | 18 | #include "absl/status/status.h" 19 | #include "tensorflow_serving/apis/predict.pb.h" 20 | #include "tensorflow_serving/test_util/test_util.h" 21 | 22 | namespace tensorflow { 23 | namespace serving { 24 | namespace { 25 | 26 | TEST(EmptyServableTest, Predict) { 27 | PredictResponse response; 28 | EXPECT_EQ(EmptyServable() 29 | .Predict(Servable::RunOptions(), PredictRequest(), &response) 30 | .code(), 31 | absl::StatusCode::kFailedPrecondition); 32 | } 33 | 34 | } // namespace 35 | } // namespace serving 36 | } // namespace tensorflow 37 | -------------------------------------------------------------------------------- /tensorflow_serving/session_bundle/oss/session_bundle.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2019 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_SESSION_BUNDLE_OSS_SESSION_BUNDLE_H_ 17 | #define TENSORFLOW_SERVING_SESSION_BUNDLE_OSS_SESSION_BUNDLE_H_ 18 | 19 | #include "tensorflow/core/protobuf/meta_graph.pb.h" 20 | #include "tensorflow/core/public/session.h" 21 | 22 | namespace tensorflow { 23 | namespace serving { 24 | 25 | // Dummy SessionBundle class. 26 | // SessionBundle is deprecated and not supported in Tensorflow Serving. 27 | struct SessionBundle { 28 | std::unique_ptr session; 29 | MetaGraphDef meta_graph_def; 30 | }; 31 | 32 | } // namespace serving 33 | } // namespace tensorflow 34 | 35 | #endif // TENSORFLOW_SERVING_SESSION_BUNDLE_OSS_SESSION_BUNDLE_H_ 36 | -------------------------------------------------------------------------------- /tensorflow_serving/util/class_registration_util.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/util/class_registration_util.h" 17 | 18 | #include 19 | 20 | namespace tensorflow { 21 | namespace serving { 22 | 23 | Status ParseUrlForAnyType(const string& type_url, 24 | string* const full_type_name) { 25 | std::vector splits = str_util::Split(type_url, '/'); 26 | if (splits.size() < 2 || splits[splits.size() - 1].empty()) { 27 | return errors::InvalidArgument( 28 | "Supplied config's type_url could not be parsed: ", type_url); 29 | } 30 | *full_type_name = splits[splits.size() - 1]; 31 | return OkStatus(); 32 | } 33 | 34 | } // namespace serving 35 | } // namespace tensorflow 36 | -------------------------------------------------------------------------------- /tensorflow_serving/session_bundle/saved_model_config.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2023 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_SESSION_BUNDLE_SAVED_MODEL_CONFIG_H_ 17 | #define TENSORFLOW_SERVING_SESSION_BUNDLE_SAVED_MODEL_CONFIG_H_ 18 | 19 | #include 20 | 21 | #include "tensorflow/core/platform/status.h" 22 | #include "tensorflow/core/public/session_options.h" 23 | 24 | namespace tensorflow { 25 | namespace serving { 26 | namespace session_bundle { 27 | 28 | Status MaybeLoadSavedModelConfig(const std::string& export_dir, 29 | SessionOptions* session_options); 30 | 31 | } // namespace session_bundle 32 | } // namespace serving 33 | } // namespace tensorflow 34 | 35 | #endif // TENSORFLOW_SERVING_SESSION_BUNDLE_SAVED_MODEL_CONFIG_H_ 36 | -------------------------------------------------------------------------------- /tensorflow_serving/apis/classification.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package tensorflow.serving; 4 | 5 | import "tensorflow_serving/apis/input.proto"; 6 | import "tensorflow_serving/apis/model.proto"; 7 | 8 | option cc_enable_arenas = true; 9 | 10 | // A single class. 11 | message Class { 12 | // Label or name of the class. 13 | string label = 1; 14 | // Score for this class (e.g., the probability the item belongs to this 15 | // class). As per the proto3 default-value semantics, if the score is missing, 16 | // it should be treated as 0. 17 | float score = 2; 18 | } 19 | 20 | // List of classes for a single item (tensorflow.Example). 
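// For illustration only (the labels and scores below are hypothetical, not
// taken from this repository), a three-way classifier could fill in a single
// Classifications entry, in text format, as:
//   classes { label: "cat" score: 0.7 }
//   classes { label: "dog" score: 0.2 }
//   classes { label: "bird" score: 0.1 }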
21 | message Classifications {
22 | repeated Class classes = 1;
23 | }
24 | 
25 | // Contains one result per input example, in the same order as the input in
26 | // ClassificationRequest.
27 | message ClassificationResult {
28 | repeated Classifications classifications = 1;
29 | }
30 | 
31 | // RPC Interfaces
32 | 
33 | message ClassificationRequest {
34 | // Model Specification. If version is not specified, will use the latest
35 | // (numerical) version.
36 | ModelSpec model_spec = 1;
37 | 
38 | // Input data.
39 | tensorflow.serving.Input input = 2;
40 | }
41 | 
42 | message ClassificationResponse {
43 | // Effective Model Specification used for classification.
44 | ModelSpec model_spec = 2;
45 | 
46 | // Result of the classification.
47 | ClassificationResult result = 1;
48 | }
49 | 
-------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/remote_op_config_rewriter.proto: --------------------------------------------------------------------------------
1 | syntax = "proto3";
2 | 
3 | package tensorflow.serving.tools;
4 | 
5 | // Config for the remote op rewriter. This should be serialized/encoded
6 | // and set as a param in RewriterConfig with key kRemoteOpRewriteConfigParamKey.
7 | message RemoteOpRemapConfig {
8 | // Map from placeholder target model_names to actual target model_names.
9 | map<string, string> model_name_remap = 1;
10 | 
11 | // Map from placeholder target_address to actual target_address.
12 | map<string, string> target_address_remap = 2;
13 | 
14 | // Map from model name to actual target_address. This will only be used when
15 | // target_address_remap is not set. Also, when the model_name_remap is set,
16 | // the model name here is the name BEFORE the rewrite. The model name here is
17 | // the backend model name.
18 | map<string, string> backend_model_name_to_target_address_remap = 6;
19 | 
20 | // If true, version will be propagated from SessionMetadata.version.
21 | // See: http://shortn/_XDBisC2j9k
22 | // Requires enable_tf_session_metadata = true.
23 | bool propagate_version = 4;
24 | 
25 | // If true, model name will be propagated from SessionMetadata.name.
26 | // See: http://shortn/_XDBisC2j9k
27 | // Requires enable_tf_session_metadata = true. This takes precedence
28 | // over remapping from model_name_remap.
29 | bool propagate_model_name = 5;
30 | 
31 | reserved 3;
32 | }
33 | 
-------------------------------------------------------------------------------- /tools/gen_status_stamp.sh: --------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | # ==============================================================================
16 | 
17 | # This script will be run by the build process to generate key-value
18 | # information that represents the status of the workspace. 
The output should be 19 | # in the format: 20 | # 21 | # KEY1 VALUE1 22 | # KEY2 VALUE2 23 | # 24 | # If the script exits with non-zero code, it's considered as a failure 25 | # and the output will be discarded. 26 | 27 | # if we're inside a git tree 28 | if [ -d .git ] || git rev-parse --git-dir > /dev/null 2>&1; then 29 | git_rev=$(git rev-parse --short HEAD) 30 | if [[ $? != 0 ]]; 31 | then 32 | exit 1 33 | fi 34 | echo "BUILD_SCM_REVISION ${git_rev}" 35 | else 36 | echo "BUILD_SCM_REVISION no_git" 37 | fi; 38 | 39 | 40 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/test_util/BUILD: -------------------------------------------------------------------------------- 1 | # Description: Tensorflow servables test_utils. 2 | 3 | load("@rules_cc//cc:cc_library.bzl", "cc_library") 4 | load("//tensorflow_serving:serving.bzl", "serving_proto_library") 5 | 6 | licenses(["notice"]) 7 | 8 | serving_proto_library( 9 | name = "fake_thread_pool_factory_proto", 10 | testonly = 1, 11 | srcs = ["fake_thread_pool_factory.proto"], 12 | visibility = [ 13 | "//visibility:public", 14 | ], 15 | ) 16 | 17 | cc_library( 18 | name = "fake_thread_pool_factory", 19 | testonly = 1, 20 | srcs = ["fake_thread_pool_factory.cc"], 21 | hdrs = ["fake_thread_pool_factory.h"], 22 | visibility = [ 23 | "//visibility:public", 24 | ], 25 | deps = [ 26 | ":fake_thread_pool_factory_cc_proto", 27 | "//tensorflow_serving/servables/tensorflow:thread_pool_factory", 28 | "@org_tensorflow//tensorflow/core:lib", 29 | ], 30 | ) 31 | 32 | cc_library( 33 | name = "mock_tfrt_saved_model", 34 | testonly = 1, 35 | hdrs = ["mock_tfrt_saved_model.h"], 36 | visibility = [ 37 | "//tensorflow_serving/batching:__subpackages__", 38 | "//tensorflow_serving/servables/tensorflow:__subpackages__", 39 | ], 40 | deps = [ 41 | "@com_google_googletest//:gtest", 42 | "@org_tensorflow//tensorflow/core/tfrt/runtime", 43 | "@org_tensorflow//tensorflow/core/tfrt/saved_model", 44 | ], 45 | ) 46 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/test_util/fake_thread_pool_factory.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2020 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/servables/tensorflow/test_util/fake_thread_pool_factory.h" 17 | 18 | #include 19 | 20 | namespace tensorflow { 21 | namespace serving { 22 | namespace test_util { 23 | 24 | absl::Status FakeThreadPoolFactory::Create( 25 | const FakeThreadPoolFactoryConfig& config, 26 | std::unique_ptr* result) { 27 | *result = std::make_unique(config); 28 | return absl::Status(); 29 | } 30 | 31 | REGISTER_THREAD_POOL_FACTORY(FakeThreadPoolFactory, 32 | FakeThreadPoolFactoryConfig); 33 | 34 | } // namespace test_util 35 | } // namespace serving 36 | } // namespace tensorflow 37 | -------------------------------------------------------------------------------- /tensorflow_serving/util/executor.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_UTIL_EXECUTOR_H_ 17 | #define TENSORFLOW_SERVING_UTIL_EXECUTOR_H_ 18 | 19 | #include 20 | 21 | namespace tensorflow { 22 | namespace serving { 23 | 24 | /// An abstract object that can execute closures. 25 | /// 26 | /// Implementations of executor must be thread-safe. 27 | class Executor { 28 | public: 29 | virtual ~Executor() = default; 30 | 31 | /// Schedule the specified 'fn' for execution in this executor. Depending on 32 | /// the subclass implementation, this may block in some situations. 33 | virtual void Schedule(std::function fn) = 0; 34 | }; 35 | 36 | } // namespace serving 37 | } // namespace tensorflow 38 | 39 | #endif // TENSORFLOW_SERVING_UTIL_EXECUTOR_H_ 40 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/oss/run_options.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2020 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #ifndef THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_OSS_RUN_OPTIONS_H_ 17 | #define THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_OSS_RUN_OPTIONS_H_ 18 | 19 | #include "tensorflow_serving/servables/tensorflow/run_options_base.h" 20 | 21 | namespace tensorflow { 22 | namespace serving { 23 | namespace servables { 24 | 25 | // RunOptions group the configuration for individual inference executions. 26 | // The per-request configuration (e.g. deadline) can be passed here. 27 | struct RunOptions : public RunOptionsBase {}; 28 | 29 | } // namespace servables 30 | } // namespace serving 31 | } // namespace tensorflow 32 | 33 | #endif // THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_OSS_RUN_OPTIONS_H_ 34 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/predict_response_tensor_serialization_option.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2020 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_PREDICT_RESPONSE_TENSOR_SERIALIZATION_OPTION_H_ 17 | #define THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_PREDICT_RESPONSE_TENSOR_SERIALIZATION_OPTION_H_ 18 | 19 | namespace tensorflow { 20 | namespace serving { 21 | namespace internal { 22 | 23 | // Whether to serialize proto as field or content. 24 | enum class PredictResponseTensorSerializationOption { 25 | kAsProtoField = 0, 26 | kAsProtoContent = 1, 27 | }; 28 | 29 | } // namespace internal 30 | } // namespace serving 31 | } // namespace tensorflow 32 | 33 | #endif // THIRD_PARTY_TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_PREDICT_RESPONSE_TENSOR_SERIALIZATION_OPTION_H_ 34 | -------------------------------------------------------------------------------- /tensorflow_serving/example/resnet_k8s.yaml: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | # ============================================================================== 15 | 16 | apiVersion: apps/v1 17 | kind: Deployment 18 | metadata: 19 | name: resnet-deployment 20 | spec: 21 | selector: 22 | matchLabels: 23 | app: resnet-server 24 | replicas: 3 25 | template: 26 | metadata: 27 | labels: 28 | app: resnet-server 29 | spec: 30 | containers: 31 | - name: resnet-container 32 | image: gcr.io/tensorflow-serving/resnet 33 | ports: 34 | - containerPort: 8500 35 | --- 36 | apiVersion: v1 37 | kind: Service 38 | metadata: 39 | labels: 40 | run: resnet-service 41 | name: resnet-service 42 | spec: 43 | ports: 44 | - port: 8500 45 | targetPort: 8500 46 | selector: 47 | app: resnet-server 48 | type: LoadBalancer 49 | -------------------------------------------------------------------------------- /tensorflow_serving/util/class_registration_util.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_UTIL_CLASS_REGISTRATION_UTIL_H_ 17 | #define TENSORFLOW_SERVING_UTIL_CLASS_REGISTRATION_UTIL_H_ 18 | 19 | #include "tensorflow/core/lib/core/errors.h" 20 | #include "tensorflow/core/lib/core/status.h" 21 | #include "tensorflow/core/lib/strings/str_util.h" 22 | 23 | namespace tensorflow { 24 | namespace serving { 25 | 26 | // Parses a url whose final '/' is followed by a proto type name, e.g. 27 | // "type.googleapis.com/some_namespace.some_proto_type_name". 28 | // Returns Status::OK() iff parsing succeeded. 29 | Status ParseUrlForAnyType(const string& type_url, string* const full_type_name); 30 | 31 | } // namespace serving 32 | } // namespace tensorflow 33 | 34 | #endif // TENSORFLOW_SERVING_UTIL_CLASS_REGISTRATION_UTIL_H_ 35 | -------------------------------------------------------------------------------- /tensorflow_serving/repo.bzl: -------------------------------------------------------------------------------- 1 | """ TensorFlow Http Archive 2 | 3 | Modified http_archive that allows us to override the TensorFlow commit that is 4 | downloaded by setting an environment variable. This override is to be used for 5 | testing purposes. 6 | 7 | Add the following to your Bazel build command in order to override the 8 | TensorFlow revision. 
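For example (the commit hash and target pattern below are purely
illustrative), the override can be supplied directly on the command line:

  bazel build --action_env TF_REVISION="1a2b3c4d5e6f" tensorflow_serving/...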
9 | 10 | build: --action_env TF_REVISION="" 11 | 12 | * `TF_REVISION`: tensorflow revision override (git commit hash) 13 | """ 14 | 15 | _TF_REVISION = "TF_REVISION" 16 | 17 | def _tensorflow_http_archive(ctx): 18 | git_commit = ctx.attr.git_commit 19 | sha256 = ctx.attr.sha256 20 | patch = getattr(ctx.attr, "patch", None) 21 | 22 | override_git_commit = ctx.os.environ.get(_TF_REVISION) 23 | if override_git_commit: 24 | sha256 = "" 25 | git_commit = override_git_commit 26 | 27 | strip_prefix = "tensorflow-%s" % git_commit 28 | urls = [ 29 | "https://github.com/tensorflow/tensorflow/archive/%s.tar.gz" % git_commit, 30 | ] 31 | ctx.download_and_extract( 32 | urls, 33 | "", 34 | sha256, 35 | "", 36 | strip_prefix, 37 | ) 38 | if patch: 39 | ctx.patch(patch, strip = 1) 40 | 41 | tensorflow_http_archive = repository_rule( 42 | implementation = _tensorflow_http_archive, 43 | attrs = { 44 | "git_commit": attr.string(mandatory = True), 45 | "sha256": attr.string(mandatory = True), 46 | "patch": attr.label(), 47 | }, 48 | ) 49 | -------------------------------------------------------------------------------- /tensorflow_serving/core/aspired_version_policy.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2017 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/core/aspired_version_policy.h" 17 | 18 | #include 19 | 20 | namespace tensorflow { 21 | namespace serving { 22 | 23 | absl::optional AspiredVersionPolicy::GetHighestAspiredNewServableId( 24 | const std::vector& all_versions) { 25 | absl::optional highest_version_id; 26 | for (const auto& version : all_versions) { 27 | if (version.is_aspired && version.state == LoaderHarness::State::kNew) { 28 | if (!highest_version_id || 29 | version.id.version > highest_version_id.value().version) { 30 | highest_version_id = version.id; 31 | } 32 | } 33 | } 34 | return highest_version_id; 35 | } 36 | 37 | } // namespace serving 38 | } // namespace tensorflow 39 | -------------------------------------------------------------------------------- /tensorflow_serving/core/prefix_storage_path_source_adapter.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2019 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/core/prefix_storage_path_source_adapter.h" 17 | 18 | #include 19 | 20 | #include "tensorflow/core/platform/path.h" 21 | 22 | namespace tensorflow { 23 | namespace serving { 24 | 25 | PrefixStoragePathSourceAdapter::PrefixStoragePathSourceAdapter( 26 | const std::string& prefix) 27 | : prefix_(prefix) {} 28 | 29 | PrefixStoragePathSourceAdapter::~PrefixStoragePathSourceAdapter() { Detach(); } 30 | 31 | absl::Status PrefixStoragePathSourceAdapter::Convert(const StoragePath& source, 32 | StoragePath* destination) { 33 | *destination = tensorflow::io::JoinPath(prefix_, source); 34 | return absl::OkStatus(); 35 | } 36 | 37 | } // namespace serving 38 | } // namespace tensorflow 39 | -------------------------------------------------------------------------------- /tensorflow_serving/experimental/example/BUILD: -------------------------------------------------------------------------------- 1 | # Description: Tensorflow Serving examples. 2 | 3 | # Placeholder: load py_binary 4 | 5 | package(features = ["no_layering_check"]) 6 | 7 | licenses(["notice"]) 8 | 9 | filegroup( 10 | name = "all_files", 11 | srcs = glob( 12 | ["**/*"], 13 | exclude = [ 14 | "**/METADATA", 15 | "**/OWNERS", 16 | ], 17 | ), 18 | ) 19 | 20 | py_binary( 21 | name = "remote_predict_client", 22 | srcs = ["remote_predict_client.py"], 23 | python_version = "PY3", 24 | tags = [ 25 | "manual", 26 | ], 27 | deps = ["//tensorflow_serving/experimental/tensorflow/ops/remote_predict:remote_predict_py"], 28 | ) 29 | 30 | py_binary( 31 | name = "half_plus_two_with_rpop", 32 | srcs = [ 33 | "half_plus_two_with_rpop.py", 34 | ], 35 | python_version = "PY3", 36 | srcs_version = "PY3", 37 | tags = [ 38 | "manual", 39 | ], 40 | deps = ["//tensorflow_serving/experimental/tensorflow/ops/remote_predict:remote_predict_py"], 41 | ) 42 | 43 | py_binary( 44 | name = "half_plus_two_with_rpop_client", 45 | srcs = ["half_plus_two_with_rpop_client.py"], 46 | python_version = "PY3", 47 | tags = [ 48 | "manual", 49 | ], 50 | deps = [ 51 | "//tensorflow_serving/apis:predict_proto_py_pb2", 52 | "//tensorflow_serving/apis:prediction_service_proto_py_pb2", 53 | "//tensorflow_serving/experimental/tensorflow/ops/remote_predict:remote_predict_py", 54 | ], 55 | ) 56 | -------------------------------------------------------------------------------- /tensorflow_serving/core/test_util/mock_log_collector.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2017 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_LOG_COLLECTOR_H_ 17 | #define TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_LOG_COLLECTOR_H_ 18 | 19 | #include "google/protobuf/message.h" 20 | #include 21 | #include "tensorflow/core/lib/core/status.h" 22 | #include "tensorflow_serving/core/log_collector.h" 23 | 24 | namespace tensorflow { 25 | namespace serving { 26 | 27 | class MockLogCollector : public LogCollector { 28 | public: 29 | MockLogCollector() = default; 30 | MOCK_METHOD(Status, CollectMessage, (const google::protobuf::Message& message), 31 | (override)); 32 | MOCK_METHOD(Status, Flush, (), (override)); 33 | }; 34 | 35 | } // namespace serving 36 | } // namespace tensorflow 37 | 38 | #endif // TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_LOG_COLLECTOR_H_ 39 | -------------------------------------------------------------------------------- /tensorflow_serving/model_servers/platform_config_util.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_MODEL_SERVERS_PLATFORM_CONFIG_UTIL_H_ 17 | #define TENSORFLOW_SERVING_MODEL_SERVERS_PLATFORM_CONFIG_UTIL_H_ 18 | 19 | #include "tensorflow_serving/config/platform_config.pb.h" 20 | #include "tensorflow_serving/servables/tensorflow/session_bundle_config.pb.h" 21 | 22 | namespace tensorflow { 23 | namespace serving { 24 | 25 | // Creates a PlatformConfigMap containing a single entry with the key as 26 | // kTensorFlowModelPlatform and the value as a SourceAdapter config proto 27 | // for SavedModelBundleSourceAdapter. 28 | PlatformConfigMap CreateTensorFlowPlatformConfigMap( 29 | const SessionBundleConfig& session_bundle_config); 30 | 31 | } // namespace serving 32 | } // namespace tensorflow 33 | 34 | #endif // TENSORFLOW_SERVING_MODEL_SERVERS_PLATFORM_CONFIG_UTIL_H_ 35 | -------------------------------------------------------------------------------- /tensorflow_serving/model_servers/grpc_status_util.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2018 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/model_servers/grpc_status_util.h" 17 | 18 | #include "grpcpp/support/status_code_enum.h" 19 | #include "absl/strings/str_join.h" 20 | 21 | namespace tensorflow { 22 | namespace serving { 23 | 24 | ::grpc::Status ToGRPCStatus(const absl::Status& status) { 25 | const int kErrorMessageLimit = 1024; 26 | string error_message; 27 | if (status.message().length() > kErrorMessageLimit) { 28 | error_message = absl::StrCat(status.message().substr(0, kErrorMessageLimit), 29 | "...TRUNCATED"); 30 | } else { 31 | error_message = status.message(); 32 | } 33 | return ::grpc::Status(static_cast(status.code()), 34 | error_message); 35 | } 36 | 37 | } // namespace serving 38 | } // namespace tensorflow 39 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/serving_session.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/servables/tensorflow/serving_session.h" 17 | 18 | #include "tensorflow/core/framework/graph.pb.h" 19 | #include "tensorflow/core/lib/core/errors.h" 20 | #include "tensorflow/core/lib/core/status.h" 21 | 22 | namespace tensorflow { 23 | namespace serving { 24 | 25 | absl::Status ServingSession::Create(const GraphDef& graph) { 26 | return errors::PermissionDenied("State changes denied via ServingSession"); 27 | } 28 | 29 | absl::Status ServingSession::Extend(const GraphDef& graph) { 30 | return errors::PermissionDenied("State changes denied via ServingSession"); 31 | } 32 | 33 | absl::Status ServingSession::Close() { 34 | return errors::PermissionDenied("State changes denied via ServingSession"); 35 | } 36 | 37 | } // namespace serving 38 | } // namespace tensorflow 39 | -------------------------------------------------------------------------------- /tensorflow_serving/test_util/BUILD: -------------------------------------------------------------------------------- 1 | # Description: Tensorflow Serving test utils. 
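# Sketch (target name and source file below are hypothetical, shown only for
# illustration): a test elsewhere in the tree would typically pick up these
# helpers through its deps, e.g.
#   cc_test(
#       name = "example_test",
#       srcs = ["example_test.cc"],
#       deps = ["//tensorflow_serving/test_util"],
#   )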
2 | 3 | load("@rules_cc//cc:cc_binary.bzl", "cc_binary") 4 | load("@rules_cc//cc:cc_library.bzl", "cc_library") 5 | 6 | package(default_visibility = [ 7 | "//tensorflow_serving:internal", 8 | ]) 9 | 10 | licenses(["notice"]) 11 | 12 | filegroup( 13 | name = "all_files", 14 | srcs = glob( 15 | ["**/*"], 16 | exclude = [ 17 | "**/METADATA", 18 | "**/OWNERS", 19 | ], 20 | ), 21 | ) 22 | 23 | cc_library( 24 | name = "test_util", 25 | testonly = 1, 26 | srcs = ["test_util.cc"], 27 | hdrs = ["test_util.h"], 28 | visibility = [ 29 | "//visibility:public", 30 | ], 31 | deps = [ 32 | "@com_google_googletest//:gtest", 33 | "@com_google_protobuf//:protobuf", 34 | "@local_tsl//tsl/platform:protobuf", 35 | "@org_tensorflow//tensorflow/core:lib", 36 | "@org_tensorflow//tensorflow/core:session_options", 37 | "@org_tensorflow//tensorflow/core:test", 38 | ], 39 | ) 40 | 41 | cc_binary( 42 | name = "grpc_client", 43 | srcs = ["grpc_client.cc"], 44 | deps = [ 45 | "//tensorflow_serving/apis:prediction_service_cc_proto", 46 | "@com_github_grpc_grpc//:grpc++", 47 | "@com_google_absl//absl/flags:flag", 48 | "@com_google_absl//absl/flags:parse", 49 | "@com_google_absl//absl/strings", 50 | "@org_tensorflow//tensorflow/core:lib", 51 | "@org_tensorflow//tensorflow/core/platform:env", 52 | ], 53 | ) 54 | -------------------------------------------------------------------------------- /tensorflow_serving/util/inline_executor.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_UTIL_INLINE_EXECUTOR_H_ 17 | #define TENSORFLOW_SERVING_UTIL_INLINE_EXECUTOR_H_ 18 | 19 | #include 20 | 21 | #include "tensorflow/core/platform/macros.h" 22 | #include "tensorflow_serving/util/executor.h" 23 | 24 | namespace tensorflow { 25 | namespace serving { 26 | 27 | // An InlineExecutor is a trivial executor that immediately executes the closure 28 | // given to it. It's useful as a fake, and in cases where an executor is needed, 29 | // but multi-threadedness is not. 30 | class InlineExecutor : public Executor { 31 | public: 32 | InlineExecutor(); 33 | ~InlineExecutor() override; 34 | void Schedule(std::function fn) override; 35 | }; 36 | 37 | } // namespace serving 38 | } // namespace tensorflow 39 | 40 | #endif // TENSORFLOW_SERVING_UTIL_INLINE_EXECUTOR_H_ 41 | -------------------------------------------------------------------------------- /tensorflow_serving/util/status_util.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2018 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at
6 | 
7 | http://www.apache.org/licenses/LICENSE-2.0
8 | 
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 | ==============================================================================*/
15 | 
16 | #include "tensorflow_serving/util/status_util.h"
17 | 
18 | #include <string>
19 | 
20 | namespace tensorflow {
21 | namespace serving {
22 | 
23 | StatusProto ToStatusProto(const Status& status) {
24 | StatusProto status_proto;
25 | status_proto.set_error_code(
26 | static_cast<tensorflow::error::Code>(status.code()));
27 | if (!status.ok()) {
28 | status_proto.set_error_message(std::string(status.message()));
29 | }
30 | return status_proto;
31 | }
32 | 
33 | Status FromStatusProto(const StatusProto& status_proto) {
34 | return status_proto.error_code() == tensorflow::error::OK
35 | ? Status()
36 | : Status(static_cast<absl::StatusCode>(status_proto.error_code()),
37 | status_proto.error_message());
38 | }
39 | 
40 | } // namespace serving
41 | } // namespace tensorflow
42 | 
-------------------------------------------------------------------------------- /tensorflow_serving/apis/prediction_log.proto: --------------------------------------------------------------------------------
1 | syntax = "proto3";
2 | 
3 | package tensorflow.serving;
4 | 
5 | import "tensorflow_serving/apis/classification.proto";
6 | import "tensorflow_serving/apis/inference.proto";
7 | import "tensorflow_serving/apis/logging.proto";
8 | import "tensorflow_serving/apis/predict.proto";
9 | import "tensorflow_serving/apis/regression.proto";
10 | import "tensorflow_serving/apis/session_service.proto";
11 | 
12 | option cc_enable_arenas = true;
13 | 
14 | message ClassifyLog {
15 | ClassificationRequest request = 1;
16 | ClassificationResponse response = 2;
17 | }
18 | 
19 | message RegressLog {
20 | RegressionRequest request = 1;
21 | RegressionResponse response = 2;
22 | }
23 | 
24 | message PredictLog {
25 | PredictRequest request = 1;
26 | PredictResponse response = 2;
27 | }
28 | 
29 | message PredictStreamedLog {
30 | repeated PredictRequest request = 1;
31 | repeated PredictResponse response = 2;
32 | }
33 | 
34 | message MultiInferenceLog {
35 | MultiInferenceRequest request = 1;
36 | MultiInferenceResponse response = 2;
37 | }
38 | 
39 | message SessionRunLog {
40 | SessionRunRequest request = 1;
41 | SessionRunResponse response = 2;
42 | }
43 | 
44 | // Logged model inference request.
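// For illustration only (the model name and version below are hypothetical,
// and the tensor contents are elided), a logged Predict call could look like
// this in text format:
//   predict_log {
//     request { model_spec { name: "my_model" } }
//     response { model_spec { name: "my_model" version { value: 1 } } }
//   }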
45 | message PredictionLog { 46 | LogMetadata log_metadata = 1; 47 | oneof log_type { 48 | ClassifyLog classify_log = 2; 49 | RegressLog regress_log = 3; 50 | PredictLog predict_log = 6; 51 | PredictStreamedLog predict_streamed_log = 7; 52 | MultiInferenceLog multi_inference_log = 4; 53 | SessionRunLog session_run_log = 5; 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /tensorflow_serving/session_bundle/oss/BUILD: -------------------------------------------------------------------------------- 1 | load("@org_tensorflow//tensorflow/core/platform:build_config.bzl", "tf_proto_library") 2 | load("@rules_cc//cc:cc_library.bzl", "cc_library") 3 | load("//tensorflow_serving:serving.bzl", "oss_only_cc_test") 4 | 5 | licenses(["notice"]) 6 | 7 | package( 8 | default_visibility = [ 9 | "//tensorflow_serving:internal", 10 | ], 11 | features = ["-layering_check"], 12 | ) 13 | 14 | tf_proto_library( 15 | name = "manifest_proto", 16 | srcs = ["manifest.proto"], 17 | ) 18 | 19 | cc_library( 20 | name = "session_bundle", 21 | hdrs = ["session_bundle.h"], 22 | deps = [ 23 | "@org_tensorflow//tensorflow/core:core_cpu", 24 | ], 25 | ) 26 | 27 | cc_library( 28 | name = "session_bundle_util", 29 | srcs = ["session_bundle_util.cc"], 30 | deps = [ 31 | ":manifest_proto_cc", 32 | ":session_bundle", 33 | "//tensorflow_serving/session_bundle:session_bundle_util_header", 34 | "@org_tensorflow//tensorflow/cc/saved_model:loader", 35 | "@org_tensorflow//tensorflow/core:lib", 36 | ], 37 | ) 38 | 39 | oss_only_cc_test( 40 | name = "session_bundle_util_test", 41 | srcs = ["session_bundle_util_test.cc"], 42 | data = [ 43 | "//tensorflow_serving/session_bundle:session_bundle_half_plus_two", 44 | "@org_tensorflow//tensorflow/cc/saved_model:saved_model_half_plus_two", 45 | ], 46 | deps = [ 47 | ":session_bundle_util", 48 | "//tensorflow_serving/core/test_util:test_main", 49 | "//tensorflow_serving/test_util", 50 | ], 51 | ) 52 | -------------------------------------------------------------------------------- /tensorflow_serving/util/file_probing_env.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/util/file_probing_env.h" 17 | 18 | #include 19 | 20 | namespace tensorflow { 21 | namespace serving { 22 | 23 | Status TensorflowFileProbingEnv::FileExists(const string& fname) { 24 | return env_->FileExists(fname); 25 | } 26 | 27 | Status TensorflowFileProbingEnv::GetChildren(const string& dir, 28 | std::vector* children) { 29 | return env_->GetChildren(dir, children); 30 | } 31 | 32 | Status TensorflowFileProbingEnv::IsDirectory(const string& fname) { 33 | return env_->IsDirectory(fname); 34 | } 35 | 36 | Status TensorflowFileProbingEnv::GetFileSize(const string& fname, 37 | uint64_t* file_size) { 38 | return env_->GetFileSize(fname, file_size); 39 | } 40 | 41 | } // namespace serving 42 | } // namespace tensorflow 43 | -------------------------------------------------------------------------------- /tensorflow_serving/util/oss_or_google.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2019 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | #ifndef TENSORFLOW_SERVING_UTIL_OSS_OR_GOOGLE_H_ 16 | #define TENSORFLOW_SERVING_UTIL_OSS_OR_GOOGLE_H_ 17 | 18 | #define TENSORFLOW_SERVING_OSS 19 | 20 | namespace tensorflow { 21 | namespace serving { 22 | 23 | // Used to distinguish the context of the code; whether it's part of our OSS 24 | // distribution or within Google. 25 | // 26 | // This is useful in cases where we want to enable/disable running some piece of 27 | // code based on whether we are in/out of OSS. 28 | // 29 | // NB that the method is marked 'constexpr' so that the value can be used as 30 | // a compile-time constant. 31 | inline constexpr bool IsTensorflowServingOSS() { 32 | #ifdef TENSORFLOW_SERVING_GOOGLE 33 | return false; 34 | #else 35 | return true; 36 | #endif 37 | } 38 | 39 | } // namespace serving 40 | } // namespace tensorflow 41 | 42 | #endif // TENSORFLOW_SERVING_UTIL_OSS_OR_GOOGLE_H_ 43 | -------------------------------------------------------------------------------- /tensorflow_serving/batching/test_util/BUILD: -------------------------------------------------------------------------------- 1 | # Description: Tensorflow Serving batching test utilities. 
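# Note: puppet_batch_scheduler (below) provides a batch scheduler whose task
# processing is driven explicitly from test code rather than by background
# threads; a test in this package would depend on it via
#   deps = [":puppet_batch_scheduler"],
# as puppet_batch_scheduler_test below does.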
2 | 3 | load("@rules_cc//cc:cc_library.bzl", "cc_library") 4 | load("@rules_cc//cc:cc_test.bzl", "cc_test") 5 | # Placeholder: load py_binary 6 | 7 | package( 8 | default_visibility = ["//tensorflow_serving:internal"], 9 | features = ["-layering_check"], 10 | ) 11 | 12 | licenses(["notice"]) 13 | 14 | filegroup( 15 | name = "all_files", 16 | srcs = glob( 17 | ["**/*"], 18 | exclude = [ 19 | "**/METADATA", 20 | "**/OWNERS", 21 | ], 22 | ), 23 | ) 24 | 25 | cc_library( 26 | name = "puppet_batch_scheduler", 27 | testonly = 1, 28 | hdrs = ["puppet_batch_scheduler.h"], 29 | visibility = ["//visibility:private"], 30 | deps = [ 31 | "@org_tensorflow//tensorflow/core:tensorflow", 32 | "@org_tensorflow//tensorflow/core/kernels/batching_util:batch_scheduler", 33 | ], 34 | ) 35 | 36 | cc_test( 37 | name = "puppet_batch_scheduler_test", 38 | srcs = [ 39 | "puppet_batch_scheduler_test.cc", 40 | ], 41 | deps = [ 42 | ":puppet_batch_scheduler", 43 | "//tensorflow_serving/core/test_util:test_main", 44 | "@org_tensorflow//tensorflow/core:lib", 45 | "@org_tensorflow//tensorflow/core:test", 46 | ], 47 | ) 48 | 49 | # script that generates saved_model for matrix_half_plus_two model. 50 | py_binary( 51 | name = "matrix_half_plus_two_saved_model", 52 | srcs = ["matrix_half_plus_two_saved_model.py"], 53 | python_version = "PY3", 54 | srcs_version = "PY3", 55 | ) 56 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/machine_learning_metadata.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2020 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_MACHINE_LEARNING_METADATA_H_ 17 | #define TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_MACHINE_LEARNING_METADATA_H_ 18 | 19 | #include 20 | 21 | #include "tensorflow/core/platform/status.h" 22 | 23 | namespace tensorflow { 24 | namespace serving { 25 | 26 | // If present, processes Machine Learning Metadata associated with the 27 | // SavedModel. Currently, this broadcasts the MLMD UUID as a key associated 28 | // with a loaded model. 29 | // For more information: https://www.tensorflow.org/tfx/guide/mlmd 30 | void MaybePublishMLMDStreamz(const string& export_dir, const string& model_name, 31 | int64_t version); 32 | 33 | } // namespace serving 34 | } // namespace tensorflow 35 | 36 | #endif // TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_MACHINE_LEARNING_METADATA_H_ 37 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/thread_pool_factory.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2021 Google Inc. All Rights Reserved. 
2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/servables/tensorflow/thread_pool_factory.h" 17 | 18 | #include 19 | #include 20 | 21 | namespace tensorflow { 22 | namespace serving { 23 | 24 | ScopedThreadPools::ScopedThreadPools( 25 | std::shared_ptr inter_op_thread_pool, 26 | std::shared_ptr intra_op_thread_pool) 27 | : inter_op_thread_pool_(std::move(inter_op_thread_pool)), 28 | intra_op_thread_pool_(std::move(intra_op_thread_pool)) {} 29 | 30 | tensorflow::thread::ThreadPoolOptions ScopedThreadPools::get() { 31 | tensorflow::thread::ThreadPoolOptions options; 32 | options.inter_op_threadpool = inter_op_thread_pool_.get(); 33 | options.intra_op_threadpool = intra_op_thread_pool_.get(); 34 | return options; 35 | } 36 | 37 | } // namespace serving 38 | } // namespace tensorflow 39 | -------------------------------------------------------------------------------- /tensorflow_serving/tools/pip_package/BUILD: -------------------------------------------------------------------------------- 1 | # Description: Tensorflow Serving pip package. 2 | 3 | licenses(["notice"]) 4 | 5 | sh_binary( 6 | name = "build_pip_package", 7 | srcs = ["build_pip_package.sh"], 8 | data = [ 9 | "setup.py", 10 | 11 | # Python scripts needed for the Python TF Serving API 12 | "//tensorflow_serving/apis:classification_proto_py_pb2", 13 | "//tensorflow_serving/apis:get_model_metadata_proto_py_pb2", 14 | "//tensorflow_serving/apis:get_model_status_proto_py_pb2", 15 | "//tensorflow_serving/apis:inference_proto_py_pb2", 16 | "//tensorflow_serving/apis:input_proto_py_pb2", 17 | "//tensorflow_serving/apis:model_management_proto_py_pb2", 18 | "//tensorflow_serving/apis:model_proto_py_pb2", 19 | "//tensorflow_serving/apis:model_service_proto_py_pb2", 20 | "//tensorflow_serving/apis:prediction_log_proto_py_pb2", 21 | "//tensorflow_serving/apis:prediction_service_proto_py_pb2", 22 | "//tensorflow_serving/apis:predict_proto_py_pb2", 23 | "//tensorflow_serving/apis:regression_proto_py_pb2", 24 | "//tensorflow_serving/apis:session_service_proto_py_pb2", 25 | "//tensorflow_serving/apis:status_proto_py_pb2", 26 | "//tensorflow_serving/config:log_collector_config_proto_py_pb2", 27 | "//tensorflow_serving/config:logging_config_proto_py_pb2", 28 | "//tensorflow_serving/config:model_server_config_proto_py_pb2", 29 | "//tensorflow_serving/config:file_system_storage_path_source_proto_py_pb2", 30 | "//tensorflow_serving/apis:logging_proto_py_pb2", 31 | ], 32 | ) 33 | -------------------------------------------------------------------------------- /tensorflow_serving/model_servers/http_server.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2018 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | #ifndef TENSORFLOW_SERVING_MODEL_SERVERS_HTTP_SERVER_H_ 16 | #define TENSORFLOW_SERVING_MODEL_SERVERS_HTTP_SERVER_H_ 17 | 18 | #include 19 | 20 | #include "tensorflow_serving/config/monitoring_config.pb.h" 21 | #include "tensorflow_serving/util/net_http/server/public/httpserver_interface.h" 22 | 23 | namespace tensorflow { 24 | namespace serving { 25 | 26 | class ServerCore; 27 | 28 | // Returns a HTTP Server that has following endpoints: 29 | // 30 | // o HTTP/REST API (under /v1/models/...) 31 | // 32 | // The returned server is in a state of accepting new requests. 33 | std::unique_ptr CreateAndStartHttpServer( 34 | int port, int num_threads, int timeout_in_ms, 35 | const MonitoringConfig& monitoring_config, ServerCore* core); 36 | 37 | } // namespace serving 38 | } // namespace tensorflow 39 | #endif // TENSORFLOW_SERVING_MODEL_SERVERS_HTTP_SERVER_H_ 40 | -------------------------------------------------------------------------------- /tensorflow_serving/core/test_util/mock_storage_path_target.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_STORAGE_PATH_TARGET_H_ 17 | #define TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_STORAGE_PATH_TARGET_H_ 18 | 19 | #include 20 | 21 | #include 22 | #include "tensorflow/core/lib/core/status.h" 23 | #include "tensorflow_serving/core/storage_path.h" 24 | #include "tensorflow_serving/core/target.h" 25 | 26 | namespace tensorflow { 27 | namespace serving { 28 | namespace test_util { 29 | 30 | class MockStoragePathTarget : public TargetBase { 31 | public: 32 | ~MockStoragePathTarget() override { Detach(); } 33 | MOCK_METHOD(void, SetAspiredVersions, 34 | (const StringPiece, std::vector>), 35 | (override)); 36 | }; 37 | 38 | } // namespace test_util 39 | } // namespace serving 40 | } // namespace tensorflow 41 | 42 | #endif // TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_STORAGE_PATH_TARGET_H_ 43 | -------------------------------------------------------------------------------- /tensorflow_serving/model_servers/test_util/storage_path_error_injecting_source_adapter.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 
2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_MODEL_SERVERS_TEST_UTIL_STORAGE_PATH_ERROR_INJECTING_SOURCE_ADAPTER_H_ 17 | #define TENSORFLOW_SERVING_MODEL_SERVERS_TEST_UTIL_STORAGE_PATH_ERROR_INJECTING_SOURCE_ADAPTER_H_ 18 | 19 | #include "tensorflow_serving/core/source_adapter.h" 20 | 21 | namespace tensorflow { 22 | namespace serving { 23 | namespace test_util { 24 | 25 | // An ErrorInjectingSourceAdapter<StoragePath, StoragePath> (see 26 | // source_adapter.h) registered in StoragePathSourceAdapterRegistry and keyed on 27 | // StoragePathErrorInjectingSourceAdapterConfig. 28 | using StoragePathErrorInjectingSourceAdapter = 29 | ErrorInjectingSourceAdapter<StoragePath, StoragePath>; 30 | 31 | } // namespace test_util 32 | } // namespace serving 33 | } // namespace tensorflow 34 | 35 | #endif  // TENSORFLOW_SERVING_MODEL_SERVERS_TEST_UTIL_STORAGE_PATH_ERROR_INJECTING_SOURCE_ADAPTER_H_ 36 | -------------------------------------------------------------------------------- /tensorflow_serving/core/storage_path_test.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/core/storage_path.h" 17 | 18 | #include <gtest/gtest.h> 19 | #include "tensorflow/core/lib/core/errors.h" 20 | 21 | namespace tensorflow { 22 | namespace serving { 23 | namespace { 24 | 25 | TEST(StoragePathTest, ServableDataEquality) { 26 | ServableId id0 = {"0", 0}; 27 | ServableId id1 = {"1", 1}; 28 | 29 | ServableData<StoragePath> a(id0, "x"); 30 | ServableData<StoragePath> a2(id0, "x"); 31 | EXPECT_TRUE(a == a); 32 | EXPECT_TRUE(a == a2); 33 | EXPECT_TRUE(a2 == a); 34 | 35 | ServableData<StoragePath> b(id0, "y"); 36 | ServableData<StoragePath> c(id1, "x"); 37 | ServableData<StoragePath> d(id0, errors::Unknown("error")); 38 | for (const ServableData<StoragePath>& other : {b, c, d}) { 39 | EXPECT_TRUE(other == other); 40 | EXPECT_FALSE(a == other); 41 | EXPECT_FALSE(other == a); 42 | } 43 | } 44 | 45 | } // namespace 46 | } // namespace serving 47 | } // namespace tensorflow 48 | -------------------------------------------------------------------------------- /tensorflow_serving/core/test_util/mock_loader.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved.
2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_LOADER_H_ 17 | #define TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_LOADER_H_ 18 | 19 | #include 20 | #include "tensorflow/core/lib/core/status.h" 21 | #include "tensorflow_serving/core/loader.h" 22 | #include "tensorflow_serving/util/any_ptr.h" 23 | 24 | namespace tensorflow { 25 | namespace serving { 26 | namespace test_util { 27 | 28 | class MockLoader : public Loader { 29 | public: 30 | MOCK_METHOD(Status, EstimateResources, (ResourceAllocation * estimate), 31 | (const, override)); 32 | MOCK_METHOD(Status, Load, (), (override)); 33 | MOCK_METHOD(Status, LoadWithMetadata, (const Metadata&), (override)); 34 | MOCK_METHOD(void, Unload, (), (override)); 35 | MOCK_METHOD(AnyPtr, servable, (), (override)); 36 | }; 37 | 38 | } // namespace test_util 39 | } // namespace serving 40 | } // namespace tensorflow 41 | 42 | #endif // TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_LOADER_H_ 43 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/oss/resource_estimator.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2020 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/servables/tensorflow/oss/resource_estimator.h" 17 | 18 | #include "tensorflow/core/platform/path.h" 19 | #include "tensorflow_serving/resources/resource_values.h" 20 | #include "tensorflow_serving/servables/tensorflow/util.h" 21 | 22 | namespace tensorflow { 23 | namespace serving { 24 | 25 | absl::Status EstimateMainRamBytesFromValidationResult( 26 | const string& path, ResourceAllocation* estimate) { 27 | return errors::Unimplemented("Reading validation results is not supported"); 28 | } 29 | 30 | absl::Status EstimateMainRamBytesFromPath(const string& path, 31 | bool use_validation_result, 32 | FileProbingEnv* env, 33 | ResourceAllocation* estimate) { 34 | return EstimateResourceFromPathUsingDiskState(path, env, estimate); 35 | } 36 | 37 | } // namespace serving 38 | } // namespace tensorflow 39 | -------------------------------------------------------------------------------- /tensorflow_serving/apis/internal/serialized_input.proto: -------------------------------------------------------------------------------- 1 | /* Copyright 2017 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | // Serialized counterparts of the messages in input.proto. These protos enable 17 | // us to keep the original tensorflow.serving.Input's structure but with the 18 | // tensorflow.Examples in their serialized form. When combined with lazy 19 | // parsing, this improves performance by allowing us to skip a redundant 20 | // deserialization/serialization loop. 21 | // 22 | // WARNING: These are internal implementation details and not part of the public 23 | // API. 24 | 25 | syntax = "proto3"; 26 | 27 | package tensorflow.serving.internal; 28 | 29 | option cc_enable_arenas = true; 30 | 31 | message SerializedExampleList { 32 | repeated bytes examples = 1; 33 | } 34 | 35 | message SerializedExampleListWithContext { 36 | repeated bytes examples = 1; 37 | bytes context = 2; 38 | } 39 | 40 | message SerializedInput { 41 | oneof kind { 42 | SerializedExampleList example_list = 1; 43 | SerializedExampleListWithContext example_list_with_context = 2; 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /tensorflow_serving/core/manager_wrapper.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/core/manager_wrapper.h" 17 | 18 | #include 19 | #include 20 | #include 21 | #include 22 | 23 | namespace tensorflow { 24 | namespace serving { 25 | 26 | ManagerWrapper::ManagerWrapper(UniquePtrWithDeps wrapped) 27 | : wrapped_(std::move(wrapped)) {} 28 | 29 | std::vector ManagerWrapper::ListAvailableServableIds() const { 30 | return wrapped_->ListAvailableServableIds(); 31 | } 32 | 33 | absl::Status ManagerWrapper::GetUntypedServableHandle( 34 | const ServableRequest& request, 35 | std::unique_ptr* const untyped_handle) { 36 | return wrapped_->GetUntypedServableHandle(request, untyped_handle); 37 | } 38 | 39 | std::map> 40 | ManagerWrapper::GetAvailableUntypedServableHandles() const { 41 | return wrapped_->GetAvailableUntypedServableHandles(); 42 | } 43 | 44 | } // namespace serving 45 | } // namespace tensorflow 46 | -------------------------------------------------------------------------------- /tensorflow_serving/model_servers/prediction_service_util.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2022 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_MODEL_SERVERS_PREDICTION_SERVICE_UTIL_H_ 17 | #define TENSORFLOW_SERVING_MODEL_SERVERS_PREDICTION_SERVICE_UTIL_H_ 18 | 19 | #include "tensorflow_serving/apis/prediction_service.grpc.pb.h" 20 | #include "tensorflow_serving/model_servers/server_core.h" 21 | #include "tensorflow_serving/servables/tensorflow/thread_pool_factory.h" 22 | 23 | namespace tensorflow { 24 | namespace serving { 25 | 26 | // Options for configuring a PredictionServiceBase object. 27 | struct PredictionServiceOptions { 28 | ServerCore* server_core; 29 | bool enforce_session_run_timeout; 30 | ThreadPoolFactory* thread_pool_factory = nullptr; 31 | }; 32 | 33 | // Convert the request deadline represented in absolute time point into number 34 | // of milliseconds from now. 
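// Illustrative usage (a hedged sketch, not part of this header's contract;
// `context` and `run_options` are assumed local variables, e.g. a
// grpc::ServerContext* and a tensorflow::RunOptions, that are not defined here):
//
//   const int timeout_ms = DeadlineToTimeoutMillis(context->raw_deadline());
//   run_options.set_timeout_in_ms(timeout_ms);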
35 | int DeadlineToTimeoutMillis(const gpr_timespec deadline); 36 | 37 | } // namespace serving 38 | } // namespace tensorflow 39 | 40 | #endif // TENSORFLOW_SERVING_MODEL_SERVERS_PREDICTION_SERVICE_UTIL_H_ 41 | -------------------------------------------------------------------------------- /tensorflow_serving/core/test_util/fake_log_collector.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2017 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_CORE_TEST_UTIL_FAKE_LOG_COLLECTOR_H_ 17 | #define TENSORFLOW_SERVING_CORE_TEST_UTIL_FAKE_LOG_COLLECTOR_H_ 18 | 19 | #include "google/protobuf/message.h" 20 | #include "tensorflow/core/lib/core/status.h" 21 | #include "tensorflow_serving/core/log_collector.h" 22 | 23 | namespace tensorflow { 24 | namespace serving { 25 | 26 | // FakeLogCollector which does nothing except count the number of times 27 | // CollectMessage has been called on it. 28 | class FakeLogCollector : public LogCollector { 29 | public: 30 | Status CollectMessage(const google::protobuf::Message& message) override { 31 | ++collect_count_; 32 | return Status(); 33 | } 34 | 35 | Status Flush() override { return Status(); } 36 | 37 | int collect_count() const { return collect_count_; } 38 | 39 | private: 40 | int collect_count_ = 0; 41 | }; 42 | 43 | } // namespace serving 44 | } // namespace tensorflow 45 | 46 | #endif // TENSORFLOW_SERVING_CORE_TEST_UTIL_FAKE_LOG_COLLECTOR_H_ 47 | -------------------------------------------------------------------------------- /tensorflow_serving/util/test_util/mock_file_probing_env.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_UTIL_TEST_UTIL_MOCK_FILE_PROBING_ENV_H_ 17 | #define TENSORFLOW_SERVING_UTIL_TEST_UTIL_MOCK_FILE_PROBING_ENV_H_ 18 | 19 | #include 20 | #include "tensorflow/core/lib/core/status.h" 21 | #include "tensorflow_serving/util/file_probing_env.h" 22 | 23 | namespace tensorflow { 24 | namespace serving { 25 | namespace test_util { 26 | 27 | class MockFileProbingEnv : public FileProbingEnv { 28 | public: 29 | MOCK_METHOD(Status, FileExists, (const string& fname), (override)); 30 | MOCK_METHOD(Status, GetChildren, 31 | (const string& fname, std::vector* children), (override)); 32 | MOCK_METHOD(Status, IsDirectory, (const string& fname), (override)); 33 | MOCK_METHOD(Status, GetFileSize, (const string& fname, uint64_t* file_size), 34 | (override)); 35 | }; 36 | 37 | } // namespace test_util 38 | } // namespace serving 39 | } // namespace tensorflow 40 | 41 | #endif // TENSORFLOW_SERVING_UTIL_TEST_UTIL_MOCK_FILE_PROBING_ENV_H_ 42 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/tfrt_multi_inference.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2020 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_TFRT_MULTI_INFERENCE_H_ 17 | #define TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_TFRT_MULTI_INFERENCE_H_ 18 | 19 | #include "absl/types/optional.h" 20 | #include "tensorflow/core/lib/core/status.h" 21 | #include "tensorflow/core/tfrt/saved_model/saved_model.h" 22 | #include "tensorflow_serving/apis/inference.pb.h" 23 | #include "tensorflow_serving/model_servers/server_core.h" 24 | 25 | namespace tensorflow { 26 | namespace serving { 27 | 28 | // Implementation of MultiInference using the tfrt::SavedModel. 
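// Illustrative call sequence (a sketch only; `saved_model`, `request`, and the
// version value 123 are assumptions for the example, not defined by this header):
//
//   tfrt::SavedModel::RunOptions run_options;
//   MultiInferenceResponse response;
//   TF_RETURN_IF_ERROR(RunMultiInference(run_options, /*servable_version=*/123,
//                                        saved_model.get(), request, &response));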
29 | Status RunMultiInference(const tfrt::SavedModel::RunOptions& run_options, 30 | const absl::optional& servable_version, 31 | tfrt::SavedModel* saved_model, 32 | const MultiInferenceRequest& request, 33 | MultiInferenceResponse* response); 34 | 35 | } // namespace serving 36 | } // namespace tensorflow 37 | 38 | #endif // TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_TFRT_MULTI_INFERENCE_H_ 39 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/oss/BUILD: -------------------------------------------------------------------------------- 1 | load("@rules_cc//cc:cc_library.bzl", "cc_library") 2 | load("@rules_cc//cc:cc_test.bzl", "cc_test") 3 | 4 | package( 5 | default_visibility = [ 6 | "//tensorflow_serving:internal", 7 | ], 8 | features = ["-layering_check"], 9 | ) 10 | 11 | licenses(["notice"]) 12 | 13 | cc_library( 14 | name = "resource_estimator", 15 | srcs = ["resource_estimator.cc"], 16 | hdrs = ["resource_estimator.h"], 17 | deps = [ 18 | "//tensorflow_serving/resources:resource_values", 19 | "//tensorflow_serving/resources:resources_cc_proto", 20 | "//tensorflow_serving/servables/tensorflow:util", 21 | "//tensorflow_serving/util:file_probing_env", 22 | "@org_tensorflow//tensorflow/core:lib", 23 | ], 24 | ) 25 | 26 | cc_test( 27 | name = "resource_estimator_test", 28 | srcs = ["resource_estimator_test.cc"], 29 | deps = [ 30 | ":resource_estimator", 31 | "//tensorflow_serving/core/test_util:test_main", 32 | "//tensorflow_serving/servables/tensorflow:bundle_factory_test_util", 33 | "//tensorflow_serving/test_util", 34 | "//tensorflow_serving/util/test_util:mock_file_probing_env", 35 | "@org_tensorflow//tensorflow/core:lib", 36 | "@org_tensorflow//tensorflow/core:test", 37 | ], 38 | ) 39 | 40 | cc_library( 41 | name = "run_options", 42 | hdrs = ["run_options.h"], 43 | deps = [ 44 | "//tensorflow_serving/servables/tensorflow:run_options_base", 45 | ], 46 | ) 47 | 48 | cc_library( 49 | name = "file_acl", 50 | srcs = ["file_acl.cc"], 51 | hdrs = ["file_acl.h"], 52 | compatible_with = [], 53 | deps = [ 54 | "//tensorflow_serving/core:servable_id", 55 | "@com_google_absl//absl/status", 56 | ], 57 | ) 58 | -------------------------------------------------------------------------------- /tensorflow_serving/batching/threadsafe_status.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2020 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/batching/threadsafe_status.h" 17 | 18 | #include 19 | 20 | #include "absl/base/thread_annotations.h" 21 | #include "absl/status/status.h" 22 | #include "absl/synchronization/mutex.h" 23 | #include "tensorflow/core/platform/mutex.h" 24 | 25 | namespace tensorflow { 26 | namespace serving { 27 | const absl::Status& ThreadSafeStatus::status() const& { 28 | tf_shared_lock lock(mutex_); 29 | return status_; 30 | } 31 | 32 | absl::Status ThreadSafeStatus::status() && { 33 | tf_shared_lock lock(mutex_); 34 | return std::move(status_); 35 | } 36 | 37 | void ThreadSafeStatus::Update(const absl::Status& new_status) { 38 | if (new_status.ok()) { 39 | return; 40 | } 41 | 42 | mutex_lock lock(mutex_); 43 | status_.Update(new_status); 44 | } 45 | 46 | void ThreadSafeStatus::Update(absl::Status&& new_status) { 47 | if (new_status.ok()) { 48 | return; 49 | } 50 | 51 | mutex_lock lock(mutex_); 52 | status_.Update(std::forward(new_status)); 53 | } 54 | } // namespace serving 55 | } // namespace tensorflow 56 | -------------------------------------------------------------------------------- /tensorflow_serving/model_servers/platform_config_util.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/model_servers/platform_config_util.h" 17 | 18 | #include "google/protobuf/any.pb.h" 19 | #include "tensorflow_serving/model_servers/model_platform_types.h" 20 | #include "tensorflow_serving/servables/tensorflow/saved_model_bundle_source_adapter.pb.h" 21 | 22 | namespace tensorflow { 23 | namespace serving { 24 | 25 | PlatformConfigMap CreateTensorFlowPlatformConfigMap( 26 | const SessionBundleConfig& session_bundle_config) { 27 | PlatformConfigMap platform_config_map; 28 | ::google::protobuf::Any source_adapter_config; 29 | SavedModelBundleSourceAdapterConfig saved_model_bundle_source_adapter_config; 30 | *saved_model_bundle_source_adapter_config.mutable_legacy_config() = 31 | session_bundle_config; 32 | source_adapter_config.PackFrom(saved_model_bundle_source_adapter_config); 33 | (*(*platform_config_map.mutable_platform_configs())[kTensorFlowModelPlatform] 34 | .mutable_source_adapter_config()) = source_adapter_config; 35 | return platform_config_map; 36 | } 37 | 38 | } // namespace serving 39 | } // namespace tensorflow 40 | -------------------------------------------------------------------------------- /tensorflow_serving/sources/storage_path/static_storage_path_source.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 
2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/sources/storage_path/static_storage_path_source.h" 17 | 18 | #include 19 | #include 20 | #include 21 | 22 | #include "tensorflow_serving/core/servable_data.h" 23 | #include "tensorflow_serving/core/servable_id.h" 24 | 25 | namespace tensorflow { 26 | namespace serving { 27 | 28 | absl::Status StaticStoragePathSource::Create( 29 | const StaticStoragePathSourceConfig& config, 30 | std::unique_ptr* result) { 31 | auto raw_result = new StaticStoragePathSource; 32 | raw_result->config_ = config; 33 | result->reset(raw_result); 34 | return absl::Status(); 35 | } 36 | 37 | void StaticStoragePathSource::SetAspiredVersionsCallback( 38 | AspiredVersionsCallback callback) { 39 | const ServableId id = {config_.servable_name(), config_.version_num()}; 40 | LOG(INFO) << "Aspiring servable " << id; 41 | callback(config_.servable_name(), 42 | {CreateServableData(id, config_.version_path())}); 43 | } 44 | 45 | } // namespace serving 46 | } // namespace tensorflow 47 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing guidelines 2 | 3 | ## How to become a contributor and submit your own code 4 | 5 | ### Contributor License Agreements 6 | 7 | We'd love to accept your patches! Before we can take them, we have to jump a couple of legal hurdles. 8 | 9 | Please fill out either the individual or corporate Contributor License Agreement (CLA). 10 | 11 | * If you are an individual writing original source code and you're sure you own the intellectual property, then you'll need to sign an [individual CLA](http://code.google.com/legal/individual-cla-v1.0.html). 12 | * If you work for a company that wants to allow you to contribute your work, then you'll need to sign a [corporate CLA](http://code.google.com/legal/corporate-cla-v1.0.html). 13 | 14 | Follow either of the two links above to access the appropriate CLA and instructions for how to sign and return it. Once we receive it, we'll be able to accept your pull requests. 15 | 16 | ***NOTE***: Only original source code from you and other people that have signed the CLA can be accepted into the main repository. 17 | 18 | ### Contributing code 19 | 20 | If you have improvements to TensorFlow Serving, send us your pull requests! 21 | For those just getting started, Github has a [howto](https://help.github.com/articles/using-pull-requests/). 22 | 23 | If you want to contribute but you're not sure where to start, take a look at the 24 | [issues with the "contributions welcome" label](https://github.com/tensorflow/serving/labels/contributions%20welcome). 25 | These are issues that we believe are particularly well suited for outside 26 | contributions, often because we probably won't get to them right now. 
If you 27 | decide to start on an issue, leave a comment so that other people know that 28 | you're working on it. If you want to help out, but not alone, use the issue 29 | comment thread to coordinate. 30 | -------------------------------------------------------------------------------- /tensorflow_serving/util/retrier.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2017 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_UTIL_RETRIER_H_ 17 | #define TENSORFLOW_SERVING_UTIL_RETRIER_H_ 18 | 19 | #include 20 | #include 21 | 22 | #include "absl/status/status.h" 23 | #include "tensorflow/core/lib/core/status.h" 24 | 25 | namespace tensorflow { 26 | namespace serving { 27 | 28 | // Tries running 'retried_fn' once, and if it doesn't succeed, retries running 29 | // the 'retried_fn' till it returns an ok status or max_num_retries are 30 | // exhausted or should_retry() returns false. Each retry is attempted after an 31 | // interval of 'retry_interval_micros'. The 'description' is useful for logging. 32 | // 33 | // Returns the status returned by the last call to 'retried_fn'. 34 | absl::Status Retry( 35 | const string& description, uint32 max_num_retries, 36 | int64_t retry_interval_micros, 37 | const std::function& retried_fn, 38 | const std::function& should_retry = 39 | [](absl::Status status) { return true; }); 40 | 41 | } // namespace serving 42 | } // namespace tensorflow 43 | 44 | #endif // TENSORFLOW_SERVING_UTIL_RETRIER_H_ 45 | -------------------------------------------------------------------------------- /tensorflow_serving/util/prometheus_exporter.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2018 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_UTIL_PROMETHEUS_EXPORTER_H_ 17 | #define TENSORFLOW_SERVING_UTIL_PROMETHEUS_EXPORTER_H_ 18 | 19 | #include 20 | 21 | #include "tensorflow/core/lib/core/status.h" 22 | #include "tensorflow/core/lib/monitoring/collected_metrics.h" 23 | #include "tensorflow/core/lib/monitoring/collection_registry.h" 24 | 25 | namespace tensorflow { 26 | namespace serving { 27 | 28 | // Exports metrics in Prometheus monitoring format. 29 | class PrometheusExporter { 30 | public: 31 | // Default path to expose the metrics. 32 | static const char* const kPrometheusPath; 33 | 34 | PrometheusExporter(); 35 | 36 | // Generates text page in Prometheus format: 37 | // https://prometheus.io/docs/instrumenting/exposition_formats/#text-format-example 38 | // If an error status returned, http_page is unchanged. 39 | Status GeneratePage(string* http_page); 40 | 41 | private: 42 | // The metrics registry. 43 | monitoring::CollectionRegistry* collection_registry_; 44 | }; 45 | 46 | } // namespace serving 47 | } // namespace tensorflow 48 | 49 | #endif // TENSORFLOW_SERVING_UTIL_PROMETHEUS_EXPORTER_H_ 50 | -------------------------------------------------------------------------------- /tensorflow_serving/core/test_util/mock_server_request_logger.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2017 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_SERVER_REQUEST_LOGGER_H_ 17 | #define TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_SERVER_REQUEST_LOGGER_H_ 18 | 19 | #include 20 | #include 21 | 22 | #include 23 | #include "tensorflow_serving/core/server_request_logger.h" 24 | 25 | namespace tensorflow { 26 | namespace serving { 27 | namespace test_util { 28 | 29 | class MockServerRequestLogger : public ServerRequestLogger { 30 | public: 31 | MockServerRequestLogger() : ServerRequestLogger({}) {} 32 | 33 | MOCK_METHOD(Status, Update, 34 | ((const std::map>& 35 | logging_config_map)), 36 | (override)); 37 | 38 | MOCK_METHOD(Status, Log, 39 | (const google::protobuf::Message& request, const google::protobuf::Message& response, 40 | const LogMetadata& log_metadata), 41 | (override)); 42 | }; 43 | 44 | } // namespace test_util 45 | } // namespace serving 46 | } // namespace tensorflow 47 | 48 | #endif // TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_SERVER_REQUEST_LOGGER_H_ 49 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/saved_model_config.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2023 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_SAVED_MODEL_CONFIG_H_ 17 | #define TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_SAVED_MODEL_CONFIG_H_ 18 | 19 | #include 20 | 21 | #include "tensorflow/core/platform/status.h" 22 | #include "tensorflow/core/protobuf/config.pb.h" 23 | #include "tensorflow/core/tfrt/graph_executor/config.h" 24 | 25 | namespace tensorflow { 26 | namespace serving { 27 | 28 | // Returns error if the `assets.extra/saved_model_config.pb` cannot be parsed. 29 | // Returns success otherwise (including empty or no `saved_model_config.pb`). 30 | // On success, reads SavedModelConfig proto from the specified model directory, 31 | // adds or replaces some optimization options in 32 | // `tensorflow::serving::RewriterConfig` of `tensorflow::GraphOptions` and 33 | // replaces the `runtime_config`. 34 | Status LoadSavedModelConfig( 35 | const std::string& export_dir, tensorflow::GraphOptions& graph_options, 36 | tensorflow::tfrt_stub::RuntimeConfig& runtime_config); 37 | 38 | } // namespace serving 39 | } // namespace tensorflow 40 | 41 | #endif // TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_SAVED_MODEL_CONFIG_H_ 42 | -------------------------------------------------------------------------------- /tensorflow_serving/core/storage_path.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | // Typedefs and registries pertaining to storage system paths. 17 | 18 | #ifndef TENSORFLOW_SERVING_CORE_STORAGE_PATH_H_ 19 | #define TENSORFLOW_SERVING_CORE_STORAGE_PATH_H_ 20 | 21 | #include 22 | #include 23 | #include 24 | 25 | #include "tensorflow/core/lib/core/status.h" 26 | #include "tensorflow/core/platform/types.h" 27 | #include "tensorflow_serving/core/servable_data.h" 28 | #include "tensorflow_serving/core/servable_id.h" 29 | 30 | namespace tensorflow { 31 | namespace serving { 32 | 33 | // Strings that represent paths in some storage system. 
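// For example (illustrative values only), a StoragePath may hold a local
// directory such as "/tmp/models/half_plus_two/00000123" or a remote location
// such as "gs://my-bucket/models/half_plus_two/00000123".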
34 | using StoragePath = string; 35 | 36 | inline bool operator==(const ServableData& a, 37 | const ServableData& b) { 38 | if (a.id() != b.id()) { 39 | return false; 40 | } 41 | if (a.status().ok() != b.status().ok()) { 42 | return false; 43 | } 44 | if (a.status().ok()) { 45 | return a.DataOrDie() == b.DataOrDie(); 46 | } else { 47 | return a.status() == b.status(); 48 | } 49 | } 50 | 51 | } // namespace serving 52 | } // namespace tensorflow 53 | 54 | #endif // TENSORFLOW_SERVING_CORE_STORAGE_PATH_H_ 55 | -------------------------------------------------------------------------------- /tensorflow_serving/core/static_manager.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/core/static_manager.h" 17 | 18 | #include 19 | #include 20 | 21 | namespace tensorflow { 22 | namespace serving { 23 | 24 | StaticManagerBuilder::StaticManagerBuilder() { 25 | BasicManager::Options basic_manager_options; 26 | // We don't want multithreading. 27 | basic_manager_options.num_load_threads = 0; 28 | basic_manager_options.num_unload_threads = 0; 29 | const absl::Status basic_manager_status = 30 | BasicManager::Create(std::move(basic_manager_options), &basic_manager_); 31 | if (!basic_manager_status.ok()) { 32 | LOG(ERROR) << "Error creating BasicManager: " << health_; 33 | health_ = basic_manager_status; 34 | } 35 | } 36 | 37 | std::unique_ptr StaticManagerBuilder::Build() { 38 | if (!health_.ok()) { 39 | LOG(ERROR) << health_; 40 | return nullptr; 41 | } 42 | 43 | // If Build() is called again, we'll produce the following error. 44 | health_ = errors::FailedPrecondition( 45 | "Build() already called on this StaticManagerBuilder."); 46 | 47 | return std::move(basic_manager_); 48 | } 49 | 50 | } // namespace serving 51 | } // namespace tensorflow 52 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/tfrt_get_model_metadata_impl.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2021 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_TFRT_GET_MODEL_METADATA_IMPL_H_ 17 | #define TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_TFRT_GET_MODEL_METADATA_IMPL_H_ 18 | 19 | #include "tensorflow/core/lib/core/status.h" 20 | #include "tensorflow_serving/apis/get_model_metadata.pb.h" 21 | #include "tensorflow_serving/model_servers/server_core.h" 22 | 23 | namespace tensorflow { 24 | namespace serving { 25 | 26 | class TFRTGetModelMetadataImpl { 27 | public: 28 | static Status GetModelMetadata(ServerCore* core, 29 | const GetModelMetadataRequest& request, 30 | GetModelMetadataResponse* response); 31 | 32 | // Like GetModelMetadata(), but uses 'model_spec' instead of the one embedded 33 | // in 'request'. 34 | static Status GetModelMetadataWithModelSpec( 35 | ServerCore* core, const ModelSpec& model_spec, 36 | const GetModelMetadataRequest& request, 37 | GetModelMetadataResponse* response); 38 | }; 39 | 40 | } // namespace serving 41 | } // namespace tensorflow 42 | 43 | #endif // TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_TFRT_GET_MODEL_METADATA_IMPL_H_ 44 | -------------------------------------------------------------------------------- /tensorflow_serving/batching/threadsafe_status_test.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2020 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/batching/threadsafe_status.h" 17 | 18 | #include "tensorflow/core/lib/core/status_test_util.h" 19 | #include "tensorflow/core/platform/errors.h" 20 | #include "tensorflow/core/platform/test.h" 21 | #include "tensorflow/core/protobuf/error_codes.pb.h" 22 | 23 | namespace tensorflow { 24 | namespace serving { 25 | namespace { 26 | 27 | TEST(ThreadSafeStatus, DefaultOk) { 28 | ThreadSafeStatus status; 29 | TF_EXPECT_OK(status.status()); 30 | } 31 | 32 | TEST(ThreadSafeStatus, Update) { 33 | ThreadSafeStatus status; 34 | TF_EXPECT_OK(status.status()); 35 | 36 | status.Update(errors::FailedPrecondition("original error")); 37 | EXPECT_EQ(status.status().code(), error::FAILED_PRECONDITION); 38 | 39 | status.Update(absl::OkStatus()); 40 | EXPECT_EQ(status.status().code(), error::FAILED_PRECONDITION); 41 | 42 | status.Update(errors::Internal("new error")); 43 | EXPECT_EQ(status.status().code(), error::FAILED_PRECONDITION); 44 | } 45 | 46 | TEST(ThreadSafeStatus, Move) { 47 | ThreadSafeStatus status; 48 | TF_EXPECT_OK(std::move(status).status()); 49 | } 50 | 51 | } // namespace 52 | } // namespace serving 53 | } // namespace tensorflow 54 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/saved_model_warmup.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2018 Google Inc. 
All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_SAVED_MODEL_WARMUP_H_ 17 | #define TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_SAVED_MODEL_WARMUP_H_ 18 | 19 | #include 20 | 21 | #include "tensorflow/cc/saved_model/loader.h" 22 | #include "tensorflow/core/protobuf/saved_model.pb.h" 23 | #include "tensorflow/core/public/session.h" 24 | #include "tensorflow_serving/servables/tensorflow/saved_model_warmup_util.h" 25 | #include "tensorflow_serving/servables/tensorflow/session_bundle_config.pb.h" 26 | 27 | namespace tensorflow { 28 | namespace serving { 29 | 30 | // Run warmup requests to trigger lazy initializations (such as TF 31 | // optimizations, XLA compilations) at load time, and consequently improve first 32 | // request latency. 33 | // Supported request types: Regress, Classify, Predict, MultiInference. 34 | Status RunSavedModelWarmup(const ModelWarmupOptions& model_warmup_options, 35 | const RunOptions& run_options, 36 | const string& export_dir, SavedModelBundle* bundle); 37 | 38 | } // namespace serving 39 | } // namespace tensorflow 40 | 41 | #endif // TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_SAVED_MODEL_WARMUP_H_ 42 | -------------------------------------------------------------------------------- /tensorflow_serving/core/test_util/mock_prediction_stream_logger.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2023 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | ==============================================================================*/ 15 | 16 | #ifndef THIRD_PARTY_TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_PREDICTION_STREAM_LOGGER_H_ 17 | #define THIRD_PARTY_TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_PREDICTION_STREAM_LOGGER_H_ 18 | 19 | #include 20 | 21 | #include 22 | #include "tensorflow_serving/apis/logging.pb.h" 23 | #include "tensorflow_serving/apis/predict.pb.h" 24 | #include "tensorflow_serving/core/stream_logger.h" 25 | 26 | namespace tensorflow { 27 | namespace serving { 28 | namespace test_util { 29 | 30 | class MockPredictionStreamLogger 31 | : public StreamLogger { 32 | public: 33 | ~MockPredictionStreamLogger() override = default; 34 | 35 | MOCK_METHOD(void, LogStreamRequest, (PredictRequest), (override)); 36 | MOCK_METHOD(void, LogStreamResponse, (PredictResponse), (override)); 37 | MOCK_METHOD(absl::Status, CreateLogMessage, 38 | (const LogMetadata&, std::unique_ptr*), 39 | (override)); 40 | }; 41 | 42 | } // namespace test_util 43 | } // namespace serving 44 | } // namespace tensorflow 45 | 46 | #endif // THIRD_PARTY_TENSORFLOW_SERVING_CORE_TEST_UTIL_MOCK_PREDICTION_STREAM_LOGGER_H_ 47 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/hashmap/BUILD: -------------------------------------------------------------------------------- 1 | # Description: Tensorflow Serving hashmap servable. 2 | 3 | load("@rules_cc//cc:cc_library.bzl", "cc_library") 4 | load("@rules_cc//cc:cc_test.bzl", "cc_test") 5 | load("//tensorflow_serving:serving.bzl", "serving_proto_library") 6 | 7 | package( 8 | default_visibility = ["//tensorflow_serving:internal"], 9 | features = ["-layering_check"], 10 | ) 11 | 12 | licenses(["notice"]) 13 | 14 | filegroup( 15 | name = "all_files", 16 | srcs = glob( 17 | ["**/*"], 18 | exclude = [ 19 | "**/METADATA", 20 | "**/OWNERS", 21 | "g3doc/sitemap.md", 22 | ], 23 | ), 24 | ) 25 | 26 | cc_library( 27 | name = "hashmap_source_adapter", 28 | srcs = ["hashmap_source_adapter.cc"], 29 | hdrs = ["hashmap_source_adapter.h"], 30 | visibility = ["//visibility:private"], 31 | deps = [ 32 | ":hashmap_source_adapter_cc_proto", 33 | "//tensorflow_serving/core:simple_loader", 34 | "//tensorflow_serving/core:source_adapter", 35 | "//tensorflow_serving/core:storage_path", 36 | "@org_tensorflow//tensorflow/core:lib", 37 | "@org_tensorflow//tensorflow/core:tensorflow", 38 | ], 39 | ) 40 | 41 | cc_test( 42 | name = "hashmap_source_adapter_test", 43 | size = "medium", 44 | srcs = ["hashmap_source_adapter_test.cc"], 45 | deps = [ 46 | ":hashmap_source_adapter", 47 | ":hashmap_source_adapter_cc_proto", 48 | "//tensorflow_serving/core:loader", 49 | "//tensorflow_serving/core:servable_data", 50 | "//tensorflow_serving/core/test_util:test_main", 51 | "//tensorflow_serving/util:any_ptr", 52 | "@org_tensorflow//tensorflow/core:lib", 53 | "@org_tensorflow//tensorflow/core:test", 54 | ], 55 | ) 56 | 57 | serving_proto_library( 58 | name = "hashmap_source_adapter_proto", 59 | srcs = ["hashmap_source_adapter.proto"], 60 | ) 61 | -------------------------------------------------------------------------------- /tensorflow_serving/servables/tensorflow/get_model_metadata_impl.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2017 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #ifndef TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_GET_MODEL_METADATA_IMPL_H_ 17 | #define TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_GET_MODEL_METADATA_IMPL_H_ 18 | 19 | #include "tensorflow/core/lib/core/status.h" 20 | #include "tensorflow_serving/apis/get_model_metadata.pb.h" 21 | #include "tensorflow_serving/model_servers/server_core.h" 22 | 23 | namespace tensorflow { 24 | namespace serving { 25 | 26 | class GetModelMetadataImpl { 27 | public: 28 | static constexpr const char kSignatureDef[] = "signature_def"; 29 | 30 | static Status GetModelMetadata(ServerCore* core, 31 | const GetModelMetadataRequest& request, 32 | GetModelMetadataResponse* response); 33 | 34 | // Like GetModelMetadata(), but uses 'model_spec' instead of the one embedded 35 | // in 'request'. 36 | static Status GetModelMetadataWithModelSpec( 37 | ServerCore* core, const ModelSpec& model_spec, 38 | const GetModelMetadataRequest& request, 39 | GetModelMetadataResponse* response); 40 | }; 41 | 42 | } // namespace serving 43 | } // namespace tensorflow 44 | 45 | #endif // TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_GET_MODEL_METADATA_IMPL_H_ 46 | -------------------------------------------------------------------------------- /tensorflow_serving/core/test_util/fake_storage_path_source_adapter.cc: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | #include "tensorflow_serving/core/test_util/fake_storage_path_source_adapter.h" 17 | 18 | #include 19 | 20 | #include "tensorflow/core/lib/core/errors.h" 21 | 22 | namespace tensorflow { 23 | namespace serving { 24 | namespace test_util { 25 | 26 | FakeStoragePathSourceAdapter::FakeStoragePathSourceAdapter( 27 | const string& suffix, std::function call_on_destruct) 28 | : suffix_(suffix), call_on_destruct_(call_on_destruct) {} 29 | 30 | FakeStoragePathSourceAdapter::~FakeStoragePathSourceAdapter() { 31 | Detach(); 32 | if (call_on_destruct_) { 33 | call_on_destruct_(suffix_); 34 | } 35 | } 36 | 37 | absl::Status FakeStoragePathSourceAdapter::Convert( 38 | const StoragePath& data, StoragePath* const converted_data) { 39 | if (data == "invalid") { 40 | return errors::InvalidArgument( 41 | "FakeStoragePathSourceAdapter Convert() dutifully failing on " 42 | "\"invalid\" " 43 | "data"); 44 | } 45 | *converted_data = 46 | suffix_.empty() ? 
data : strings::StrCat(data, "/", suffix_); 47 | return absl::Status(); 48 | } 49 | 50 | } // namespace test_util 51 | } // namespace serving 52 | } // namespace tensorflow 53 | -------------------------------------------------------------------------------- /tensorflow_serving/core/test_util/availability_test_util.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | ==============================================================================*/ 15 | 16 | // Methods related to the availability of servables, that are useful in writing 17 | // tests. (Not intended for production use.) 18 | 19 | #ifndef TENSORFLOW_SERVING_CORE_TEST_UTIL_AVAILABILITY_TEST_UTIL_H_ 20 | #define TENSORFLOW_SERVING_CORE_TEST_UTIL_AVAILABILITY_TEST_UTIL_H_ 21 | 22 | #include "tensorflow_serving/core/servable_state_monitor.h" 23 | 24 | namespace tensorflow { 25 | namespace serving { 26 | namespace test_util { 27 | 28 | // Waits until 'monitor' shows that the manager state of 'servable' is one of 29 | // 'states'. 30 | void WaitUntilServableManagerStateIsOneOf( 31 | const ServableStateMonitor& monitor, const ServableId& servable, 32 | const std::vector& states); 33 | 34 | // Waits until 'monitor' shows that the manager state servable ids is 35 | // kAvailable. 36 | void WaitUntilVersionsAvailable(const ServableStateMonitor& monitor, 37 | const string& servable_id_name, 38 | absl::Span servable_id_versions); 39 | 40 | } // namespace test_util 41 | } // namespace serving 42 | } // namespace tensorflow 43 | 44 | #endif // TENSORFLOW_SERVING_CORE_TEST_UTIL_AVAILABILITY_TEST_UTIL_H_ 45 | -------------------------------------------------------------------------------- /tensorflow_serving/util/threadpool_executor.h: -------------------------------------------------------------------------------- 1 | /* Copyright 2016 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
--------------------------------------------------------------------------------
/tensorflow_serving/util/threadpool_executor.h:
--------------------------------------------------------------------------------
/* Copyright 2016 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef TENSORFLOW_SERVING_UTIL_THREADPOOL_EXECUTOR_H_
#define TENSORFLOW_SERVING_UTIL_THREADPOOL_EXECUTOR_H_

#include <string>  // assumed; the original include target is missing in this listing

#include "tensorflow/core/lib/core/threadpool.h"
#include "tensorflow/core/platform/env.h"
#include "tensorflow_serving/util/executor.h"

namespace tensorflow {
namespace serving {

// An executor which uses a pool of threads to execute the scheduled closures.
class ThreadPoolExecutor : public Executor {
 public:
  // Constructs a threadpool that has 'num_threads' threads with the specified
  // 'thread_pool_name'. Env is used to start the threads.
  //
  // REQUIRES: num_threads > 0.
  ThreadPoolExecutor(Env* env, const string& thread_pool_name, int num_threads);

  // Waits until all scheduled work has finished and then destroys the set of
  // threads.
  ~ThreadPoolExecutor() override;

  void Schedule(std::function<void()> fn) override;

 private:
  thread::ThreadPool thread_pool_;

  TF_DISALLOW_COPY_AND_ASSIGN(ThreadPoolExecutor);
};

}  // namespace serving
}  // namespace tensorflow

#endif  // TENSORFLOW_SERVING_UTIL_THREADPOOL_EXECUTOR_H_
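A short usage sketch for ThreadPoolExecutor (illustrative only; the pool name and
thread count are arbitrary, and absl::BlockingCounter is used just to make the
example self-contained):

#include "absl/synchronization/blocking_counter.h"
#include "tensorflow/core/platform/env.h"
#include "tensorflow_serving/util/threadpool_executor.h"

// Fans four closures out to the pool and waits until all of them have run.
void RunOnPool() {
  tensorflow::serving::ThreadPoolExecutor executor(
      tensorflow::Env::Default(), "demo_pool", /*num_threads=*/4);
  absl::BlockingCounter done(4);
  for (int i = 0; i < 4; ++i) {
    executor.Schedule([&done] { done.DecrementCount(); });
  }
  done.Wait();  // The destructor also blocks until scheduled closures finish.
}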
--------------------------------------------------------------------------------
/tensorflow_serving/core/servable_data_test.cc:
--------------------------------------------------------------------------------
/* Copyright 2016 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow_serving/core/servable_data.h"

#include <string>  // assumed; the original include target is missing in this listing

#include <gtest/gtest.h>
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/lib/core/status_test_util.h"
#include "tensorflow/core/platform/types.h"

namespace tensorflow {
namespace serving {
namespace {

TEST(ServableDataTest, NoError) {
  ServableId id = {"name", 42};
  ServableData<string> data(id, "yo");
  EXPECT_EQ(id, data.id());
  TF_EXPECT_OK(data.status());
  EXPECT_EQ("yo", data.DataOrDie());
  EXPECT_EQ("yo", data.ConsumeDataOrDie());
}

TEST(ServableDataTest, StaticCreateNoError) {
  ServableId id = {"name", 42};
  auto data = CreateServableData<string>(id, "yo");
  EXPECT_EQ(id, data.id());
  TF_EXPECT_OK(data.status());
  EXPECT_EQ("yo", data.DataOrDie());
  EXPECT_EQ("yo", data.ConsumeDataOrDie());
}

TEST(ServableDataTest, Error) {
  ServableId id = {"name", 42};
  ServableData<string> data(id, errors::Unknown("d'oh"));
  EXPECT_EQ(id, data.id());
  EXPECT_EQ(errors::Unknown("d'oh"), data.status());
}

}  // namespace
}  // namespace serving
}  // namespace tensorflow

--------------------------------------------------------------------------------
/tensorflow_serving/model_servers/tensorflow_model_server_test_client.py:
--------------------------------------------------------------------------------
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Manual test client for tensorflow_model_server."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import grpc
import tensorflow as tf

from tensorflow.core.framework import types_pb2
from tensorflow.python.platform import flags
from tensorflow_serving.apis import predict_pb2
from tensorflow_serving.apis import prediction_service_pb2_grpc


tf.compat.v1.app.flags.DEFINE_string('server', 'localhost:8500',
                                     'inception_inference service host:port')
FLAGS = tf.compat.v1.app.flags.FLAGS


def main(_):
  # Prepare request
  request = predict_pb2.PredictRequest()
  request.model_spec.name = 'default'
  request.inputs['x'].dtype = types_pb2.DT_FLOAT
  request.inputs['x'].float_val.append(2.0)
  request.output_filter.append('y')
  # Send request
  channel = grpc.insecure_channel(FLAGS.server)
  stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
  print(stub.Predict(request, 5.0))  # 5 secs timeout


if __name__ == '__main__':
  tf.compat.v1.app.run()

--------------------------------------------------------------------------------
/tensorflow_serving/servables/tensorflow/tfrt_saved_model_warmup.h:
--------------------------------------------------------------------------------
/* Copyright 2020 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_TFRT_SAVED_MODEL_WARMUP_H_
#define TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_TFRT_SAVED_MODEL_WARMUP_H_

#include <string>  // assumed; the original include target is missing in this listing

#include "tensorflow/cc/saved_model/loader.h"
#include "tensorflow/core/protobuf/saved_model.pb.h"
#include "tensorflow/core/tfrt/saved_model/saved_model.h"
#include "tensorflow_serving/servables/tensorflow/saved_model_warmup_util.h"
#include "tensorflow_serving/servables/tensorflow/session_bundle_config.pb.h"

namespace tensorflow {
namespace serving {

// Runs warmup requests to trigger lazy initializations (such as TF
// optimizations, XLA compilations) at load time, and consequently improves
// first-request latency.
// Supported request types: Predict.
Status RunSavedModelWarmup(const ModelWarmupOptions& model_warmup_options,
                           const string& export_dir, int lazy_init_threshold,
                           bool skip_warmup_requests_if_initialized,
                           tfrt::SavedModel* saved_model);

}  // namespace serving
}  // namespace tensorflow

#endif  // TENSORFLOW_SERVING_SERVABLES_TENSORFLOW_TFRT_SAVED_MODEL_WARMUP_H_

--------------------------------------------------------------------------------
/tensorflow_serving/core/aspired_versions_manager_builder.cc:
--------------------------------------------------------------------------------
/* Copyright 2016 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow_serving/core/aspired_versions_manager_builder.h"

#include <memory>
#include <utility>

#include "tensorflow_serving/core/manager_wrapper.h"

namespace tensorflow {
namespace serving {

absl::Status AspiredVersionsManagerBuilder::Create(
    Options options, std::unique_ptr<AspiredVersionsManagerBuilder>* builder) {
  std::unique_ptr<AspiredVersionsManager> aspired_versions_manager;
  TF_RETURN_IF_ERROR(AspiredVersionsManager::Create(std::move(options),
                                                    &aspired_versions_manager));
  builder->reset(
      new AspiredVersionsManagerBuilder(std::move(aspired_versions_manager)));
  return absl::OkStatus();
}

AspiredVersionsManagerBuilder::AspiredVersionsManagerBuilder(
    std::unique_ptr<AspiredVersionsManager> manager)
    : aspired_versions_manager_(manager.get()) {
  manager_with_sources_.SetOwned(std::move(manager));
}

std::unique_ptr<Manager> AspiredVersionsManagerBuilder::Build() {
  return std::unique_ptr<Manager>(
      new ManagerWrapper(std::move(manager_with_sources_)));
}

}  // namespace serving
}  // namespace tensorflow
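A sketch of the intended call pattern for the builder above (illustrative only; it
assumes the AddSource() accessor and the Options alias declared in
aspired_versions_manager_builder.h, and 'source' stands for any
Source<std::unique_ptr<Loader>> implementation):

#include <memory>
#include <utility>

#include "tensorflow_serving/core/aspired_versions_manager_builder.h"

namespace tensorflow {
namespace serving {

// Creates a builder, attaches one loader source and returns the built Manager.
absl::Status BuildManagerFromSource(
    AspiredVersionsManagerBuilder::Options options,
    std::unique_ptr<Source<std::unique_ptr<Loader>>> source,
    std::unique_ptr<Manager>* manager) {
  std::unique_ptr<AspiredVersionsManagerBuilder> builder;
  TF_RETURN_IF_ERROR(
      AspiredVersionsManagerBuilder::Create(std::move(options), &builder));
  builder->AddSource(std::move(source));  // AddSource() assumed, see note above.
  *manager = builder->Build();
  return absl::OkStatus();
}

}  // namespace serving
}  // namespace tensorflow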
--------------------------------------------------------------------------------
/tensorflow_serving/experimental/tensorflow/ops/remote_predict/kernels/prediction_service_grpc_test.cc:
--------------------------------------------------------------------------------
/* Copyright 2020 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow_serving/experimental/tensorflow/ops/remote_predict/kernels/prediction_service_grpc.h"

#include <gtest/gtest.h>

#include "absl/time/clock.h"
#include "tensorflow/core/framework/tensor_testutil.h"

namespace tensorflow {
namespace serving {
namespace {

class PredictionServiceGrpcTest : public ::testing::Test {
 protected:
  virtual void SetUp() {
    auto prediction_service_status =
        PredictionServiceGrpc::Create("target_address", &grpc_stub_);
  }
  std::unique_ptr<PredictionServiceGrpc> grpc_stub_;
  std::unique_ptr<::grpc::ClientContext> rpc_;
};

TEST_F(PredictionServiceGrpcTest, TestSetDeadline) {
  const absl::Duration deadline = absl::Milliseconds(30000);
  auto rpc_or = grpc_stub_->CreateRpc(deadline);
  ASSERT_TRUE(rpc_or.ok());
  rpc_.reset(rpc_or.value());

  EXPECT_NEAR(absl::ToDoubleMilliseconds(deadline),
              absl::ToDoubleMilliseconds(absl::FromChrono(rpc_->deadline()) -
                                         absl::Now()),
              10);
}

}  // namespace
}  // namespace serving
}  // namespace tensorflow

--------------------------------------------------------------------------------
/tensorflow_serving/model_servers/device_runner_init_stub.cc:
--------------------------------------------------------------------------------
/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow_serving/model_servers/device_runner_init_stub.h"

#include "absl/base/attributes.h"
#include "tensorflow/core/platform/errors.h"
#include "tensorflow/core/platform/status.h"
#include "tensorflow/core/tfrt/runtime/runtime.h"

namespace tensorflow::serving {

namespace {
absl::Status InitializeDeviceRunnerAndTopologyStub(tfrt_stub::Runtime&, int*,
                                                   int*,
                                                   const DeviceRunnerOptions&) {
  return tensorflow::errors::Internal(
      "device_runner_init_impl is not linked into this binary");
}
}  // namespace

absl::Status InitializeDeviceRunnerAndTopology(
    tfrt_stub::Runtime& runtime, int* num_local_devices, int* cores_per_chip,
    const DeviceRunnerOptions& options) {
  return InitializeDeviceRunnerAndTopologyFunc(runtime, num_local_devices,
                                               cores_per_chip, options);
}

ABSL_CONST_INIT InitializeDeviceRunnerAndTopologyFuncType
    InitializeDeviceRunnerAndTopologyFunc =
        InitializeDeviceRunnerAndTopologyStub;

}  // namespace tensorflow::serving

--------------------------------------------------------------------------------
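For reference, a hypothetical device_runner_init_impl.cc sketch showing how an
implementation would replace the stub above; only the function-pointer name and
its signature come from device_runner_init_stub.h, while the file name, the
placeholder topology values and the registration idiom are assumptions:

#include "tensorflow_serving/model_servers/device_runner_init_stub.h"

namespace tensorflow::serving {
namespace {

// A real device-runner initialization; the values below are placeholders.
absl::Status InitializeDeviceRunnerAndTopologyImpl(
    tfrt_stub::Runtime& runtime, int* num_local_devices, int* cores_per_chip,
    const DeviceRunnerOptions& options) {
  *num_local_devices = 1;
  *cores_per_chip = 1;
  return absl::OkStatus();
}

// Overwrites the error-returning stub when this translation unit is linked in.
const bool kRegistered = [] {
  InitializeDeviceRunnerAndTopologyFunc = InitializeDeviceRunnerAndTopologyImpl;
  return true;
}();

}  // namespace
}  // namespace tensorflow::serving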