@@ -835,6 +835,71 @@ func TestFullDuplexStreamed_KubeInferenceModelRequest(t *testing.T) {
 				},
 			},
 		},
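+		// Headers-only request while no backend pods are registered in the
+		// datastore: the gateway should answer with an immediate 500 instead
+		// of forwarding the request.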
+		{
+			name: "no backend pods are available",
+			requests: []*extProcPb.ProcessingRequest{
+				{
+					Request: &extProcPb.ProcessingRequest_RequestHeaders{
+						RequestHeaders: &extProcPb.HttpHeaders{
+							Headers: &configPb.HeaderMap{
+								Headers: []*configPb.HeaderValue{
+									{
+										Key:      "content-type",
+										RawValue: []byte("text/event-stream"),
+									},
+									{
+										Key:      "status",
+										RawValue: []byte("200"),
+									},
+								},
+							},
+							EndOfStream: true,
+						},
+					},
+				},
+			},
+			pods:        nil,
+			wantMetrics: map[string]string{},
+			wantErr:     true,
+			wantResponses: []*extProcPb.ProcessingResponse{
+				{
+					Response: &extProcPb.ProcessingResponse_ImmediateResponse{
+						ImmediateResponse: &extProcPb.ImmediateResponse{
+							Status: &envoyTypePb.HttpStatus{
+								Code: envoyTypePb.StatusCode_InternalServerError,
+							},
+							Body: []byte("inference gateway: Internal - no pods available in datastore"),
+						},
+					},
+				},
+			},
+		},
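+		// Body is valid JSON but carries no "model" field: the gateway should
+		// answer with an immediate BadRequest.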
+		{
+			name: "request body does not contain a model, expect model not found",
+			requests: []*extProcPb.ProcessingRequest{
+				{
+					Request: &extProcPb.ProcessingRequest_RequestBody{
+						RequestBody: &extProcPb.HttpBody{
+							Body:        []byte(`{"hello":"world"}`),
+							EndOfStream: true,
+						},
+					},
+				},
+			},
+			wantErr:     true,
+			wantMetrics: map[string]string{},
+			wantResponses: []*extProcPb.ProcessingResponse{
+				{
+					Response: &extProcPb.ProcessingResponse_ImmediateResponse{
+						ImmediateResponse: &extProcPb.ImmediateResponse{
+							Status: &envoyTypePb.HttpStatus{
+								Code: envoyTypePb.StatusCode_BadRequest,
+							},
+							Body: []byte("inference gateway: BadRequest - model not found in request body"),
+						},
+					},
+				},
+			},
+		},
 	}

 	for _, test := range tests {