@@ -22,14 +22,16 @@ import { ConnectionSampler } from './connection-sampler/connection_sampler.js';
2222import {
2323 DEFAULT_SUB_PROTOCOL_HANDLERS ,
2424 DEFAULT_SUB_PROTOCOL_VALIDATORS ,
25- type ReqRespSubProtocol ,
25+ type ReqRespResponse ,
26+ ReqRespSubProtocol ,
2627 type ReqRespSubProtocolHandlers ,
2728 type ReqRespSubProtocolValidators ,
2829 type SubProtocolMap ,
2930 subProtocolMap ,
3031} from './interface.js' ;
3132import { ReqRespMetrics } from './metrics.js' ;
3233import { RequestResponseRateLimiter } from './rate-limiter/rate_limiter.js' ;
34+ import { ReqRespStatus , ReqRespStatusError , parseStatusChunk , prettyPrintReqRespStatus } from './status.js' ;
3335
3436/**
3537 * The Request Response Service
@@ -190,10 +192,17 @@ export class ReqResp {
190192 this . logger . trace ( `Sending request to peer: ${ peer . toString ( ) } ` ) ;
191193 const response = await this . sendRequestToPeer ( peer , subProtocol , requestBuffer ) ;
192194
195+ if ( response && response . status !== ReqRespStatus . SUCCESS ) {
196+ this . logger . debug (
197+ `Request to peer ${ peer . toString ( ) } failed with status ${ prettyPrintReqRespStatus ( response . status ) } ` ,
198+ ) ;
199+ continue ;
200+ }
201+
193202 // If we get a response, return it, otherwise we iterate onto the next peer
194203 // We do not consider it a success if we have an empty buffer
195- if ( response && response . length > 0 ) {
196- const object = subProtocolMap [ subProtocol ] . response . fromBuffer ( response ) ;
204+ if ( response && response . data . length > 0 ) {
205+ const object = subProtocolMap [ subProtocol ] . response . fromBuffer ( response . data ) ;
197206 // The response validator handles peer punishment within
198207 const isValid = await responseValidator ( request , object , peer ) ;
199208 if ( ! isValid ) {
@@ -311,8 +320,22 @@ export class ReqResp {
311320 for ( const index of indices ) {
312321 const response = await this . sendRequestToPeer ( peer , subProtocol , requestBuffers [ index ] ) ;
313322
314- if ( response && response . length > 0 ) {
315- const object = subProtocolMap [ subProtocol ] . response . fromBuffer ( response ) ;
323+ // Check the status of the response buffer
324+ if ( response && response . status !== ReqRespStatus . SUCCESS ) {
325+ this . logger . debug (
326+ `Request to peer ${ peer . toString ( ) } failed with status ${ prettyPrintReqRespStatus (
327+ response . status ,
328+ ) } `,
329+ ) ;
330+
331+ // If we hit a rate limit or some failure, we remove the peer and return the results,
332+ // they will be split among remaining peers and the new sampled peer
333+ batchSampler . removePeerAndReplace ( peer ) ;
334+ return { peer, results : peerResults } ;
335+ }
336+
337+ if ( response && response . data . length > 0 ) {
338+ const object = subProtocolMap [ subProtocol ] . response . fromBuffer ( response . data ) ;
316339 const isValid = await responseValidator ( requests [ index ] , object , peer ) ;
317340
318341 if ( isValid ) {
@@ -394,16 +417,16 @@ export class ReqResp {
394417 peerId : PeerId ,
395418 subProtocol : ReqRespSubProtocol ,
396419 payload : Buffer ,
397- ) : Promise < Buffer | undefined > {
420+ ) : Promise < ReqRespResponse | undefined > {
398421 let stream : Stream | undefined ;
399422 try {
400423 this . metrics . recordRequestSent ( subProtocol ) ;
401424
402425 stream = await this . connectionSampler . dialProtocol ( peerId , subProtocol ) ;
403426
404427 // Open the stream with a timeout
405- const result = await executeTimeout < Buffer > (
406- ( ) : Promise < Buffer > => pipe ( [ payload ] , stream ! , this . readMessage . bind ( this ) ) ,
428+ const result = await executeTimeout < ReqRespResponse > (
429+ ( ) : Promise < ReqRespResponse > => pipe ( [ payload ] , stream ! , this . readMessage . bind ( this ) ) ,
407430 this . individualRequestTimeoutMs ,
408431 ( ) => new IndividualReqRespTimeoutError ( ) ,
409432 ) ;
@@ -447,7 +470,15 @@ export class ReqResp {
447470 * Categorize the error and log it.
448471 */
449472 private categorizeError ( e : any , peerId : PeerId , subProtocol : ReqRespSubProtocol ) : PeerErrorSeverity | undefined {
450- // Non pubishable errors
473+ // Non punishable errors - we do not expect a response for goodbye messages
474+ if ( subProtocol === ReqRespSubProtocol . GOODBYE ) {
475+ this . logger . debug ( 'Error encountered on goodbye sub protocol, no penalty' , {
476+ peerId : peerId . toString ( ) ,
477+ subProtocol,
478+ } ) ;
479+ return undefined ;
480+ }
481+
451482 // We do not punish a collective timeout, as the node triggers this interrupt, independent of the peer's behaviour
452483 const logTags = {
453484 peerId : peerId . toString ( ) ,
@@ -492,14 +523,45 @@ export class ReqResp {
492523
493524 /**
494525 * Read a message returned from a stream into a single buffer
526+ *
527+ * The message is split into two components
528+ * - The first chunk should contain a control byte, indicating the status of the response see `ReqRespStatus`
529+ * - The second chunk should contain the response data
495530 */
496- private async readMessage ( source : AsyncIterable < Uint8ArrayList > ) : Promise < Buffer > {
531+ private async readMessage ( source : AsyncIterable < Uint8ArrayList > ) : Promise < ReqRespResponse > {
532+ let statusBuffer : ReqRespStatus | undefined ;
497533 const chunks : Uint8Array [ ] = [ ] ;
498- for await ( const chunk of source ) {
499- chunks . push ( chunk . subarray ( ) ) ;
534+
535+ try {
536+ for await ( const chunk of source ) {
537+ if ( statusBuffer === undefined ) {
538+ const firstChunkBuffer = chunk . subarray ( ) ;
539+ statusBuffer = parseStatusChunk ( firstChunkBuffer ) ;
540+ } else {
541+ chunks . push ( chunk . subarray ( ) ) ;
542+ }
543+ }
544+
545+ const messageData = Buffer . concat ( chunks ) ;
546+ const message : Buffer = this . snappyTransform . inboundTransformNoTopic ( messageData ) ;
547+
548+ return {
549+ status : statusBuffer ?? ReqRespStatus . UNKNOWN ,
550+ data : message ,
551+ } ;
552+ } catch ( e : any ) {
553+ this . logger . debug ( `Reading message failed: ${ e . message } ` ) ;
554+
555+ let status = ReqRespStatus . UNKNOWN ;
556+ if ( e instanceof ReqRespStatusError ) {
557+ status = e . status ;
558+ }
559+
560+ return {
561+ status,
562+ data : Buffer . from ( [ ] ) ,
563+ } ;
500564 }
501- const messageData = Buffer . concat ( chunks ) ;
502- return this . snappyTransform . inboundTransformNoTopic ( messageData ) ;
503565 }
504566
505567 /**
@@ -525,25 +587,28 @@ export class ReqResp {
525587 private async streamHandler ( protocol : ReqRespSubProtocol , { stream, connection } : IncomingStreamData ) {
526588 this . metrics . recordRequestReceived ( protocol ) ;
527589
528- // Store a reference to from this for the async generator
529- if ( ! this . rateLimiter . allow ( protocol , connection . remotePeer ) ) {
530- this . logger . warn ( `Rate limit exceeded for ${ protocol } from ${ connection . remotePeer } ` ) ;
590+ try {
591+ // Store a reference to from this for the async generator
592+ if ( ! this . rateLimiter . allow ( protocol , connection . remotePeer ) ) {
593+ this . logger . warn ( `Rate limit exceeded for ${ protocol } from ${ connection . remotePeer } ` ) ;
531594
532- // TODO(#8483): handle changing peer scoring for failed rate limit, maybe differentiate between global and peer limits here when punishing
533- await stream . close ( ) ;
534- return ;
535- }
595+ throw new ReqRespStatusError ( ReqRespStatus . RATE_LIMIT_EXCEEDED ) ;
596+ }
536597
537- const handler = this . subProtocolHandlers [ protocol ] ;
538- const transform = this . snappyTransform ;
598+ const handler = this . subProtocolHandlers [ protocol ] ;
599+ const transform = this . snappyTransform ;
539600
540- try {
541601 await pipe (
542602 stream ,
543603 async function * ( source : any ) {
544604 for await ( const chunkList of source ) {
545605 const msg = Buffer . from ( chunkList . subarray ( ) ) ;
546606 const response = await handler ( connection . remotePeer , msg ) ;
607+
608+ // Send success code first, then the response
609+ const successChunk = Buffer . from ( [ ReqRespStatus . SUCCESS ] ) ;
610+ yield new Uint8Array ( successChunk ) ;
611+
547612 yield new Uint8Array ( transform . outboundTransformNoTopic ( response ) ) ;
548613 }
549614 } ,
@@ -552,8 +617,30 @@ export class ReqResp {
552617 } catch ( e : any ) {
553618 this . logger . warn ( e ) ;
554619 this . metrics . recordResponseError ( protocol ) ;
620+
621+ // If we receive a known error, we use the error status in the response chunk, otherwise we categorize as unknown
622+ let errorStatus = ReqRespStatus . UNKNOWN ;
623+ if ( e instanceof ReqRespStatusError ) {
624+ errorStatus = e . status ;
625+ }
626+
627+ const sendErrorChunk = this . sendErrorChunk ( errorStatus ) ;
628+
629+ // Return and yield the response chunk
630+ await pipe (
631+ stream ,
632+ async function * ( _source : any ) {
633+ yield * sendErrorChunk ;
634+ } ,
635+ stream ,
636+ ) ;
555637 } finally {
556638 await stream . close ( ) ;
557639 }
558640 }
641+
642+ private async * sendErrorChunk ( error : ReqRespStatus ) : AsyncIterable < Uint8Array > {
643+ const errorChunk = Buffer . from ( [ error ] ) ;
644+ yield new Uint8Array ( errorChunk ) ;
645+ }
559646}
0 commit comments