@@ -235,6 +235,7 @@ internal struct LambdaHTTPServer {
 
         var requestHead: HTTPRequestHead!
         var requestBody: ByteBuffer?
+        var requestId: String?
 
         // Note that this method is non-throwing and we are catching any error.
         // We do this since we don't want to tear down the whole server when a single connection
@@ -246,16 +247,27 @@ internal struct LambdaHTTPServer {
                     switch inboundData {
                     case .head(let head):
                         requestHead = head
+                        requestId = getRequestId(from: requestHead)
+
+                        // for streaming requests, push a partial head response
+                        if self.isStreamingResponse(requestHead) {
+                            await self.responsePool.push(
+                                LocalServerResponse(
+                                    id: requestId,
+                                    status: .ok
+                                )
+                            )
+                        }
 
                     case .body(let body):
                         precondition(requestHead != nil, "Received .body without .head")
 
                         // if this is a request from a Streaming Lambda Handler,
                         // stream the response instead of buffering it
                         if self.isStreamingResponse(requestHead) {
-                            // we are receiving a chunked body,
-                            // we can stream the response and not accumulate the chunks
-                            print(String(buffer: body))
+                            await self.responsePool.push(
+                                LocalServerResponse(id: requestId, body: body)
+                            )
                         } else {
                             requestBody.setOrWriteImmutableBuffer(body)
                         }
@@ -265,22 +277,23 @@ internal struct LambdaHTTPServer {
 
                         // process the buffered response for non streaming requests
                         if !self.isStreamingResponse(requestHead) {
-                            // process the complete request
-                            let response = try await self.processCompleteRequest(
+                            // process the request and send the response
+                            try await self.processRequestAndSendResponse(
                                 head: requestHead,
                                 body: requestBody,
-                                logger: logger
-                            )
-                            // send the responses
-                            try await self.sendCompleteResponse(
-                                response: response,
                                 outbound: outbound,
                                 logger: logger
                             )
+                        } else {
+                            await self.responsePool.push(
+                                LocalServerResponse(id: requestId, final: true)
+                            )
+
                         }
 
                         requestHead = nil
                         requestBody = nil
+                        requestId = nil
                     }
                 }
             }
@@ -304,6 +317,11 @@ internal struct LambdaHTTPServer {
             requestHead.headers["Transfer-Encoding"].contains("chunked")
     }
 
+    /// This function parses and returns the requestId or nil if the request is malformed
+    private func getRequestId(from head: HTTPRequestHead) -> String? {
+        let parts = head.uri.split(separator: "/")
+        return parts.count > 2 ? String(parts[parts.count - 2]) : nil
+    }
     /// This function process the URI request sent by the client and by the Lambda function
     ///
     /// It enqueues the client invocation and iterate over the invocation queue when the Lambda function sends /next request
@@ -314,19 +332,22 @@ internal struct LambdaHTTPServer {
     /// - body: the HTTP request body
     /// - Throws:
     /// - Returns: the response to send back to the client or the Lambda function
-    private func processCompleteRequest(
+    private func processRequestAndSendResponse(
         head: HTTPRequestHead,
         body: ByteBuffer?,
+        outbound: NIOAsyncChannelOutboundWriter<HTTPServerResponsePart>,
         logger: Logger
-    ) async throws -> LocalServerResponse {
+    ) async throws {
 
+        var logger = logger
+        logger[metadataKey: "URI"] = "\(head.method) \(head.uri)"
         if let body {
             logger.trace(
                 "Processing request",
-                metadata: ["URI": "\(head.method) \(head.uri)", "Body": "\(String(buffer: body))"]
+                metadata: ["Body": "\(String(buffer: body))"]
             )
         } else {
-            logger.trace("Processing request", metadata: ["URI": "\(head.method) \(head.uri)"])
+            logger.trace("Processing request")
         }
 
         switch (head.method, head.uri) {
@@ -337,27 +358,32 @@ internal struct LambdaHTTPServer {
         // client POST /invoke
         case (.POST, let url) where url.hasSuffix(self.invocationEndpoint):
             guard let body else {
-                return .init(status: .badRequest, headers: [], body: nil)
+                return try await sendResponse(.init(status: .badRequest), outbound: outbound, logger: logger)
             }
             // we always accept the /invoke request and push them to the pool
             let requestId = "\(DispatchTime.now().uptimeNanoseconds)"
-            var logger = logger
             logger[metadataKey: "requestID"] = "\(requestId)"
-            logger.trace("/invoke received invocation")
+            logger.trace("/invoke received invocation, pushing it to the stack")
             await self.invocationPool.push(LocalServerInvocation(requestId: requestId, request: body))
 
             // wait for the lambda function to process the request
             for try await response in self.responsePool {
-                logger.trace(
-                    "Received response to return to client",
-                    metadata: ["requestId": "\(response.requestId ?? "")"]
-                )
+                logger[metadataKey: "requestID"] = "\(requestId)"
+                logger.trace("Received response to return to client")
                 if response.requestId == requestId {
-                    return response
+                    logger.trace("/invoke requestId is valid, sending the response")
+                    // send the response to the client
+                    // if the response is final, we can send it and return
+                    // if the response is not final, we can send it and wait for the next response
+                    try await self.sendResponse(response, outbound: outbound, logger: logger)
+                    if response.final == true {
+                        logger.trace("/invoke returning")
+                        return  // if the response is final, we can return and close the connection
+                    }
                 } else {
                     logger.error(
                         "Received response for a different request id",
-                        metadata: ["response requestId": "\(response.requestId ?? "")", "requestId": "\(requestId)"]
+                        metadata: ["response requestId": "\(response.requestId ?? "")"]
                     )
                     // should we return an error here? Or crash as this is probably a programming error?
                 }
@@ -368,7 +394,7 @@ internal struct LambdaHTTPServer {
 
         // client uses incorrect HTTP method
         case (_, let url) where url.hasSuffix(self.invocationEndpoint):
-            return .init(status: .methodNotAllowed)
+            return try await sendResponse(.init(status: .methodNotAllowed), outbound: outbound, logger: logger)
 
         //
         // lambda invocations
@@ -381,85 +407,97 @@ internal struct LambdaHTTPServer {
             // pop the tasks from the queue
             logger.trace("/next waiting for /invoke")
             for try await invocation in self.invocationPool {
-                logger.trace("/next retrieved invocation", metadata: ["requestId": "\(invocation.requestId)"])
-                // this call also stores the invocation requestId into the response
-                return invocation.makeResponse(status: .accepted)
+                logger[metadataKey: "requestId"] = "\(invocation.requestId)"
+                logger.trace("/next retrieved invocation")
+                // tell the lambda function we accepted the invocation
+                return try await sendResponse(invocation.acceptedResponse(), outbound: outbound, logger: logger)
             }
             // What todo when there is no more tasks to process?
             // This should not happen as the async iterator blocks until there is a task to process
             fatalError("No more invocations to process - the async for loop should not return")
 
         // :requestID/response endpoint is called by the lambda posting the response
         case (.POST, let url) where url.hasSuffix(Consts.postResponseURLSuffix):
-            let parts = head.uri.split(separator: "/")
-            guard let requestID = parts.count > 2 ? String(parts[parts.count - 2]) : nil else {
+            guard let requestID = getRequestId(from: head) else {
                 // the request is malformed, since we were expecting a requestId in the path
-                return .init(status: .badRequest)
+                return try await sendResponse(.init(status: .badRequest), outbound: outbound, logger: logger)
             }
             // enqueue the lambda function response to be served as response to the client /invoke
             logger.trace("/:requestID/response received response", metadata: ["requestId": "\(requestID)"])
             await self.responsePool.push(
                 LocalServerResponse(
                     id: requestID,
                     status: .ok,
-                    headers: [("Content-Type", "application/json")],
+                    headers: HTTPHeaders([("Content-Type", "application/json")]),
                     body: body
                 )
             )
 
             // tell the Lambda function we accepted the response
-            return .init(id: requestID, status: .accepted)
+            return try await sendResponse(.init(id: requestID, status: .accepted), outbound: outbound, logger: logger)
 
         // :requestID/error endpoint is called by the lambda posting an error response
         // we accept all requestID and we do not handle the body, we just acknowledge the request
         case (.POST, let url) where url.hasSuffix(Consts.postErrorURLSuffix):
-            let parts = head.uri.split(separator: "/")
-            guard let requestID = parts.count > 2 ? String(parts[parts.count - 2]) : nil else {
+            guard let requestID = getRequestId(from: head) else {
                 // the request is malformed, since we were expecting a requestId in the path
-                return .init(status: .badRequest)
+                return try await sendResponse(.init(status: .badRequest), outbound: outbound, logger: logger)
             }
             // enqueue the lambda function response to be served as response to the client /invoke
             logger.trace("/:requestID/response received response", metadata: ["requestId": "\(requestID)"])
             await self.responsePool.push(
                 LocalServerResponse(
                     id: requestID,
                     status: .internalServerError,
-                    headers: [("Content-Type", "application/json")],
+                    headers: HTTPHeaders([("Content-Type", "application/json")]),
                     body: body
                 )
             )
 
-            return .init(status: .accepted)
+            return try await sendResponse(.init(status: .accepted), outbound: outbound, logger: logger)
 
         // unknown call
         default:
-            return .init(status: .notFound)
+            return try await sendResponse(.init(status: .notFound), outbound: outbound, logger: logger)
         }
     }
 
-    private func sendCompleteResponse(
-        response: LocalServerResponse,
+    private func sendResponse(
+        _ response: LocalServerResponse,
         outbound: NIOAsyncChannelOutboundWriter<HTTPServerResponsePart>,
         logger: Logger
     ) async throws {
-        var headers = HTTPHeaders(response.headers ?? [])
-        headers.add(name: "Content-Length", value: "\(response.body?.readableBytes ?? 0)")
-
-        logger.trace("Writing response", metadata: ["requestId": "\(response.requestId ?? "")"])
-        try await outbound.write(
-            HTTPServerResponsePart.head(
-                HTTPResponseHead(
-                    version: .init(major: 1, minor: 1),
-                    status: response.status,
-                    headers: headers
+        var logger = logger
+        logger[metadataKey: "requestId"] = "\(response.requestId ?? "nil")"
+        logger.trace("Writing response")
+
+        var headers = response.headers ?? HTTPHeaders()
+        if let body = response.body {
+            headers.add(name: "Content-Length", value: "\(body.readableBytes)")
+        }
+
+        if let status = response.status {
+            logger.trace("Sending status and headers")
+            try await outbound.write(
+                HTTPServerResponsePart.head(
+                    HTTPResponseHead(
+                        version: .init(major: 1, minor: 1),
+                        status: status,
+                        headers: headers
+                    )
                 )
             )
-        )
+        }
+
         if let body = response.body {
+            logger.trace("Sending body")
             try await outbound.write(HTTPServerResponsePart.body(.byteBuffer(body)))
         }
 
-        try await outbound.write(HTTPServerResponsePart.end(nil))
+        if response.final {
+            logger.trace("Sending end")
+            try await outbound.write(HTTPServerResponsePart.end(nil))
+        }
     }
 
     /// A shared data structure to store the current invocation or response requests and the continuation objects.
@@ -543,36 +581,37 @@ internal struct LambdaHTTPServer {
 
     private struct LocalServerResponse: Sendable {
         let requestId: String?
-        let status: HTTPResponseStatus
-        let headers: [(String, String)]?
+        let status: HTTPResponseStatus?
+        let headers: HTTPHeaders?
         let body: ByteBuffer?
-        init(id: String? = nil, status: HTTPResponseStatus, headers: [(String, String)]? = nil, body: ByteBuffer? = nil)
-        {
+        let final: Bool
+        init(id: String? = nil, status: HTTPResponseStatus? = nil, headers: HTTPHeaders? = nil, body: ByteBuffer? = nil, final: Bool = false) {
             self.requestId = id
             self.status = status
             self.headers = headers
             self.body = body
+            self.final = final
         }
     }
 
     private struct LocalServerInvocation: Sendable {
         let requestId: String
         let request: ByteBuffer
 
-        func makeResponse(status: HTTPResponseStatus) -> LocalServerResponse {
+        func acceptedResponse() -> LocalServerResponse {
 
             // required headers
-            let headers = [
+            let headers = HTTPHeaders([
                 (AmazonHeaders.requestID, self.requestId),
                 (
-                AmazonHeaders.invokedFunctionARN,
-                "arn:aws:lambda:us-east-1:\(Int16.random(in: Int16.min ... Int16.max)):function:custom-runtime"
+                    AmazonHeaders.invokedFunctionARN,
+                    "arn:aws:lambda:us-east-1:\(Int16.random(in: Int16.min ... Int16.max)):function:custom-runtime"
                 ),
                 (AmazonHeaders.traceID, "Root=\(AmazonHeaders.generateXRayTraceID());Sampled=1"),
                 (AmazonHeaders.deadline, "\(DispatchWallTime.distantFuture.millisSinceEpoch)"),
-            ]
+            ])
 
-            return LocalServerResponse(id: self.requestId, status: status, headers: headers, body: self.request)
+            return LocalServerResponse(id: self.requestId, status: .accepted, headers: headers, body: self.request, final: true)
         }
     }
 }
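
Side note, not part of the commit: a minimal sketch of how a client could exercise the streaming path added above. The address http://127.0.0.1:7000/invoke, the payload, and the platform availability are assumptions for illustration, not values taken from this diff; URLSession's async bytes API used here requires macOS 12 or newer.

// Illustrative client (not part of the commit): POSTs to the local /invoke
// endpoint and reads the response body incrementally, so chunks pushed through
// the response pool (one LocalServerResponse per chunk, then a final one)
// are printed as they arrive instead of after the connection closes.
import Foundation

@available(macOS 12.0, *)
func invokeLocalFunction() async throws {
    // hypothetical local server address; the real host/port come from the runtime's configuration
    var request = URLRequest(url: URL(string: "http://127.0.0.1:7000/invoke")!)
    request.httpMethod = "POST"
    request.httpBody = Data(#"{"name": "world"}"#.utf8)

    let (bytes, response) = try await URLSession.shared.bytes(for: request)
    print("status:", (response as? HTTPURLResponse)?.statusCode ?? -1)
    for try await line in bytes.lines {
        print("chunk:", line)
    }
}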