Commit eeee229 (1 parent: b0c7402), showing 7 changed files with 222 additions and 131 deletions.
New file (25 additions):

@@ -0,0 +1,25 @@
package io.finch

import com.twitter.io.{Buf, Reader}
import java.nio.charset.Charset

/**
 * A type class that encodes a stream `S[F, A]` as a Finagle `Reader[Buf]` (i.e., a chunked
 * response body), given a charset. The `ContentType` member pins the HTTP content type.
 */
trait EncodeStreamToReader[S[_[_], _], F[_], A] {
  type ContentType <: String

  def apply(s: S[F, A], cs: Charset): Reader[Buf]
}

object EncodeStreamToReader {

  type Aux[S[_[_], _], F[_], A, CT <: String] =
    EncodeStreamToReader[S, F, A] { type ContentType = CT }

  type Json[S[_[_], _], F[_], A] = Aux[S, F, A, Application.Json]

  type Text[S[_[_], _], F[_], A] = Aux[S, F, A, Text.Plain]
}
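A minimal usage sketch of the new abstraction (not part of this commit; the `toReader` helper and the UTF-8 default are assumptions): given an implicit JSON instance, a stream can be rendered as a `Reader[Buf]` suitable for a chunked response body.

import com.twitter.io.{Buf, Reader}
import java.nio.charset.StandardCharsets

// Hypothetical helper: summon the JSON instance for S and run it with UTF-8.
def toReader[S[_[_], _], F[_], A](s: S[F, A])(implicit
  e: EncodeStreamToReader.Json[S, F, A]
): Reader[Buf] = e(s, StandardCharsets.UTF_8)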
iteratee/src/main/scala/io/finch/iteratee/package.scala (116 additions & 109 deletions):

@@ -1,109 +1,116 @@
Removed (previous version):

package io.finch

import cats.effect.Effect
import com.twitter.finagle.http.Response
import com.twitter.io._
import com.twitter.util.Future
import io.finch.internal._
import io.finch.items.RequestItem
import io.iteratee.{Enumerator, Iteratee}
import shapeless.Witness

/**
 * Iteratee module
 */
package object iteratee extends IterateeInstances {

  private[finch] def enumeratorFromReader[F[_] : Effect](reader: Reader[Buf]): Enumerator[F, Buf] = {
    def rec(reader: Reader[Buf]): Enumerator[F, Buf] = {
      Enumerator.liftM[F, Option[Buf]] {
        futureToEffect(reader.read())
      }.flatMap {
        case None => Enumerator.empty[F, Buf]
        case Some(buf) => Enumerator.enumOne[F, Buf](buf).append(rec(reader))
      }
    }
    rec(reader).ensure(Effect[F].delay(reader.discard()))
  }

  /**
   * An evaluating [[Endpoint]] that reads a required chunked streaming binary body, interpreted as
   * an `Enumerator[Future, A]`. The returned [[Endpoint]] only matches chunked (streamed) requests.
   */
  def enumeratorBody[F[_] : Effect, A, CT <: String](implicit
    decode: Enumerate.Aux[F, A, CT]
  ): Endpoint[F, Enumerator[F, A]] = new Endpoint[F, Enumerator[F, A]] {
    final def apply(input: Input): Endpoint.Result[F, Enumerator[F, A]] = {
      if (!input.request.isChunked) EndpointResult.NotMatched[F]
      else {
        val req = input.request
        EndpointResult.Matched(
          input,
          Trace.empty,
          Effect[F].pure(Output.payload(decode(enumeratorFromReader(req.reader), req.charsetOrUtf8)))
        )
      }
    }

    final override def item: RequestItem = items.BodyItem
    final override def toString: String = "enumeratorBody"
  }

  /**
   * An evaluating [[Endpoint]] that reads a required chunked streaming JSON body, interpreted as
   * an `Enumerator[Future, A]`. The returned [[Endpoint]] only matches chunked (streamed) requests.
   */
  def enumeratorJsonBody[F[_] : Effect, A](implicit
    ad: Enumerate.Aux[F, A, Application.Json]
  ): Endpoint[F, Enumerator[F, A]] = enumeratorBody[F, A, Application.Json].withToString("enumeratorJsonBody")

}

trait IterateeInstances extends LowPriorityInstances {

  implicit def enumeratorToJsonResponse[F[_] : Effect, A](implicit
    e: Encode.Aux[A, Application.Json],
    w: Witness.Aux[Application.Json]
  ): ToResponse.Aux[Enumerator[F, A], Application.Json] = {
    withCustomIteratee[F, A, Application.Json](writer =>
      Iteratee.foreachM[F, Buf]((buf: Buf) => futureToEffect(writer.write(buf.concat(ToResponse.NewLine))))
    )
  }
}

trait LowPriorityInstances {

  protected def futureToEffect[F[_] : Effect, A](future: => Future[A]): F[A] = {
    Effect[F].async[A](cb => {
      future
        .onFailure(t => cb(Left(t)))
        .onSuccess(b => cb(Right(b)))
    })
  }

  implicit def enumeratorToResponse[F[_] : Effect, A, CT <: String](implicit
    e: Encode.Aux[A, CT],
    w: Witness.Aux[CT]
  ): ToResponse.Aux[Enumerator[F, A], CT] = {
    withCustomIteratee(writer => Iteratee.foreachM[F, Buf]((buf: Buf) => futureToEffect(writer.write(buf))))
  }

  protected def withCustomIteratee[F[_] : Effect, A, CT <: String]
    (iteratee: Writer[Buf] => Iteratee[F, Buf, Unit])(implicit
    e: Encode.Aux[A, CT],
    w: Witness.Aux[CT]
  ): ToResponse.Aux[Enumerator[F, A], CT] = {
    ToResponse.instance[Enumerator[F, A], CT]((enum, cs) => {
      val response = Response()
      response.setChunked(true)
      response.contentType = w.value
      val writer = response.writer
      val stream = {
        enum.ensure(Effect[F].suspend(futureToEffect(writer.close()))).map(e.apply(_, cs)).into(iteratee(writer))
      }
      Effect[F].toIO(stream).unsafeRunAsyncAndForget()
      response
    })
  }
}
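Both versions bridge Twitter `Future`s into the effect `F` via `Effect.async` (`futureToEffect` above, `toEffect` below). A standalone sketch of that bridge, specialized to cats-effect `IO` (the `twitterFutureToIO` name is an assumption, not part of the codebase):

import cats.effect.IO
import com.twitter.util.Future

// Register callbacks on the Twitter Future and complete the IO when it resolves.
def twitterFutureToIO[A](f: => Future[A]): IO[A] =
  IO.async[A] { cb =>
    f.onFailure(t => cb(Left(t))).onSuccess(a => cb(Right(a)))
  }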
Added (new version):

package io.finch

import cats.effect.Effect
import com.twitter.io._
import com.twitter.util.Future
import io.finch.internal._
import io.finch.items.RequestItem
import io.iteratee.{Enumerator, Iteratee}
import java.nio.charset.Charset

/**
 * Iteratee module
 */
package object iteratee extends IterateeInstances {

  private[finch] def enumeratorFromReader[F[_] : Effect](reader: Reader[Buf]): Enumerator[F, Buf] = {
    def loop(): Enumerator[F, Buf] = {
      Enumerator
        .liftM[F, Option[Buf]](toEffect[F, Option[Buf]](reader.read()))
        .flatMap {
          case None => Enumerator.empty[F, Buf]
          case Some(buf) => Enumerator.enumOne[F, Buf](buf).append(loop())
        }
    }

    loop().ensure(Effect[F].delay(reader.discard()))
  }

  /**
   * An evaluating [[Endpoint]] that reads a required chunked streaming binary body, interpreted as
   * an `Enumerator[Future, A]`. The returned [[Endpoint]] only matches chunked (streamed) requests.
   */
  def enumeratorBody[F[_] : Effect, A, CT <: String](implicit
    decode: Enumerate.Aux[F, A, CT]
  ): Endpoint[F, Enumerator[F, A]] = new Endpoint[F, Enumerator[F, A]] {
    final def apply(input: Input): Endpoint.Result[F, Enumerator[F, A]] = {
      if (!input.request.isChunked) EndpointResult.NotMatched[F]
      else {
        val req = input.request
        EndpointResult.Matched(
          input,
          Trace.empty,
          Effect[F].pure(Output.payload(decode(enumeratorFromReader(req.reader), req.charsetOrUtf8)))
        )
      }
    }

    final override def item: RequestItem = items.BodyItem
    final override def toString: String = "enumeratorBody"
  }

  /**
   * An evaluating [[Endpoint]] that reads a required chunked streaming JSON body, interpreted as
   * an `Enumerator[Future, A]`. The returned [[Endpoint]] only matches chunked (streamed) requests.
   */
  def enumeratorJsonBody[F[_] : Effect, A](implicit
    ad: Enumerate.Aux[F, A, Application.Json]
  ): Endpoint[F, Enumerator[F, A]] = enumeratorBody[F, A, Application.Json].withToString("enumeratorJsonBody")

}

trait IterateeInstances extends LowPriorityIterateeInstances {

  implicit def encodeJsonEnumeratorToReader[F[_]: Effect, A](implicit
    A: Encode.Json[A]
  ): EncodeStreamToReader.Json[Enumerator, F, A] =
    new EncodeEnumeratorToReader[F, A, Application.Json] {
      protected def encodeChunk(chunk: A, cs: Charset): Buf = A(chunk, cs)
      override protected def writeChunk(chunk: Buf, w: Writer[Buf]): Future[Unit] =
        w.write(chunk.concat(ToResponse.NewLine))
    }

  implicit def encodeTextEnumeratorToReader[F[_]: Effect, A](implicit
    A: Encode.Text[A]
  ): EncodeStreamToReader.Text[Enumerator, F, A] =
    new EncodeEnumeratorToReader[F, A, Text.Plain] {
      override protected def encodeChunk(chunk: A, cs: Charset): Buf = A(chunk, cs)
    }
}

trait LowPriorityIterateeInstances {

  protected def toEffect[F[_], A](f: => Future[A])(implicit F: Effect[F]): F[A] =
    F.async[A](
      cb => f.onFailure(t => cb(Left(t))).onSuccess(b => cb(Right(b)))
    )

  protected abstract class EncodeEnumeratorToReader[F[_], A, CT <: String](implicit
    F: Effect[F]
  ) extends EncodeStreamToReader[Enumerator, F, A] {

    type ContentType = CT

    protected def encodeChunk(chunk: A, cs: Charset): Buf
    protected def writeChunk(chunk: Buf, w: Writer[Buf]): Future[Unit] = w.write(chunk)

    private def writeIteratee(w: Writer[Buf]): Iteratee[F, Buf, Unit] =
      Iteratee.foreachM[F, Buf](chunk => toEffect[F, Unit](writeChunk(chunk, w)))

    def apply(s: Enumerator[F, A], cs: Charset): Reader[Buf] = {
      val p = new Pipe[Buf]
      val run = s
        .ensure(F.suspend(toEffect[F, Unit](p.close())))
        .map(chunk => encodeChunk(chunk, cs))
        .into(writeIteratee(p))

      F.toIO(run).unsafeRunAsyncAndForget()
      p
    }
  }

  implicit def encodeBufEnumeratorToReader[F[_]: Effect, CT <: String]: EncodeStreamToReader.Aux[Enumerator, F, Buf, CT] =
    new EncodeEnumeratorToReader[F, Buf, CT] {
      protected def encodeChunk(chunk: Buf, cs: Charset): Buf = chunk
    }
}
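A hedged usage sketch of the streaming body endpoint (not part of this diff; assumes an `Enumerate.Aux[IO, Int, Application.Json]` instance is in scope for decoding chunks, and that `Endpoint#mapAsync` is available as in recent Finch versions): fold the streamed integers into their sum.

import cats.effect.IO
import io.finch._
import io.finch.iteratee._
import io.iteratee.{Enumerator, Iteratee}

// Hypothetical endpoint: consume a chunked JSON body of Ints and return their sum.
def sumBody(implicit decode: Enumerate.Aux[IO, Int, Application.Json]): Endpoint[IO, Int] =
  enumeratorJsonBody[IO, Int].mapAsync { (body: Enumerator[IO, Int]) =>
    body.into(Iteratee.fold[IO, Int, Int](0)(_ + _))
  }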