public void Interpreter_must_work_with_a_massive_chain_of_maps()
{
    // Build a pipeline consisting of ChainLength identical map stages.
    var stages = Enumerable.Range(1, ChainLength)
        .Select(_ => _select)
        .Cast<IStage<int, int>>()
        .ToArray();

    WithOneBoundedSetup(stages, (lastEvents, upstream, downstream) =>
    {
        // No events should have fired before any demand is signalled.
        lastEvents().Should().BeEmpty();

        var stopwatch = Stopwatch.StartNew();
        // Push Repetition elements through the chain, checking the
        // request/next handshake and the mapped value for each one.
        for (var element = 0; element < Repetition; element++)
        {
            downstream.RequestOne();
            lastEvents().Should().BeEquivalentTo(new RequestOne());
            upstream.OnNext(element);
            lastEvents().Should().BeEquivalentTo(new OnNext(element + ChainLength));
        }

        // Completion must propagate through the whole chain.
        upstream.OnComplete();
        lastEvents().Should().BeEquivalentTo(new OnComplete());
        stopwatch.Stop();

        var seconds = stopwatch.Elapsed.TotalSeconds;
        // Not a real benchmark, just for sanity check
        _helper?.WriteLine($"Chain finished in {seconds} seconds {ChainLength * Repetition} maps in total and {(ChainLength * Repetition) / (seconds * 1000 * 1000)} million maps/s");
    });
}