public void Interpreter_must_work_with_a_massive_chain_of_maps_with_early_complete()
{
    // Build a long pipeline: HalfLength map stages, a Take that completes
    // early after Repetition/2 elements, then HalfLength more map stages.
    var stages = Enumerable.Range(1, HalfLength).Select(_ => _select).ToList<IStage<int, int>>();
    stages.Add(new Take<int>(Repetition/2));
    stages.AddRange(Enumerable.Range(1, HalfLength).Select(_ => _select));

    WithOneBoundedSetup(stages.ToArray(), (lastEvents, upstream, downstream) =>
    {
        lastEvents().Should().BeEmpty();

        var stopwatch = Stopwatch.StartNew();

        // Push all but the last element through the chain; each request/push
        // round-trip should surface exactly one RequestOne and one OnNext.
        for (var n = 0; n < (Repetition/2) - 1; n++)
        {
            downstream.RequestOne();
            lastEvents().Should().BeEquivalentTo(new RequestOne());
            upstream.OnNext(n);
            lastEvents().Should().BeEquivalentTo(new OnNext(n + ChainLength));
        }

        // The final element exhausts Take's quota: alongside the mapped value
        // we expect upstream cancellation and downstream completion.
        downstream.RequestOne();
        lastEvents().Should().BeEquivalentTo(new RequestOne());
        upstream.OnNext(0);
        lastEvents().Should().BeEquivalentTo(new OnNext(0 + ChainLength), new Cancel(), new OnComplete());

        stopwatch.Stop();
        var time = stopwatch.Elapsed.TotalSeconds;
        // Not a real benchmark, just for sanity check
        _helper?.WriteLine(
            $"Chain finished in {time} seconds {ChainLength*Repetition} maps in total and {(ChainLength*Repetition)/(time*1000*1000)} million maps/s");
    });
}