""" Pipeline and BufferedPipeline implementation, conforming to the same API.

    eq:
    --

    a strategically very important function that is identical in function
    to nmigen's Signal.eq function, except it may take objects, or a list
    of objects, or a tuple of objects, and where objects may also be
    Records.

    Stage API:
    ---------

    a stage requires compliance with a strict API that may be
    implemented in several ways, including as a static class.
    the methods of a stage instance must be as follows (a minimal
    sketch is shown after this list):

    * ispec() - Input data format specification
                returns an object or a list or tuple of objects, or
                a Record, each object having an "eq" function which
                takes responsibility for copying by assignment all
                sub-objects
    * ospec() - Output data format specification
                requirements as for ispec
    * process(i) - Processes an ispec-formatted object
                returns a combinatorial block of a result that
                may be assigned to the output, by way of the "eq"
                function
    * setup(m, i) - Optional function for setting up submodules
                may be used for more complex stages, to link
                the input (i) to submodules. must take responsibility
                for adding those submodules to the module (m).
                the submodules must be combinatorial blocks and
                must have their inputs and output linked combinatorially.

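
    a minimal stage sketch following this API (purely illustrative: see
    ExampleAddStage, below, for a working class-instance version):

        class AddStage:
            def ispec(self): return (Signal(16), Signal(16))
            def ospec(self): return Signal(16)
            def process(self, i): return i[0] + i[1]
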
    StageChain:
    ----------

    A useful combinatorial wrapper around stages that chains them together
    and then presents a Stage-API-conformant interface.

    UnbufferedPipeline:
    ------------------

    A simple stalling clock-synchronised pipeline that has no buffering
    (unlike BufferedPipeline). A stall anywhere along the line will
    result in the stall propagating back along the entire chain.

    The BufferedPipeline by contrast will buffer incoming data, allowing
    previous stages one clock cycle's grace before also having to stall.

    An advantage of the UnbufferedPipeline over the Buffered one is
    that the amount of logic needed (number of gates) is greatly
    reduced.

    BufferedPipeline:
    ----------------

    nmigen implementation of buffered pipeline stage, based on zipcpu:
    https://zipcpu.com/blog/2017/08/14/strategies-for-pipelining.html

    this module requires quite a bit of thought to understand how it works
    (and why it is needed in the first place). reading the above is
    *strongly* recommended.

    unlike john dawson's IEEE754 FPU STB/ACK signalling, which requires
    the STB / ACK signals to be raised and lowered (on separate clocks)
    before data may proceed (thus only allowing one piece of data to
    proceed on *ALTERNATE* cycles), the signalling here is a true pipeline
    where data will flow on *every* clock when the conditions are right.

    input acceptance conditions are when:
    * incoming previous-stage strobe (p.i_valid) is HIGH
    * outgoing previous-stage ready (p.o_ready) is HIGH

    output transmission conditions are when:
    * outgoing next-stage strobe (n.o_valid) is HIGH
    * incoming next-stage ready (n.i_ready) is HIGH

    the tricky bit is when the input has valid data and the output is not
    ready to accept it. if it wasn't for the clock synchronisation, it
    would be possible to tell the input "hey don't send that data, we're
    not ready". unfortunately, it's not possible to "change the past":
    the previous stage *has no choice* but to pass on its data.

    therefore, the incoming data *must* be accepted - and stored: that
    is the responsibility / contract that this stage *must* accept.
    on the same clock, it's possible to tell the input that it must
    not send any more data. this is the "stall" condition.

    we now effectively have *two* possible pieces of data to "choose" from:
    the buffered data, and the incoming data. the decision as to which
    to process and output is based on whether we are in "stall" or not.
    i.e. when the next stage is ready once more, the output comes from
    the buffer if a stall had previously occurred, otherwise it comes
    direct from processing the input.

    this allows us to respect a synchronous "travelling STB" with what
    dan calls a "buffered handshake".

    it's quite a complex state machine!
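
    expressed as rough per-clock pseudocode (a summary only: the
    BufferedPipeline.elaborate method, below, is the authoritative version):

        if n.i_ready:                        # next stage will accept data
            if p.o_ready: output <= process(input)   # no stall: pass-through
            else:         output <= r_data           # flush the buffer,
                          p.o_ready <= 1             # and clear the stall
        elif not n.o_valid:                  # output register is empty
            output <= process(input);  p.o_ready <= 1
        elif p.i_valid and p.o_ready:        # output blocked, input accepted:
            p.o_ready <= 0                   # it sits in r_data, so stall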
"""

from nmigen import Signal, Cat, Const, Mux, Module
from nmigen.cli import verilog, rtlil
from nmigen.hdl.rec import Record, Layout

from abc import ABCMeta, abstractmethod
from collections.abc import Sequence


class PrevControl:
    """ contains signals that come *from* the previous stage (both in and out)
        * i_valid: previous stage indicating all incoming data is valid.
                   may be a multi-bit signal, where all bits are required
                   to be asserted to indicate "valid".
        * o_ready: output to previous stage indicating readiness to accept data
        * i_data : an input - added by the user of this class
    """

    def __init__(self, i_width=1):
        self.i_valid = Signal(i_width, name="p_i_valid") # prev   >>in  self
        self.o_ready = Signal(name="p_o_ready")          # prev   <<out self

    def _connect_in(self, prev):
        """ internal helper function to connect stage to an input source.
            do not use to connect stage-to-stage!
        """
        return [self.i_valid.eq(prev.i_valid),
                prev.o_ready.eq(self.o_ready),
                eq(self.i_data, prev.i_data),
                ]

    def i_valid_logic(self):
        vlen = len(self.i_valid)
        if vlen > 1: # multi-bit case: valid only when i_valid is all 1s
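            # e.g. with i_width=3, i_valid must equal 0b111 to count as "valid"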
            all1s = Const(-1, (len(self.i_valid), False))
            return self.i_valid == all1s
        # single-bit i_valid case
        return self.i_valid


class NextControl:
    """ contains the signals that go *to* the next stage (both in and out)
        * o_valid: output indicating to next stage that data is valid
        * i_ready: input from next stage indicating that it can accept data
        * o_data : an output - added by the user of this class
    """
    def __init__(self):
        self.o_valid = Signal(name="n_o_valid") # self out>>  next
        self.i_ready = Signal(name="n_i_ready") # self <<in   next

    def connect_to_next(self, nxt):
        """ helper function to connect to the next stage data/valid/ready.
            data/valid is passed *TO* nxt, and ready comes *IN* from nxt.
            use this when connecting stage-to-stage
        """
        return [nxt.i_valid.eq(self.o_valid),
                self.i_ready.eq(nxt.o_ready),
                eq(nxt.i_data, self.o_data),
                ]

    def _connect_out(self, nxt):
        """ internal helper function to connect stage to an output source.
            do not use to connect stage-to-stage!
        """
        return [nxt.o_valid.eq(self.o_valid),
                self.i_ready.eq(nxt.i_ready),
                eq(nxt.o_data, self.o_data),
                ]


def eq(o, i):
    """ makes signals equal: a helper routine which identifies if it is being
        passed a list (or tuple) of objects, or signals, or Records, and calls
        the objects' eq function.

        complex objects (classes) can be used: they must follow the
        convention of having an eq member function, which takes the
        responsibility of further calling eq and returning a list of
        eq assignments

        Record is a special (unusual, recursive) case, where the input may be
        specified as a dictionary (which may contain further dictionaries,
        recursively), where the field names of the dictionary must match
        the Record's field spec.  Alternatively, an object with the same
        member names as the Record may be assigned: it does not have to
        *be* a Record.
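
        example usage (a sketch: the names m, o1, o2, i1, i2 and rec are
        purely illustrative, not part of this module):

            m.d.comb += eq(o1, i1)                      # two Signals
            m.d.comb += eq([o1, o2], [i1, i2])          # lists of objects
            m.d.comb += eq(rec, {"op1": i1, "op2": i2}) # Record from a dict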
    """
    if not isinstance(o, Sequence):
        o, i = [o], [i]
    res = []
    for (ao, ai) in zip(o, i):
        #print ("eq", ao, ai)
        if isinstance(ao, Record):
            for idx, (field_name, field_shape, _) in enumerate(ao.layout):
                if isinstance(field_shape, Layout):
                    val = ai.fields
                else:
                    val = ai
                if hasattr(val, field_name): # check for attribute
                    val = getattr(val, field_name)
                else:
                    val = val[field_name] # dictionary-style specification
                rres = eq(ao.fields[field_name], val)
                res += rres
        else:
            rres = ao.eq(ai)
            if not isinstance(rres, Sequence):
                rres = [rres]
            res += rres
    return res


class StageCls(metaclass=ABCMeta):
    """ Class-based "Stage" API.  requires instantiation (after derivation)

        see "Stage API" above.  Note: python does *not* require derivation
        from this class.  All that is required is that the pipelines *have*
        the functions listed in this class.  Derivation from this class
        is therefore merely a "courtesy" to maintainers.
    """
    @abstractmethod
    def ispec(self): pass        # REQUIRED
    @abstractmethod
    def ospec(self): pass        # REQUIRED
    #@abstractmethod
    #def setup(self, m, i): pass # OPTIONAL
    @abstractmethod
    def process(self, i): pass   # REQUIRED


class Stage(metaclass=ABCMeta):
    """ Static "Stage" API.  does not require instantiation (after derivation)

        see "Stage API" above.  Note: python does *not* require derivation
        from this class.  All that is required is that the pipelines *have*
        the functions listed in this class.  Derivation from this class
        is therefore merely a "courtesy" to maintainers.
    """
    @staticmethod
    @abstractmethod
    def ispec(): pass

    @staticmethod
    @abstractmethod
    def ospec(): pass

    #@staticmethod
    #@abstractmethod
    #def setup(m, i): pass

    @staticmethod
    @abstractmethod
    def process(i): pass


class StageChain(StageCls):
    """ pass in a list of stages, and they will automatically be
        chained together via their input and output specs into a
        combinatorial chain.

        the end result basically conforms to the exact same Stage API.

        * input to this class will be the input of the first stage
        * output of first stage goes into input of second
        * output of second goes into input of third (etc. etc.)
        * the output of this class will be the output of the last stage
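
        example (a sketch: chains two of the "+1" ExampleStageCls stages,
        defined below, so that the combined result adds 2; "m", "i" and "o"
        are illustrative names for a Module and ispec/ospec-shaped signals):

            chain = StageChain([ExampleStageCls(), ExampleStageCls()])
            chain.setup(m, i)                    # links the stages together
            m.d.comb += eq(o, chain.process(i))  # o ends up equal to i + 2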
    """
    def __init__(self, chain):
        self.chain = chain

    def ispec(self):
        return self.chain[0].ispec()

    def ospec(self):
        return self.chain[-1].ospec()

    def setup(self, m, i):
        for (idx, c) in enumerate(self.chain):
            if hasattr(c, "setup"):
                c.setup(m, i)               # stage may have some module stuff
            o = self.chain[idx].ospec()     # only the last assignment survives
            m.d.comb += eq(o, c.process(i)) # process input into "o"
            if idx != len(self.chain)-1:
                ni = self.chain[idx+1].ispec() # becomes new input on next loop
                m.d.comb += eq(ni, o)          # assign output to next input
                i = ni
        self.o = o                          # last loop is the output

    def process(self, i):
        return self.o # conform to Stage API: return last-loop output


class ControlBase:
    """ Common functions for Pipeline API
    """
    def __init__(self, in_multi=None):
        """ Base class containing ready/valid/data to previous and next stages

            * p: contains ready/valid to the previous stage
            * n: contains ready/valid to the next stage

            User must also:
            * add i_data member to PrevControl (p) and
            * add o_data member to NextControl (n)
        """

        # set up input and output IO ACK (prev/next ready/valid)
        self.p = PrevControl(in_multi)
        self.n = NextControl()

    def connect_to_next(self, nxt):
        """ helper function to connect to the next stage data/valid/ready.
        """
        return self.n.connect_to_next(nxt.p)

    def _connect_in(self, prev):
        """ internal helper function to connect stage to an input source.
            do not use to connect stage-to-stage!
        """
        return self.p._connect_in(prev.p)

    def _connect_out(self, nxt):
        """ internal helper function to connect stage to an output source.
            do not use to connect stage-to-stage!
        """
        return self.n._connect_out(nxt.n)

    def connect(self, m, pipechain):
        """ connects a chain (list) of Pipeline instances together and
            links them to this ControlBase instance:

                      in <----> self <---> out
                          |     ^
                          v     |
                       [pipe1, pipe2, pipe3, pipe4]
                        |  ^  |  ^  |      ^
                        v  |  v  |  v      |
                      out---in out--in out---in

            Also takes care of allocating i_data/o_data, by looking up
            the data spec for each end of the pipechain.  i.e. it is NOT
            necessary to allocate self.p.i_data or self.n.o_data manually:
            this is handled AUTOMATICALLY, here.

            Basically this function is the direct equivalent of StageChain,
            except that unlike StageChain, the Pipeline logic is followed.

            Just as StageChain presents an object that conforms to the
            Stage API from a list of objects that also conform to the
            Stage API, an object that calls this Pipeline connect function
            has the exact same pipeline API as the list of pipeline objects
            it is called with.

            Thus it becomes possible to build up larger chains recursively.
            More complex chains (multi-input, multi-output) will have to be
            done manually.
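
            Usage sketch (illustrative only, inside some enclosing
            Elaboratable's elaborate, where "m" is its Module):

                pipechain = [ExampleBufPipe(), ExampleBufPipe()]
                for p in pipechain:
                    m.submodules += p
                cb = ControlBase()
                cb.connect(m, pipechain)
                # cb.p / cb.n now carry the whole chain's ready/valid/data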
        """
        eqs = [] # collated list of assignment statements

        # connect inter-chain
        for i in range(len(pipechain)-1):
            pipe1 = pipechain[i]
            pipe2 = pipechain[i+1]
            eqs += pipe1.connect_to_next(pipe2)

        # connect front of chain to ourselves
        front = pipechain[0]
        self.p.i_data = front.stage.ispec()
        eqs += front._connect_in(self)

        # connect end of chain to ourselves
        end = pipechain[-1]
        self.n.o_data = end.stage.ospec()
        eqs += end._connect_out(self)

        # activate the assignments
        m.d.comb += eqs

    def set_input(self, i):
        """ helper function to set the input data
        """
        return eq(self.p.i_data, i)

    def ports(self):
        return [self.p.i_valid, self.n.i_ready,
                self.n.o_valid, self.p.o_ready,
                self.p.i_data, self.n.o_data # XXX need flattening!
               ]


class BufferedPipeline(ControlBase):
    """ buffered pipeline stage.  data and strobe signals travel in sync.
        if ever the input is ready and the output is not, processed data
        is shunted into a temporary register.

        Argument: stage.  see Stage API above

        stage-1   p.i_valid >>in   stage   n.o_valid out>>   stage+1
        stage-1   p.o_ready <<out  stage   n.i_ready <<in    stage+1
        stage-1   p.i_data  >>in   stage   n.o_data  out>>   stage+1
                              |             |
                            process --->----^
                              |             |
                              +-- r_data ->-+

        input data p.i_data is read (only), is processed and goes into an
        intermediate result store [process()].  this is updated
        combinatorially.

        in a non-stall condition, the intermediate result will go into the
        output (update_output).  however if ever there is a stall, it goes
        into r_data instead [update_buffer()].

        when the non-stall condition is released, r_data is the first
        to be transferred to the output [flush_buffer()], and the stall
        condition cleared.

        on the next cycle (as long as stall is not raised again) the
        input may begin to be processed and transferred directly to output.

    """
    def __init__(self, stage):
        ControlBase.__init__(self)
        self.stage = stage

        # set up the input and output data
        self.p.i_data = stage.ispec() # input type
        self.n.o_data = stage.ospec()

    def elaborate(self, platform):
        m = Module()

        result = self.stage.ospec()
        r_data = self.stage.ospec()
        if hasattr(self.stage, "setup"):
            self.stage.setup(m, self.p.i_data)

        # establish some combinatorial temporaries
        o_n_validn = Signal(reset_less=True)
        i_p_valid_o_p_ready = Signal(reset_less=True)
        p_i_valid = Signal(reset_less=True)
        m.d.comb += [p_i_valid.eq(self.p.i_valid_logic()),
                     o_n_validn.eq(~self.n.o_valid),
                     i_p_valid_o_p_ready.eq(p_i_valid & self.p.o_ready),
                    ]

        # store result of processing in combinatorial temporary
        m.d.comb += eq(result, self.stage.process(self.p.i_data))

        # if not in stall condition, update the temporary register
        with m.If(self.p.o_ready): # not stalled
            m.d.sync += eq(r_data, result) # update buffer

        with m.If(self.n.i_ready): # next stage is ready
            with m.If(self.p.o_ready): # not stalled
                # nothing in buffer: send (processed) input direct to output
                m.d.sync += [self.n.o_valid.eq(p_i_valid),
                             eq(self.n.o_data, result), # update output
                            ]
            with m.Else(): # p.o_ready is false, and something is in buffer.
                # Flush the [already processed] buffer to the output port.
                m.d.sync += [self.n.o_valid.eq(1),      # declare reg empty
                             eq(self.n.o_data, r_data), # flush buffer
                             self.p.o_ready.eq(1),      # clear stall condition
                            ]
                # ignore input, since p.o_ready is also false.

        # (n.i_ready) is false here: next stage is *not* ready
        with m.Elif(o_n_validn): # ... but the output register is empty
            m.d.sync += [self.n.o_valid.eq(p_i_valid),
                         self.p.o_ready.eq(1), # Keep the buffer empty
                         eq(self.n.o_data, result), # set output data
                        ]

        # (n.i_ready) false and (n.o_valid) true:
        with m.Elif(i_p_valid_o_p_ready):
            # output is blocked, and valid input has just been accepted
            # (into r_data, above): raise the stall by clearing o_ready
            m.d.sync += self.p.o_ready.eq(~(p_i_valid & self.n.o_valid))

        return m


class ExampleAddStage(StageCls):
    """ an example of how to use the buffered pipeline, as a class instance
    """

    def ispec(self):
        """ returns a tuple of input signals which will be the incoming data
        """
        return (Signal(16), Signal(16))

    def ospec(self):
        """ returns an output signal which will happen to contain the sum
            of the two inputs
        """
        return Signal(16)

    def process(self, i):
        """ process the input data (sums the values in the tuple) and returns it
        """
        return i[0] + i[1]


class ExampleBufPipeAdd(BufferedPipeline):
    """ an example of how to use the buffered pipeline, using a class instance
    """

    def __init__(self):
        addstage = ExampleAddStage()
        BufferedPipeline.__init__(self, addstage)


class ExampleStage(Stage):
    """ an example of how to use the buffered pipeline, in a static class
        fashion
    """

    def ispec():
        return Signal(16, name="example_input_signal")

    def ospec():
        return Signal(16, name="example_output_signal")

    def process(i):
        """ process the input data and returns it (adds 1)
        """
        return i + 1


class ExampleStageCls(StageCls):
    """ an example of how to use the buffered pipeline, in a class-instance
        fashion
    """

    def ispec(self):
        return Signal(16, name="example_input_signal")

    def ospec(self):
        return Signal(16, name="example_output_signal")

    def process(self, i):
        """ process the input data and returns it (adds 1)
        """
        return i + 1


class ExampleBufPipe(BufferedPipeline):
    """ an example of how to use the buffered pipeline.
    """

    def __init__(self):
        BufferedPipeline.__init__(self, ExampleStage)


class UnbufferedPipeline(ControlBase):
    """ A simple pipeline stage with single-clock synchronisation
        and two-way valid/ready synchronised signalling.

        Note that a stall in one stage will result in the entire pipeline
        chain stalling.

        Also note that, unlike BufferedPipeline, the valid/ready signalling
        does NOT travel synchronously with the data: the valid/ready
        signalling combines in a *combinatorial* fashion.  Therefore, a long
        pipeline chain will lengthen propagation delays.

        Argument: stage.  see Stage API, above

        stage-1   p.i_valid >>in   stage   n.o_valid out>>   stage+1
        stage-1   p.o_ready <<out  stage   n.i_ready <<in    stage+1
        stage-1   p.i_data  >>in   stage   n.o_data  out>>   stage+1
                              |             |
                            r_data        result
                              |             |
                              +--process ->-+

        Attributes:
        -----------
        p.i_data : StageInput, shaped according to ispec
            The pipeline input
        n.o_data : StageOutput, shaped according to ospec
            The pipeline output
        r_data : input_shape according to ispec
            A temporary (buffered) copy of a prior (valid) input.
            This is HELD if the output is not ready.  It is updated
            SYNCHRONOUSLY.
        result: output_shape according to ospec
            The output of the combinatorial logic.  it is updated
            COMBINATORIALLY (no clock dependence).
    """

    def __init__(self, stage):
        ControlBase.__init__(self)
        self.stage = stage
        self._data_valid = Signal()

        # set up the input and output data
        self.p.i_data = stage.ispec() # input type
        self.n.o_data = stage.ospec() # output type

    def elaborate(self, platform):
        m = Module()

        r_data = self.stage.ispec() # input type
        result = self.stage.ospec() # output data
        if hasattr(self.stage, "setup"):
            self.stage.setup(m, r_data)

        p_i_valid = Signal(reset_less=True)
        m.d.comb += p_i_valid.eq(self.p.i_valid_logic())
        m.d.comb += eq(result, self.stage.process(r_data))
        m.d.comb += self.n.o_valid.eq(self._data_valid)
        m.d.comb += self.p.o_ready.eq(~self._data_valid | self.n.i_ready)
        m.d.sync += self._data_valid.eq(p_i_valid | \
                                        (~self.n.i_ready & self._data_valid))
        with m.If(p_i_valid & self.p.o_ready):
            m.d.sync += eq(r_data, self.p.i_data)
        m.d.comb += eq(self.n.o_data, result)
        return m


class ExamplePipeline(UnbufferedPipeline):
    """ an example of how to use the combinatorial pipeline.
    """

    def __init__(self):
        UnbufferedPipeline.__init__(self, ExampleStage)


if __name__ == '__main__':
    dut = ExampleBufPipe()
    vl = rtlil.convert(dut, ports=dut.ports())
    with open("test_bufpipe.il", "w") as f:
        f.write(vl)

    dut = ExamplePipeline()
    vl = rtlil.convert(dut, ports=dut.ports())
    with open("test_combpipe.il", "w") as f:
        f.write(vl)