""" Pipeline and BufferedPipeline implementation, conforming to the same API.

    eq:
    --

    a strategically very important function that is identical in function
    to nmigen's Signal.eq function, except it may take objects, or a list
    of objects, or a tuple of objects, and where objects may also be
    Records.

    Stage API:
    ---------

    stage requires compliance with a strict API that may be
    implemented in several means, including as a static class.
    the methods of a stage instance must be as follows:

    * ispec() - Input data format specification
                returns an object or a list or tuple of objects, or
                a Record, each object having an "eq" function which
                takes responsibility for copying by assignment all
                sub-objects
    * ospec() - Output data format specification
                requirements as for ispec
    * process(i) - Processes an ispec-formatted object
                returns a combinatorial block of a result that
                may be assigned to the output, by way of the "eq"
                function
    * setup(m, i) - Optional function for setting up submodules
                may be used for more complex stages, to link
                the input (i) to submodules.  must take responsibility
                for adding those submodules to the module (m).
                the submodules must be combinatorial blocks and
                must have their inputs and output linked combinatorially.

    StageChain:
    ----------

    A useful combinatorial wrapper around stages that chains them together
    and then presents a Stage-API-conformant interface.

    UnbufferedPipeline:
    ------------------

    A simple stalling clock-synchronised pipeline that has no buffering
    (unlike BufferedPipeline).  A stall anywhere along the line will
    result in a stall back-propagating down the entire chain.

    The BufferedPipeline by contrast will buffer incoming data, allowing
    previous stages one clock cycle's grace before also having to stall.

    An advantage of the UnbufferedPipeline over the Buffered one is
    that the amount of logic needed (number of gates) is greatly
    reduced.

    BufferedPipeline:
    ----------------

    nmigen implementation of buffered pipeline stage, based on zipcpu:
    https://zipcpu.com/blog/2017/08/14/strategies-for-pipelining.html

    this module requires quite a bit of thought to understand how it works
    (and why it is needed in the first place).  reading the above is
    *strongly* recommended.

    unlike john dawson's IEEE754 FPU STB/ACK signalling, which requires
    the STB / ACK signals to raise and lower (on separate clocks) before
    data may proceed (thus only allowing one piece of data to proceed
    on *ALTERNATE* cycles), the signalling here is a true pipeline
    where data will flow on *every* clock when the conditions are right.

    input acceptance conditions are when:
        * incoming previous-stage strobe (p.i_valid) is HIGH
        * outgoing previous-stage ready (p.o_ready) is HIGH

    output transmission conditions are when:
        * outgoing next-stage strobe (n.o_valid) is HIGH
        * incoming next-stage ready (n.i_ready) is HIGH

    the tricky bit is when the input has valid data and the output is not
    ready to accept it.  if it wasn't for the clock synchronisation, it
    would be possible to tell the input "hey don't send that data, we're
    not ready".  unfortunately, it's not possible to "change the past":
    the previous stage *has no choice* but to pass on its data.

    therefore, the incoming data *must* be accepted - and stored: that
    is the responsibility / contract that this stage *must* accept.
    on the same clock, it's possible to tell the input that it must
    not send any more data.  this is the "stall" condition.

    we now effectively have *two* possible pieces of data to "choose" from:
    the buffered data, and the incoming data.  the decision as to which
    to process and output is based on whether we are in "stall" or not.
    i.e. when the next stage is no longer ready, the output comes from
    the buffer if a stall had previously occurred, otherwise it comes
    direct from processing the input.

    this allows us to respect a synchronous "travelling STB" with what
    dan calls a "buffered handshake".

    it's quite a complex state machine!
"""

from nmigen import Signal, Cat, Const, Mux, Module
from nmigen.cli import verilog, rtlil
from nmigen.hdl.rec import Record, Layout

from abc import ABCMeta, abstractmethod
from collections.abc import Sequence


class PrevControl:
    """ contains signals that come *from* the previous stage (both in and out)
        * i_valid: previous stage indicating all incoming data is valid.
                   may be a multi-bit signal, where all bits are required
                   to be asserted to indicate "valid".
        * o_ready: output back to the previous stage, indicating readiness
                   to accept data
        * i_data : an input - added by the user of this class
    """

    def __init__(self, i_width=1):
        self.i_valid = Signal(i_width, name="p_i_valid") # prev   >>in  self
        self.o_ready = Signal(name="p_o_ready")          # prev   <<out self

    def _connect_in(self, prev):
        """ helper function to connect stage to an input source.  do not
            use to connect stage-to-stage!
        """
        return [self.i_valid.eq(prev.i_valid),
                prev.o_ready.eq(self.o_ready),
                eq(self.i_data, prev.i_data),
               ]

    def i_valid_logic(self):
        vlen = len(self.i_valid)
        if vlen > 1: # multi-bit case: valid only when i_valid is all 1s
            all1s = Const(-1, (len(self.i_valid), False))
            return self.i_valid == all1s
        # single-bit i_valid case
        return self.i_valid

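
# Illustrative sketch only (not part of the API): with a multi-bit i_valid
# (e.g. in_multi=3 on ControlBase, i_width=3 here), i_valid_logic() reports
# "valid" only when *all* bits of i_valid are asserted.  The underscore-
# prefixed names are hypothetical and not used elsewhere in this module.
def _example_multi_bit_valid(m):
    p = PrevControl(i_width=3)
    all_valid = Signal(name="all_valid")
    m.d.comb += all_valid.eq(p.i_valid_logic()) # true only when i_valid == 0b111
    return p, all_valid
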

class NextControl:
    """ contains the signals that go *to* the next stage (both in and out)
        * o_valid: output indicating to next stage that data is valid
        * i_ready: input from next stage indicating that it can accept data
        * o_data : an output - added by the user of this class
    """
    def __init__(self):
        self.o_valid = Signal(name="n_o_valid") # self out>>  next
        self.i_ready = Signal(name="n_i_ready") # self <<in   next

    def connect_to_next(self, nxt):
        """ helper function to connect to the next stage data/valid/ready.
            data/valid is passed *TO* nxt, and ready comes *IN* from nxt.
        """
        return [nxt.i_valid.eq(self.o_valid),
                self.i_ready.eq(nxt.o_ready),
                eq(nxt.i_data, self.o_data),
               ]

    def _connect_out(self, nxt):
        """ helper function to connect stage to an output source.  do not
            use to connect stage-to-stage!
        """
        return [nxt.o_valid.eq(self.o_valid),
                self.i_ready.eq(nxt.i_ready),
                eq(nxt.o_data, self.o_data),
               ]


def eq(o, i):
    """ makes signals equal: a helper routine which identifies if it is being
        passed a list (or tuple) of objects, or signals, or Records, and calls
        the objects' eq function.

        complex objects (classes) can be used: they must follow the
        convention of having an eq member function, which takes the
        responsibility of further calling eq and returning a list of
        eq assignments

        Record is a special (unusual, recursive) case, where the input may be
        specified as a dictionary (which may contain further dictionaries,
        recursively), where the field names of the dictionary must match
        the Record's field spec.  Alternatively, an object with the same
        member names as the Record may be assigned: it does not have to
        *be* a Record.
    """
    if not isinstance(o, Sequence):
        o, i = [o], [i]
    res = []
    for (ao, ai) in zip(o, i):
        #print ("eq", ao, ai)
        if isinstance(ao, Record):
            for idx, (field_name, field_shape, _) in enumerate(ao.layout):
                if isinstance(field_shape, Layout):
                    val = ai.fields
                else:
                    val = ai
                if hasattr(val, field_name): # check for attribute
                    val = getattr(val, field_name)
                else:
                    val = val[field_name] # dictionary-style specification
                rres = eq(ao.fields[field_name], val)
                res += rres
        else:
            rres = ao.eq(ai)
            if not isinstance(rres, Sequence):
                rres = [rres]
            res += rres
    return res
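

# Illustrative sketch only (not part of the API): eq() accepts plain Signals,
# lists/tuples, or Records.  For a Record target, the source may be an object
# with matching member names or (as here) a dictionary whose keys match the
# Record's field names.  The layout and values below are hypothetical.
def _example_eq_usage(m):
    sig_dst, sig_src = Signal(16), Signal(16)
    rec_dst = Record([("op_a", 16), ("op_b", 16)])
    m.d.comb += eq(sig_dst, sig_src)                # plain Signal assignment
    m.d.comb += eq(rec_dst, {"op_a": 1, "op_b": 2}) # dict keys -> Record fields
    return rec_dst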


class StageCls(metaclass=ABCMeta):
    """ Class-based "Stage" API.  requires instantiation (after derivation)

        see "Stage API" above.  Note: python does *not* require derivation
        from this class.  All that is required is that the pipelines *have*
        the functions listed in this class.  Derivation from this class
        is therefore merely a "courtesy" to maintainers.
    """
    @abstractmethod
    def ispec(self): pass        # REQUIRED
    @abstractmethod
    def ospec(self): pass        # REQUIRED
    #@abstractmethod
    #def setup(self, m, i): pass # OPTIONAL
    @abstractmethod
    def process(self, i): pass   # REQUIRED


class Stage(metaclass=ABCMeta):
    """ Static "Stage" API.  does not require instantiation (after derivation)

        see "Stage API" above.  Note: python does *not* require derivation
        from this class.  All that is required is that the pipelines *have*
        the functions listed in this class.  Derivation from this class
        is therefore merely a "courtesy" to maintainers.
    """
    @staticmethod
    @abstractmethod
    def ispec(): pass

    @staticmethod
    @abstractmethod
    def ospec(): pass

    #@staticmethod
    #@abstractmethod
    #def setup(m, i): pass

    @staticmethod
    @abstractmethod
    def process(i): pass


class StageChain(StageCls):
    """ pass in a list of stages, and they will automatically be
        chained together via their input and output specs into a
        combinatorial chain.

        the end result basically conforms to the exact same Stage API.

        * input to this class will be the input of the first stage
        * output of first stage goes into input of second
        * output of second goes into input of third (etc. etc.)
        * the output of this class will be the output of the last stage
    """
    def __init__(self, chain):
        self.chain = chain

    def ispec(self):
        return self.chain[0].ispec()

    def ospec(self):
        return self.chain[-1].ospec()

    def setup(self, m, i):
        for (idx, c) in enumerate(self.chain):
            if hasattr(c, "setup"):
                c.setup(m, i)               # stage may have some module stuff
            o = self.chain[idx].ospec()     # only the last assignment survives
            m.d.comb += eq(o, c.process(i)) # process input into "o"
            if idx != len(self.chain)-1:
                ni = self.chain[idx+1].ispec() # becomes new input on next loop
                m.d.comb += eq(ni, o)          # assign output to next input
                i = ni
        self.o = o                             # last loop is the output

    def process(self, i):
        return self.o
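

# Illustrative sketch only: chaining two hypothetical "+1" stages with
# StageChain gives a single Stage-API-conformant object whose combinatorial
# result is the input plus 2.  _PlusOneStage and _example_stagechain are
# hypothetical names, not used elsewhere in this module.
class _PlusOneStage:
    def ispec(self): return Signal(16)
    def ospec(self): return Signal(16)
    def process(self, i): return i + 1


def _example_stagechain(m):
    chain = StageChain([_PlusOneStage(), _PlusOneStage()])
    i = chain.ispec()    # input spec of the first stage in the chain
    chain.setup(m, i)    # links the stages together combinatorially
    o = chain.process(i) # output of the last stage (i + 2)
    return i, o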


class ControlBase:
    """ Common functions for Pipeline API
    """
    def __init__(self, in_multi=None):
        """ Base class containing ready/valid/data to previous and next stages

            * p: contains ready/valid to the previous stage
            * n: contains ready/valid to the next stage

            User must also:
            * add i_data member to PrevControl (p) and
            * add o_data member to NextControl (n)
        """

        # set up input and output IO ACK (prev/next ready/valid)
        self.p = PrevControl(in_multi)
        self.n = NextControl()

    def connect_to_next(self, nxt):
        """ helper function to connect to the next stage data/valid/ready.
        """
        return self.n.connect_to_next(nxt.p)

    def _connect_in(self, prev):
        """ helper function to connect stage to an input source.  do not
            use to connect stage-to-stage!
        """
        return self.p._connect_in(prev.p)

    def _connect_out(self, nxt):
        """ helper function to connect stage to an output source.  do not
            use to connect stage-to-stage!
        """
        return self.n._connect_out(nxt.n)

    def connect(self, m, pipechain):
        """ connects a chain (list) of Pipeline instances together and
            links them to this ControlBase instance:

                        in <----> self <---> out
                            |     ^
                            v     |
                     [pipe1, pipe2, pipe3, pipe4]
                      |    ^  |    ^  |     ^
                      v    |  v    |  v     |
                    out---in out--in out---in

            Also takes care of allocating i_data/o_data, by looking up
            the data spec for each end of the pipechain.  i.e. it is NOT
            necessary to allocate self.p.i_data or self.n.o_data manually:
            this is handled AUTOMATICALLY, here.

            Basically this function is the direct equivalent of StageChain,
            except that unlike StageChain, the Pipeline logic is followed.

            Just as StageChain presents an object that conforms to the
            Stage API from a list of objects that also conform to the
            Stage API, an object that calls this Pipeline connect function
            has the exact same pipeline API as the list of pipeline objects
            it is called with.

            Thus it becomes possible to build up larger chains recursively.
            More complex chains (multi-input, multi-output) will have to be
            done manually.
        """
        eqs = [] # collated list of assignment statements

        # connect inter-chain
        for i in range(len(pipechain)-1):
            pipe1 = pipechain[i]
            pipe2 = pipechain[i+1]
            eqs += pipe1.connect_to_next(pipe2)

        # connect front of chain to ourselves
        front = pipechain[0]
        self.p.i_data = front.stage.ispec()
        eqs += front._connect_in(self)

        # connect end of chain to ourselves
        end = pipechain[-1]
        self.n.o_data = end.stage.ospec()
        eqs += end._connect_out(self)

        # activate the assignments
        m.d.comb += eqs

    def set_input(self, i):
        """ helper function to set the input data
        """
        return eq(self.p.i_data, i)

    def ports(self):
        return [self.p.i_valid, self.n.i_ready,
                self.n.o_valid, self.p.o_ready,
                self.p.i_data, self.n.o_data   # XXX need flattening!
               ]


class BufferedPipeline(ControlBase):
    """ buffered pipeline stage.  data and strobe signals travel in sync.
        if ever the input has data but the output is not ready to accept it,
        the processed data is stored in a temporary register.

        Argument: stage.  see Stage API above

        stage-1   p.i_valid >>in   stage   n.o_valid out>>   stage+1
        stage-1   p.o_ready <<out  stage   n.i_ready <<in    stage+1
        stage-1   p.i_data  >>in   stage   n.o_data  out>>   stage+1
                              |             |
                              process --->----^
                              |             |
                              +-- r_data ->-+

        input data p.i_data is read (only), is processed and goes into an
        intermediate result store [process()].  this is updated combinatorially.

        in a non-stall condition, the intermediate result will go into the
        output [update_output()].  however, if ever there is a stall, it goes
        into r_data instead [update_buffer()].

        when the non-stall condition is released, r_data is the first
        to be transferred to the output [flush_buffer()], and the stall
        condition cleared.

        on the next cycle (as long as stall is not raised again) the
        input may begin to be processed and transferred directly to output.

    """
    def __init__(self, stage):
        ControlBase.__init__(self)
        self.stage = stage

        # set up the input and output data
        self.p.i_data = stage.ispec() # input type
        self.n.o_data = stage.ospec() # output type

    def elaborate(self, platform):
        m = Module()

        result = self.stage.ospec()
        r_data = self.stage.ospec()
        if hasattr(self.stage, "setup"):
            self.stage.setup(m, self.p.i_data)

        # establish some combinatorial temporaries
        o_n_validn = Signal(reset_less=True)
        i_p_valid_o_p_ready = Signal(reset_less=True)
        p_i_valid = Signal(reset_less=True)
        m.d.comb += [p_i_valid.eq(self.p.i_valid_logic()),
                     o_n_validn.eq(~self.n.o_valid),
                     i_p_valid_o_p_ready.eq(p_i_valid & self.p.o_ready),
                    ]

        # store result of processing in combinatorial temporary
        m.d.comb += eq(result, self.stage.process(self.p.i_data))

        # if not in stall condition, update the temporary register
        with m.If(self.p.o_ready): # not stalled
            m.d.sync += eq(r_data, result) # update buffer

        with m.If(self.n.i_ready): # next stage is ready
            with m.If(self.p.o_ready): # not stalled
                # nothing in buffer: send (processed) input direct to output
                m.d.sync += [self.n.o_valid.eq(p_i_valid),
                             eq(self.n.o_data, result), # update output
                            ]
            with m.Else(): # p.o_ready is false, and something is in buffer.
                # Flush the [already processed] buffer to the output port.
                m.d.sync += [self.n.o_valid.eq(1),      # buffered result is valid
                             eq(self.n.o_data, r_data), # flush buffer
                             self.p.o_ready.eq(1),      # clear stall condition
                            ]
                # ignore input, since p.o_ready is also false.

        # (n.i_ready) is false here: next stage is *not* ready
        with m.Elif(o_n_validn): # ... and the output register is empty
            m.d.sync += [self.n.o_valid.eq(p_i_valid),
                         self.p.o_ready.eq(1), # keep the buffer empty
                         eq(self.n.o_data, result), # set output data
                        ]

        # (n.i_ready) false and (n.o_valid) true:
        with m.Elif(i_p_valid_o_p_ready):
            # input is valid and we are not stalled: accept it (into r_data,
            # above) and stall if the output register is already occupied
            m.d.sync += self.p.o_ready.eq(~(p_i_valid & self.n.o_valid))

        return m


class ExampleAddStage(StageCls):
    """ an example of how to use the buffered pipeline, as a class instance
    """

    def ispec(self):
        """ returns a tuple of input signals which will be the incoming data
        """
        return (Signal(16), Signal(16))

    def ospec(self):
        """ returns an output signal which will happen to contain the sum
            of the two inputs
        """
        return Signal(16)

    def process(self, i):
        """ processes the input data (sums the values in the tuple) and returns it
        """
        return i[0] + i[1]


class ExampleBufPipeAdd(BufferedPipeline):
    """ an example of how to use the buffered pipeline, using a class instance
    """

    def __init__(self):
        addstage = ExampleAddStage()
        BufferedPipeline.__init__(self, addstage)


class ExampleStage(Stage):
    """ an example of how to use the buffered pipeline, in a static class
        fashion
    """

    def ispec():
        return Signal(16, name="example_input_signal")

    def ospec():
        return Signal(16, name="example_output_signal")

    def process(i):
        """ processes the input data and returns it (adds 1)
        """
        return i + 1


class ExampleStageCls(StageCls):
    """ an example of how to use the buffered pipeline, as a class instance
    """

    def ispec(self):
        return Signal(16, name="example_input_signal")

    def ospec(self):
        return Signal(16, name="example_output_signal")

    def process(self, i):
        """ processes the input data and returns it (adds 1)
        """
        return i + 1


class ExampleBufPipe(BufferedPipeline):
    """ an example of how to use the buffered pipeline.
    """

    def __init__(self):
        BufferedPipeline.__init__(self, ExampleStage)
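

# Illustrative sketch only: two ExampleBufPipe stages presented as a single
# Pipeline-API-conformant object, using ControlBase.connect() (see its
# docstring above).  data/valid flow forward from pipe1 to pipe2, ready flows
# back, and connect() allocates self.p.i_data / self.n.o_data automatically.
# _ExampleBufPipeChain is a hypothetical name, not used elsewhere here.
class _ExampleBufPipeChain(ControlBase):
    def __init__(self):
        ControlBase.__init__(self)
        self.pipe1 = ExampleBufPipe()
        self.pipe2 = ExampleBufPipe()

    def elaborate(self, platform):
        m = Module()
        m.submodules.pipe1 = self.pipe1
        m.submodules.pipe2 = self.pipe2
        # chains pipe1 -> pipe2 and links both ends to this instance
        self.connect(m, [self.pipe1, self.pipe2])
        return m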


class UnbufferedPipeline(ControlBase):
    """ A simple pipeline stage with single-clock synchronisation
        and two-way valid/ready synchronised signalling.

        Note that a stall in one stage will result in the entire pipeline
        chain stalling.

        Also note that, unlike BufferedPipeline, the valid/ready signalling
        does NOT travel synchronously with the data: the valid/ready
        signalling combines in a *combinatorial* fashion.  Therefore, a long
        pipeline chain will lengthen propagation delays.

        Argument: stage.  see Stage API, above

        stage-1   p.i_valid >>in   stage   n.o_valid out>>   stage+1
        stage-1   p.o_ready <<out  stage   n.i_ready <<in    stage+1
        stage-1   p.i_data  >>in   stage   n.o_data  out>>   stage+1
                              |             |
                            r_data        result
                              |             |
                              +--process ->-+

        Attributes:
        -----------
        p.i_data : StageInput, shaped according to ispec
            The pipeline input
        n.o_data : StageOutput, shaped according to ospec
            The pipeline output
        r_data : input_shape according to ispec
            A temporary (buffered) copy of a prior (valid) input.
            This is HELD if the output is not ready.  It is updated
            SYNCHRONOUSLY.
        result : output_shape according to ospec
            The output of the combinatorial logic.  it is updated
            COMBINATORIALLY (no clock dependence).
    """

    def __init__(self, stage):
        ControlBase.__init__(self)
        self.stage = stage
        self._data_valid = Signal()

        # set up the input and output data
        self.p.i_data = stage.ispec() # input type
        self.n.o_data = stage.ospec() # output type

    def elaborate(self, platform):
        m = Module()

        r_data = self.stage.ispec() # input type
        result = self.stage.ospec() # output data
        if hasattr(self.stage, "setup"):
            self.stage.setup(m, r_data)

        p_i_valid = Signal(reset_less=True)
        m.d.comb += p_i_valid.eq(self.p.i_valid_logic())
        m.d.comb += eq(result, self.stage.process(r_data))
        m.d.comb += self.n.o_valid.eq(self._data_valid)
        m.d.comb += self.p.o_ready.eq(~self._data_valid | self.n.i_ready)
        m.d.sync += self._data_valid.eq(p_i_valid |
                                        (~self.n.i_ready & self._data_valid))
        with m.If(self.p.i_valid & self.p.o_ready):
            m.d.sync += eq(r_data, self.p.i_data)
        m.d.comb += eq(self.n.o_data, result)
        return m


class ExamplePipeline(UnbufferedPipeline):
    """ an example of how to use the unbuffered pipeline.
    """

    def __init__(self):
        UnbufferedPipeline.__init__(self, ExampleStage)


if __name__ == '__main__':
    dut = ExampleBufPipe()
    vl = rtlil.convert(dut, ports=dut.ports())
    with open("test_bufpipe.il", "w") as f:
        f.write(vl)

    dut = ExamplePipeline()
    vl = rtlil.convert(dut, ports=dut.ports())
    with open("test_combpipe.il", "w") as f:
        f.write(vl)