# ieee754fpu.git: src/add/example_buf_pipe.py
1 """ Pipeline and BufferedPipeline implementation, conforming to the same API.
2
3 eq:
4 --
5
6 a strategically very important function that is identical in function
7 to nmigen's Signal.eq function, except it may take objects, or a list
8 of objects, or a tuple of objects, and where objects may also be
9 Records.
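
    a minimal usage sketch (matching how eq is called later in this file;
    the surrounding names are illustrative only):

        m.d.comb += eq(self.n.o_data, result)    # Signal, Record, list or
        m.d.comb += eq(nxt.i_data, self.o_data)  # tuple: all handled alike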

    Stage API:
    ---------

    stage requires compliance with a strict API that may be
    implemented in several ways, including as a static class.
    the methods of a stage instance must be as follows (a minimal
    conforming example is sketched below the list):

    * ispec() - Input data format specification
                returns an object or a list or tuple of objects, or
                a Record, each object having an "eq" function which
                takes responsibility for copying by assignment all
                sub-objects
    * ospec() - Output data format specification
                requirements as for ispec
    * process(i) - Processes an ispec-formatted object
                returns a combinatorial block of a result that
                may be assigned to the output, by way of the "eq"
                function
    * setup(m, i) - Optional function for setting up submodules
                may be used for more complex stages, to link
                the input (i) to submodules. must take responsibility
                for adding those submodules to the module (m).
                the submodules must be combinatorial blocks and
                must have their inputs and output linked combinatorially.

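    a minimal conforming stage (a hypothetical sketch, equivalent to
    ExampleStageCls near the bottom of this file):

        class IncrementStage:
            def ispec(self):
                return Signal(16)
            def ospec(self):
                return Signal(16)
            def process(self, i):
                return i + 1    # combinatorial result, assigned via eq()
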
    StageChain:
    ----------

    A useful combinatorial wrapper around stages that chains them together
    and then presents a Stage-API-conformant interface.

    Pipeline:
    --------

    A simple stalling clock-synchronised pipeline that has no buffering
    (unlike BufferedPipeline). A stall anywhere along the line will
    result in a stall back-propagating down the entire chain.

    The BufferedPipeline by contrast will buffer incoming data, allowing
    previous stages one clock cycle's grace before also having to stall.

    BufferedPipeline:
    ----------------

    nmigen implementation of a buffered pipeline stage, based on zipcpu:
    https://zipcpu.com/blog/2017/08/14/strategies-for-pipelining.html

    this module requires quite a bit of thought to understand how it works
    (and why it is needed in the first place). reading the above is
    *strongly* recommended.

    unlike John Dawson's IEEE754 FPU STB/ACK signalling, which requires
    the STB / ACK signals to raise and lower (on separate clocks) before
    data may proceed (thus only allowing one piece of data to proceed
    on *ALTERNATE* cycles), the signalling here is a true pipeline
    where data will flow on *every* clock when the conditions are right.

    input acceptance conditions are when:
        * incoming previous-stage strobe (p.i_valid) is HIGH
        * outgoing previous-stage ready (p.o_ready) is HIGH

    output transmission conditions are when:
        * outgoing next-stage strobe (n.o_valid) is HIGH
        * incoming next-stage ready (n.i_ready) is HIGH

    the tricky bit is when the input has valid data and the output is not
    ready to accept it. if it wasn't for the clock synchronisation, it
    would be possible to tell the input "hey don't send that data, we're
    not ready". unfortunately, it's not possible to "change the past":
    the previous stage *has no choice* but to pass on its data.

    therefore, the incoming data *must* be accepted - and stored: that
    is the responsibility / contract that this stage *must* fulfil.
    on the same clock, it's possible to tell the input that it must
    not send any more data. this is the "stall" condition.

    we now effectively have *two* possible pieces of data to "choose" from:
    the buffered data, and the incoming data. the decision as to which
    to process and output is based on whether we are in "stall" or not.
    i.e. when the next stage is no longer ready, the output comes from
    the buffer if a stall had previously occurred, otherwise it comes
    direct from processing the input.

    this allows us to respect a synchronous "travelling STB" with what
    Dan Gisselquist (the zipcpu author) calls a "buffered handshake".

    it's quite a complex state machine!
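
    a rough summary of the update rules (a sketch only, of the behaviour
    implemented in BufferedPipeline.elaborate below; "<=" denotes a
    clocked, registered update):

        if p.o_ready: r_data <= process(p.i_data)  # keep the buffer current

        if n.i_ready:                  # next stage has accepted the data
            if p.o_ready:              # not stalled: pass input straight on
                n.o_data <= process(p.i_data); n.o_valid <= p.i_valid
            else:                      # stalled: flush the buffer instead
                n.o_data <= r_data; n.o_valid <= 1; p.o_ready <= 1
        elif not n.o_valid:            # output register is empty
            n.o_data <= process(p.i_data); n.o_valid <= p.i_valid
            p.o_ready <= 1
        elif p.i_valid and p.o_ready:  # accepting data we cannot yet send on
            p.o_ready <= 0             # stall: the data is parked in r_data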
98 """
99
100 from nmigen import Signal, Cat, Const, Mux, Module
101 from nmigen.cli import verilog, rtlil
102 from nmigen.hdl.rec import Record, Layout
103
104 from collections.abc import Sequence
105
106
class PrevControl:
    """ contains signals that come *from* the previous stage (both in and out)
        * i_valid: previous stage indicating all incoming data is valid.
                   may be a multi-bit signal, where all bits are required
                   to be asserted to indicate "valid".
        * o_ready: output to next stage indicating readiness to accept data
        * i_data : an input - added by the user of this class
    """

    def __init__(self, i_width=1):
        self.i_valid = Signal(i_width, name="p_i_valid") # prev   >>in  self
        self.o_ready = Signal(name="p_o_ready")          # prev   <<out self

    def connect_in(self, prev):
        """ helper function to connect stage to an input source.  do not
            use to connect stage-to-stage!
        """
        return [self.i_valid.eq(prev.i_valid),
                prev.o_ready.eq(self.o_ready),
                eq(self.i_data, prev.i_data),
               ]

    def i_valid_logic(self):
        vlen = len(self.i_valid)
        if vlen > 1: # multi-bit case: valid only when i_valid is all 1s
            all1s = Const(-1, (vlen, False))
            return self.i_valid == all1s
        # single-bit i_valid case
        return self.i_valid


class NextControl:
    """ contains the signals that go *to* the next stage (both in and out)
        * o_valid: output indicating to next stage that data is valid
        * i_ready: input from next stage indicating that it can accept data
        * o_data : an output - added by the user of this class
    """
    def __init__(self):
        self.o_valid = Signal(name="n_o_valid") # self out>>  next
        self.i_ready = Signal(name="n_i_ready") # self <<in   next

    def connect_to_next(self, nxt):
        """ helper function to connect to the next stage data/valid/ready.
            data/valid is passed *TO* nxt, and ready comes *IN* from nxt.
        """
        return [nxt.i_valid.eq(self.o_valid),
                self.i_ready.eq(nxt.o_ready),
                eq(nxt.i_data, self.o_data),
               ]

    def connect_out(self, nxt):
        """ helper function to connect stage to an output source.  do not
            use to connect stage-to-stage!
        """
        return [nxt.o_valid.eq(self.o_valid),
                self.i_ready.eq(nxt.i_ready),
                eq(nxt.o_data, self.o_data),
               ]


def eq(o, i):
    """ makes signals equal: a helper routine which identifies if it is being
        passed a list (or tuple) of objects, or signals, or Records, and calls
        the objects' eq function.

        complex objects (classes) can be used: they must follow the
        convention of having an eq member function, which takes the
        responsibility of further calling eq and returning a list of
        eq assignments

        Record is a special (unusual, recursive) case, where the input
        is specified as a dictionary (which may contain further dictionaries,
        recursively), where the field names of the dictionary must match
        the Record's field spec.
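
        Example sketch (hypothetical signal names, using the tuple form
        as returned by ExampleAddStage.ispec below):

            m.d.comb += eq((out_a, out_b), (in_a, in_b))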
181 """
182 if not isinstance(o, Sequence):
183 o, i = [o], [i]
184 res = []
185 for (ao, ai) in zip(o, i):
186 #print ("eq", ao, ai)
187 if isinstance(ao, Record):
188 for idx, (field_name, field_shape, _) in enumerate(ao.layout):
189 if isinstance(field_shape, Layout):
190 rres = eq(ao.fields[field_name], ai.fields[field_name])
191 else:
192 rres = eq(ao.fields[field_name], ai[field_name])
193 res += rres
194 else:
195 rres = ao.eq(ai)
196 if not isinstance(rres, Sequence):
197 rres = [rres]
198 res += rres
199 return res
200
201
class StageChain:
    """ pass in a list of stages, and they will automatically be
        chained together via their input and output specs into a
        combinatorial chain.

        * input to this class will be the input of the first stage
        * output of first stage goes into input of second
        * output of second goes into input of third (etc. etc.)
        * the output of this class will be the output of the last stage
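
        a usage sketch (StageA/StageB/StageC are hypothetical classes,
        each assumed to follow the Stage API above):

            chain = StageChain([StageA(), StageB(), StageC()])
            pipe = BufferedPipeline(chain)  # the chain is itself a stage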
211 """
212 def __init__(self, chain):
213 self.chain = chain
214
215 def ispec(self):
216 return self.chain[0].ispec()
217
218 def ospec(self):
219 return self.chain[-1].ospec()
220
221 def setup(self, m, i):
222 for (idx, c) in enumerate(self.chain):
223 if hasattr(c, "setup"):
224 c.setup(m, i) # stage may have some module stuff
225 o = self.chain[idx].ospec() # only the last assignment survives
226 m.d.comb += eq(o, c.process(i)) # process input into "o"
227 if idx != len(self.chain)-1:
228 ni = self.chain[idx+1].ispec() # becomes new input on next loop
229 m.d.comb += eq(ni, o) # assign output to next input
230 i = ni
231 self.o = o # last loop is the output
232
233 def process(self, i):
234 return self.o
235
236
class PipelineBase:
    """ Common functions for Pipeline API
    """
    def __init__(self, stage, in_multi=None):
        """ pass in a "stage" which may be either a static class or a class
            instance, which has four functions (one optional):
            * ispec: returns input signals according to the input specification
            * ospec: returns output signals according to the output
                     specification
            * process: takes an input instance and returns processed data
            * setup: performs any module linkage if the stage uses one.

            User must also:
            * add i_data member to PrevControl and
            * add o_data member to NextControl
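
            e.g. (a sketch of what a subclass does: compare
            BufferedPipeline.__init__, below):

                self.p.i_data = stage.ispec()  # input type
                self.n.o_data = stage.ospec()  # output type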
251 """
252 self.stage = stage
253
254 # set up input and output IO ACK (prev/next ready/valid)
255 self.p = PrevControl(in_multi)
256 self.n = NextControl()
257
258 def connect_to_next(self, nxt):
259 """ helper function to connect to the next stage data/valid/ready.
260 """
261 return self.n.connect_to_next(nxt.p)
262
263 def connect_in(self, prev):
264 """ helper function to connect stage to an input source. do not
265 use to connect stage-to-stage!
266 """
267 return self.p.connect_in(prev.p)
268
269 def connect_out(self, nxt):
270 """ helper function to connect stage to an output source. do not
271 use to connect stage-to-stage!
272 """
273 return self.n.connect_out(nxt.n)
274
275 def set_input(self, i):
276 """ helper function to set the input data
277 """
278 return eq(self.p.i_data, i)
279
280 def ports(self):
281 return [self.p.i_valid, self.n.i_ready,
282 self.n.o_valid, self.p.o_ready,
283 self.p.i_data, self.n.o_data # XXX need flattening!
284 ]
285
286
class BufferedPipeline(PipelineBase):
    """ buffered pipeline stage.  data and strobe signals travel in sync.
        if ever the input has data but the output (next stage) is not ready
        to accept it, the processed data is stored in a temporary register.

        Argument: stage.  see Stage API above

        stage-1   p.i_valid >>in   stage   n.o_valid out>>   stage+1
        stage-1   p.o_ready <<out  stage   n.i_ready <<in    stage+1
        stage-1   p.i_data  >>in   stage   n.o_data  out>>   stage+1
                              |             |
                              process --->----^
                              |             |
                              +-- r_data ->-+

        input data p.i_data is read (only), is processed and goes into an
        intermediate result store [process()].  this is updated
        combinatorially.

        in a non-stall condition, the intermediate result will go into the
        output [update_output].  however if ever there is a stall, it goes
        into r_data instead [update_buffer()].

        when the non-stall condition is released, r_data is the first
        to be transferred to the output [flush_buffer()], and the stall
        condition cleared.

        on the next cycle (as long as stall is not raised again) the
        input may begin to be processed and transferred directly to output.

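        a usage sketch (assuming a hypothetical parent module m, and using
        the ExampleBufPipe class defined at the bottom of this file):

            pipe1 = ExampleBufPipe()
            pipe2 = ExampleBufPipe()
            m.submodules.pipe1 = pipe1
            m.submodules.pipe2 = pipe2
            m.d.comb += pipe1.connect_to_next(pipe2)  # pipe1 out -> pipe2 in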
316 """
317 def __init__(self, stage):
318 PipelineBase.__init__(self, stage)
319
320 # set up the input and output data
321 self.p.i_data = stage.ispec() # input type
322 self.n.o_data = stage.ospec()
323
    def elaborate(self, platform):
        m = Module()

        result = self.stage.ospec()
        r_data = self.stage.ospec()
        if hasattr(self.stage, "setup"):
            self.stage.setup(m, self.p.i_data)

        # establish some combinatorial temporaries
        o_n_validn = Signal(reset_less=True)
        i_p_valid_o_p_ready = Signal(reset_less=True)
        p_i_valid = Signal(reset_less=True)
        m.d.comb += [p_i_valid.eq(self.p.i_valid_logic()),
                     o_n_validn.eq(~self.n.o_valid),
                     i_p_valid_o_p_ready.eq(p_i_valid & self.p.o_ready),
                    ]

        # store result of processing in combinatorial temporary
        m.d.comb += eq(result, self.stage.process(self.p.i_data))

        # if not in stall condition, update the temporary register
        with m.If(self.p.o_ready): # not stalled
            m.d.sync += eq(r_data, result) # update buffer

        with m.If(self.n.i_ready): # next stage is ready
            with m.If(self.p.o_ready): # not stalled
                # nothing in buffer: send (processed) input direct to output
                m.d.sync += [self.n.o_valid.eq(p_i_valid),
                             eq(self.n.o_data, result), # update output
                            ]
            with m.Else(): # p.o_ready is false, and something is in buffer.
                # Flush the [already processed] buffer to the output port.
                m.d.sync += [self.n.o_valid.eq(1),      # buffered data is valid
                             eq(self.n.o_data, r_data), # flush buffer
                             self.p.o_ready.eq(1),      # clear stall condition
                            ]
                # ignore input, since p.o_ready is also false.

        # (n.i_ready) is false here: next stage is *not* ready
        with m.Elif(o_n_validn): # output register is empty: ok to update it
            m.d.sync += [self.n.o_valid.eq(p_i_valid),
                         self.p.o_ready.eq(1), # Keep the buffer empty
                         eq(self.n.o_data, result), # set output data
                        ]

        # (n.i_ready) false and (n.o_valid) true:
        with m.Elif(i_p_valid_o_p_ready):
            # input is being accepted, but the next stage cannot take the
            # current output: the newly-accepted data lands in r_data, so
            # raise the stall condition (lower o_ready).
            m.d.sync += self.p.o_ready.eq(~(p_i_valid & self.n.o_valid))

        return m


class ExampleAddStage:
    """ an example of how to use the buffered pipeline, as a class instance
    """

    def ispec(self):
        """ returns a tuple of input signals which will be the incoming data
        """
        return (Signal(16), Signal(16))

    def ospec(self):
        """ returns an output signal which will happen to contain the sum
            of the two inputs
        """
        return Signal(16)

    def process(self, i):
        """ processes the input data (sums the values in the tuple)
            and returns it
        """
        return i[0] + i[1]


class ExampleBufPipeAdd(BufferedPipeline):
    """ an example of how to use the buffered pipeline, using a class instance
    """

    def __init__(self):
        addstage = ExampleAddStage()
        BufferedPipeline.__init__(self, addstage)


class ExampleStage:
    """ an example of how to use the buffered pipeline, in a static class
        fashion
    """

    def ispec():
        return Signal(16, name="example_input_signal")

    def ospec():
        return Signal(16, name="example_output_signal")

    def process(i):
        """ processes the input data and returns it (adds 1)
        """
        return i + 1


class ExampleStageCls:
    """ an example of how to use the buffered pipeline, as a class instance
        (the same as ExampleStage above, but instantiated rather than static)
    """

    def ispec(self):
        return Signal(16, name="example_input_signal")

    def ospec(self):
        return Signal(16, name="example_output_signal")

    def process(self, i):
        """ processes the input data and returns it (adds 1)
        """
        return i + 1


class ExampleBufPipe(BufferedPipeline):
    """ an example of how to use the buffered pipeline.
    """

    def __init__(self):
        BufferedPipeline.__init__(self, ExampleStage)


class Pipeline(PipelineBase):
    """ A simple pipeline stage with single-clock synchronisation
        and two-way valid/ready synchronised signalling.  Note that
        a stall in one stage will result in the entire pipeline chain
        stalling.

        Note also that the valid/ready signalling does NOT travel with the
        data: a long pipeline chain will lengthen propagation delays.

        Argument: stage.  see Stage API, above

        Attributes:
        -----------
        p.i_data : StageInput, shaped according to ispec
            The pipeline input
        n.o_data : StageOutput, shaped according to ospec
            The pipeline output
        r_data : input_shape according to ispec
            A temporary (buffered) copy of a prior (valid) input
        result: output_shape according to ospec
            The output of the combinatorial logic
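
        a rough summary of the handshake (a sketch only, of the behaviour
        implemented in elaborate() below; "<=" denotes a clocked update):

            p.o_ready = ~data_valid | n.i_ready   # accept when empty, or when
                                                  # the next stage is draining
            data_valid <= p.i_valid | (~n.i_ready & data_valid)
            if p.i_valid & p.o_ready:  r_data <= p.i_data
            n.o_data = process(r_data);  n.o_valid = data_valid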
470 """
471
472 def __init__(self, stage):
473 PipelineBase.__init__(self, stage)
474 self._data_valid = Signal()
475
476 # set up the input and output data
477 self.p.i_data = stage.ispec() # input type
478 self.n.o_data = stage.ospec() # output type
479
480 def elaborate(self, platform):
481 m = Module()
482
483 r_data = self.stage.ispec() # input type
484 result = self.stage.ospec() # output data
485 if hasattr(self.stage, "setup"):
486 self.stage.setup(m, r_data)
487
488 p_i_valid = Signal(reset_less=True)
489 m.d.comb += p_i_valid.eq(self.p.i_valid_logic())
490 m.d.comb += eq(result, self.stage.process(r_data))
491 m.d.comb += self.n.o_valid.eq(self._data_valid)
492 m.d.comb += self.p.o_ready.eq(~self._data_valid | self.n.i_ready)
493 m.d.sync += self._data_valid.eq(p_i_valid | \
494 (~self.n.i_ready & self._data_valid))
495 with m.If(self.p.i_valid & self.p.o_ready):
496 m.d.sync += eq(r_data, self.p.i_data)
497 m.d.comb += eq(self.n.o_data, result)
498 return m
499
500
class ExamplePipeline(Pipeline):
    """ an example of how to use the simple (unbuffered) pipeline.
    """

    def __init__(self):
        Pipeline.__init__(self, ExampleStage)


if __name__ == '__main__':
    dut = ExampleBufPipe()
    vl = rtlil.convert(dut, ports=dut.ports())
    with open("test_bufpipe.il", "w") as f:
        f.write(vl)

    dut = ExamplePipeline()
    vl = rtlil.convert(dut, ports=dut.ports())
    with open("test_combpipe.il", "w") as f:
        f.write(vl)