33551239c5068754903b7b131585343e9cf777de
1 """ Pipeline and BufferedPipeline implementation, conforming to the same API.
6 a strategically very important function that is identical in function
7 to nmigen's Signal.eq function, except it may take objects, or a list
8 of objects, or a tuple of objects, and where objects may also be
14 stage requires compliance with a strict API that may be
15 implemented in several means, including as a static class.
16 the methods of a stage instance must be as follows:
18 * ispec() - Input data format specification
19 returns an object or a list or tuple of objects, or
20 a Record, each object having an "eq" function which
21 takes responsibility for copying by assignment all
23 * ospec() - Output data format specification
24        requirements as for ispec
25 * process(m, i) - Processes an ispec-formatted object
26 returns a combinatorial block of a result that
27 may be assigned to the output, by way of the "eq"
29 * setup(m, i) - Optional function for setting up submodules
30 may be used for more complex stages, to link
31 the input (i) to submodules. must take responsibility
32 for adding those submodules to the module (m).
33 the submodules must be combinatorial blocks and
34 must have their inputs and output linked combinatorially.
39 A useful combinatorial wrapper around stages that chains them together
40 and then presents a Stage-API-conformant interface.
45 A simple stalling clock-synchronised pipeline that has no buffering
46 (unlike BufferedPipeline). A stall anywhere along the line will
47 result in a stall back-propagating down the entire chain.
49 The BufferedPipeline by contrast will buffer incoming data, allowing
50 previous stages one clock cycle's grace before also having to stall.
52 An advantage of the UnbufferedPipeline over the Buffered one is
53 that the amount of logic needed (number of gates) is greatly
59 nmigen implementation of buffered pipeline stage, based on zipcpu:
60 https://zipcpu.com/blog/2017/08/14/strategies-for-pipelining.html
62 this module requires quite a bit of thought to understand how it works
63 (and why it is needed in the first place). reading the above is
64 *strongly* recommended.
66 unlike john dawson's IEEE754 FPU STB/ACK signalling, which requires
67 the STB / ACK signals to raise and lower (on separate clocks) before
68        data may proceed (thus only allowing one piece of data to proceed
69 on *ALTERNATE* cycles), the signalling here is a true pipeline
70 where data will flow on *every* clock when the conditions are right.
72 input acceptance conditions are when:
73 * incoming previous-stage strobe (p.i_valid) is HIGH
74 * outgoing previous-stage ready (p.o_ready) is LOW
76 output transmission conditions are when:
77 * outgoing next-stage strobe (n.o_valid) is HIGH
78 * outgoing next-stage ready (n.i_ready) is LOW
80 the tricky bit is when the input has valid data and the output is not
81 ready to accept it. if it wasn't for the clock synchronisation, it
82 would be possible to tell the input "hey don't send that data, we're
83 not ready". unfortunately, it's not possible to "change the past":
84 the previous stage *has no choice* but to pass on its data.
86 therefore, the incoming data *must* be accepted - and stored: that
87 is the responsibility / contract that this stage *must* accept.
88 on the same clock, it's possible to tell the input that it must
89 not send any more data. this is the "stall" condition.
91 we now effectively have *two* possible pieces of data to "choose" from:
92 the buffered data, and the incoming data. the decision as to which
93 to process and output is based on whether we are in "stall" or not.
94 i.e. when the next stage is no longer ready, the output comes from
95 the buffer if a stall had previously occurred, otherwise it comes
96 direct from processing the input.
98 this allows us to respect a synchronous "travelling STB" with what
99 dan calls a "buffered handshake".
101 it's quite a complex state machine!
from abc import ABCMeta, abstractmethod
from collections.abc import Sequence

from nmigen import Signal, Cat, Const, Mux, Module
from nmigen.cli import verilog, rtlil
from nmigen.hdl.rec import Record, Layout
113 """ contains signals that come *from* the previous stage (both in and out)
114 * i_valid: previous stage indicating all incoming data is valid.
115 may be a multi-bit signal, where all bits are required
116 to be asserted to indicate "valid".
117 * o_ready: output to next stage indicating readiness to accept data
118 * i_data : an input - added by the user of this class
121 def __init__(self
, i_width
=1):
122 self
.i_valid
= Signal(i_width
, name
="p_i_valid") # prev >>in self
123 self
.o_ready
= Signal(name
="p_o_ready") # prev <<out self
125 def _connect_in(self
, prev
):
126 """ helper function to connect stage to an input source. do not
127 use to connect stage-to-stage!
129 return [self
.i_valid
.eq(prev
.i_valid
),
130 prev
.o_ready
.eq(self
.o_ready
),
131 eq(self
.i_data
, prev
.i_data
),
134 def i_valid_logic(self
):
135 vlen
= len(self
.i_valid
)
136 if vlen
> 1: # multi-bit case: valid only when i_valid is all 1s
137 all1s
= Const(-1, (len(self
.i_valid
), False))
138 return self
.i_valid
== all1s
139 # single-bit i_valid case
144 """ contains the signals that go *to* the next stage (both in and out)
145 * o_valid: output indicating to next stage that data is valid
146 * i_ready: input from next stage indicating that it can accept data
147 * o_data : an output - added by the user of this class
150 self
.o_valid
= Signal(name
="n_o_valid") # self out>> next
151 self
.i_ready
= Signal(name
="n_i_ready") # self <<in next
153 def connect_to_next(self
, nxt
):
154 """ helper function to connect to the next stage data/valid/ready.
155 data/valid is passed *TO* nxt, and ready comes *IN* from nxt.
157 return [nxt
.i_valid
.eq(self
.o_valid
),
158 self
.i_ready
.eq(nxt
.o_ready
),
159 eq(nxt
.i_data
, self
.o_data
),
162 def _connect_out(self
, nxt
):
163 """ helper function to connect stage to an output source. do not
164 use to connect stage-to-stage!
166 return [nxt
.o_valid
.eq(self
.o_valid
),
167 self
.i_ready
.eq(nxt
.i_ready
),
168 eq(nxt
.o_data
, self
.o_data
),
173 """ makes signals equal: a helper routine which identifies if it is being
174 passed a list (or tuple) of objects, or signals, or Records, and calls
175 the objects' eq function.
177 complex objects (classes) can be used: they must follow the
178 convention of having an eq member function, which takes the
179 responsibility of further calling eq and returning a list of
182 Record is a special (unusual, recursive) case, where the input may be
183 specified as a dictionary (which may contain further dictionaries,
184 recursively), where the field names of the dictionary must match
185 the Record's field spec. Alternatively, an object with the same
186 member names as the Record may be assigned: it does not have to
189 if not isinstance(o
, Sequence
):
192 for (ao
, ai
) in zip(o
, i
):
193 #print ("eq", ao, ai)
194 if isinstance(ao
, Record
):
195 for idx
, (field_name
, field_shape
, _
) in enumerate(ao
.layout
):
196 if isinstance(field_shape
, Layout
):
200 if hasattr(val
, field_name
): # check for attribute
201 val
= getattr(val
, field_name
)
203 val
= val
[field_name
] # dictionary-style specification
204 rres
= eq(ao
.fields
[field_name
], val
)
208 if not isinstance(rres
, Sequence
):
214 class StageCls(metaclass
=ABCMeta
):
215 """ Class-based "Stage" API. requires instantiation (after derivation)
217 see "Stage API" above.. Note: python does *not* require derivation
218 from this class. All that is required is that the pipelines *have*
219 the functions listed in this class. Derivation from this class
220 is therefore merely a "courtesy" to maintainers.
223 def ispec(self
): pass # REQUIRED
225 def ospec(self
): pass # REQUIRED
227 #def setup(self, m, i): pass # OPTIONAL
229 def process(self
, i
): pass # REQUIRED
232 class Stage(metaclass
=ABCMeta
):
233 """ Static "Stage" API. does not require instantiation (after derivation)
235 see "Stage API" above. Note: python does *not* require derivation
236 from this class. All that is required is that the pipelines *have*
237 the functions listed in this class. Derivation from this class
238 is therefore merely a "courtesy" to maintainers.
250 #def setup(m, i): pass
257 class StageChain(StageCls
):
258 """ pass in a list of stages, and they will automatically be
259 chained together via their input and output specs into a
262 the end result basically conforms to the exact same Stage API.
264 * input to this class will be the input of the first stage
265 * output of first stage goes into input of second
266 * output of second goes into input into third (etc. etc.)
267 * the output of this class will be the output of the last stage
269 def __init__(self
, chain
):
273 return self
.chain
[0].ispec()
276 return self
.chain
[-1].ospec()
278 def setup(self
, m
, i
):
279 for (idx
, c
) in enumerate(self
.chain
):
280 if hasattr(c
, "setup"):
281 c
.setup(m
, i
) # stage may have some module stuff
282 o
= self
.chain
[idx
].ospec() # only the last assignment survives
283 m
.d
.comb
+= eq(o
, c
.process(i
)) # process input into "o"
284 if idx
!= len(self
.chain
)-1:
285 ni
= self
.chain
[idx
+1].ispec() # becomes new input on next loop
286 m
.d
.comb
+= eq(ni
, o
) # assign output to next input
288 self
.o
= o
# last loop is the output
290 def process(self
, i
):
295 """ Common functions for Pipeline API
297 def __init__(self
, in_multi
=None):
298 """ Base class containing ready/valid/data to previous and next stages
300 * p: contains ready/valid to the previous stage
301 * n: contains ready/valid to the next stage
304 * add i_data member to PrevControl (p) and
305 * add o_data member to NextControl (n)
308 # set up input and output IO ACK (prev/next ready/valid)
309 self
.p
= PrevControl(in_multi
)
310 self
.n
= NextControl()
312 def connect_to_next(self
, nxt
):
313 """ helper function to connect to the next stage data/valid/ready.
315 return self
.n
.connect_to_next(nxt
.p
)
317 def _connect_in(self
, prev
):
318 """ helper function to connect stage to an input source. do not
319 use to connect stage-to-stage!
321 return self
.p
._connect
_in
(prev
.p
)
323 def _connect_out(self
, nxt
):
324 """ helper function to connect stage to an output source. do not
325 use to connect stage-to-stage!
327 return self
.n
._connect
_out
(nxt
.n
)
329 def connect(self
, m
, pipechain
):
330 """ connects a chain (list) of Pipeline instances together and
331 links them to this ControlBase instance:
333 in <----> self <---> out
336 [pipe1, pipe2, pipe3, pipe4]
339 out---in out--in out---in
341 Also takes care of allocating i_data/o_data, by looking up
342 the data spec for each end of the pipechain. i.e It is NOT
343 necessary to allocate self.p.i_data or self.n.o_data manually:
344 this is handled AUTOMATICALLY, here.
346 Basically this function is the direct equivalent of StageChain,
347 except that unlike StageChain, the Pipeline logic is followed.
349 Just as StageChain presents an object that conforms to the
350 Stage API from a list of objects that also conform to the
351 Stage API, an object that calls this Pipeline connect function
352 has the exact same pipeline API as the list of pipline objects
355 Thus it becomes possible to build up larger chains recursively.
356 More complex chains (multi-input, multi-output) will have to be
359 eqs
= [] # collated list of assignment statements
361 # connect inter-chain
362 for i
in range(len(pipechain
)-1):
364 pipe2
= pipechain
[i
+1]
365 eqs
+= pipe1
.connect_to_next(pipe2
)
367 # connect front of chain to ourselves
369 self
.p
.i_data
= front
.stage
.ispec()
370 eqs
+= front
._connect
_in
(self
)
372 # connect end of chain to ourselves
374 self
.n
.o_data
= end
.stage
.ospec()
375 eqs
+= end
._connect
_out
(self
)
377 # activate the assignments
380 def set_input(self
, i
):
381 """ helper function to set the input data
383 return eq(self
.p
.i_data
, i
)
386 return [self
.p
.i_valid
, self
.n
.i_ready
,
387 self
.n
.o_valid
, self
.p
.o_ready
,
388 self
.p
.i_data
, self
.n
.o_data
# XXX need flattening!
392 class BufferedPipeline(ControlBase
):
393 """ buffered pipeline stage. data and strobe signals travel in sync.
394 if ever the input is ready and the output is not, processed data
395 is stored in a temporary register.
397 Argument: stage. see Stage API above
399 stage-1 p.i_valid >>in stage n.o_valid out>> stage+1
400 stage-1 p.o_ready <<out stage n.i_ready <<in stage+1
401 stage-1 p.i_data >>in stage n.o_data out>> stage+1
407 input data p.i_data is read (only), is processed and goes into an
408 intermediate result store [process()]. this is updated combinatorially.
410 in a non-stall condition, the intermediate result will go into the
411 output (update_output). however if ever there is a stall, it goes
412 into r_data instead [update_buffer()].
414 when the non-stall condition is released, r_data is the first
415 to be transferred to the output [flush_buffer()], and the stall
418 on the next cycle (as long as stall is not raised again) the
419 input may begin to be processed and transferred directly to output.
422 def __init__(self
, stage
):
423 ControlBase
.__init
__(self
)
426 # set up the input and output data
427 self
.p
.i_data
= stage
.ispec() # input type
428 self
.n
.o_data
= stage
.ospec()
430 def elaborate(self
, platform
):
433 result
= self
.stage
.ospec()
434 r_data
= self
.stage
.ospec()
435 if hasattr(self
.stage
, "setup"):
436 self
.stage
.setup(m
, self
.p
.i_data
)
438 # establish some combinatorial temporaries
439 o_n_validn
= Signal(reset_less
=True)
440 i_p_valid_o_p_ready
= Signal(reset_less
=True)
441 p_i_valid
= Signal(reset_less
=True)
442 m
.d
.comb
+= [p_i_valid
.eq(self
.p
.i_valid_logic()),
443 o_n_validn
.eq(~self
.n
.o_valid
),
444 i_p_valid_o_p_ready
.eq(p_i_valid
& self
.p
.o_ready
),
447 # store result of processing in combinatorial temporary
448 m
.d
.comb
+= eq(result
, self
.stage
.process(self
.p
.i_data
))
450 # if not in stall condition, update the temporary register
451 with m
.If(self
.p
.o_ready
): # not stalled
452 m
.d
.sync
+= eq(r_data
, result
) # update buffer
454 with m
.If(self
.n
.i_ready
): # next stage is ready
455 with m
.If(self
.p
.o_ready
): # not stalled
456 # nothing in buffer: send (processed) input direct to output
457 m
.d
.sync
+= [self
.n
.o_valid
.eq(p_i_valid
),
458 eq(self
.n
.o_data
, result
), # update output
460 with m
.Else(): # p.o_ready is false, and something is in buffer.
461 # Flush the [already processed] buffer to the output port.
462 m
.d
.sync
+= [self
.n
.o_valid
.eq(1), # declare reg empty
463 eq(self
.n
.o_data
, r_data
), # flush buffer
464 self
.p
.o_ready
.eq(1), # clear stall condition
466 # ignore input, since p.o_ready is also false.
468 # (n.i_ready) is false here: next stage is ready
469 with m
.Elif(o_n_validn
): # next stage being told "ready"
470 m
.d
.sync
+= [self
.n
.o_valid
.eq(p_i_valid
),
471 self
.p
.o_ready
.eq(1), # Keep the buffer empty
472 eq(self
.n
.o_data
, result
), # set output data
475 # (n.i_ready) false and (n.o_valid) true:
476 with m
.Elif(i_p_valid_o_p_ready
):
477 # If next stage *is* ready, and not stalled yet, accept input
478 m
.d
.sync
+= self
.p
.o_ready
.eq(~
(p_i_valid
& self
.n
.o_valid
))
483 class ExampleAddStage(StageCls
):
484 """ an example of how to use the buffered pipeline, as a class instance
488 """ returns a tuple of input signals which will be the incoming data
490 return (Signal(16), Signal(16))
493 """ returns an output signal which will happen to contain the sum
498 def process(self
, i
):
499 """ process the input data (sums the values in the tuple) and returns it
504 class ExampleBufPipeAdd(BufferedPipeline
):
505 """ an example of how to use the buffered pipeline, using a class instance
509 addstage
= ExampleAddStage()
510 BufferedPipeline
.__init
__(self
, addstage
)
513 class ExampleStage(Stage
):
514 """ an example of how to use the buffered pipeline, in a static class
519 return Signal(16, name
="example_input_signal")
522 return Signal(16, name
="example_output_signal")
525 """ process the input data and returns it (adds 1)
530 class ExampleStageCls(StageCls
):
531 """ an example of how to use the buffered pipeline, in a static class
536 return Signal(16, name
="example_input_signal")
539 return Signal(16, name
="example_output_signal")
541 def process(self
, i
):
542 """ process the input data and returns it (adds 1)
547 class ExampleBufPipe(BufferedPipeline
):
548 """ an example of how to use the buffered pipeline.
552 BufferedPipeline
.__init
__(self
, ExampleStage
)
555 class UnbufferedPipeline(ControlBase
):
556 """ A simple pipeline stage with single-clock synchronisation
557 and two-way valid/ready synchronised signalling.
559 Note that a stall in one stage will result in the entire pipeline
562 Also that unlike BufferedPipeline, the valid/ready signalling does NOT
563 travel synchronously with the data: the valid/ready signalling
564 combines in a *combinatorial* fashion. Therefore, a long pipeline
565 chain will lengthen propagation delays.
567 Argument: stage. see Stage API, above
569 stage-1 p.i_valid >>in stage n.o_valid out>> stage+1
570 stage-1 p.o_ready <<out stage n.i_ready <<in stage+1
571 stage-1 p.i_data >>in stage n.o_data out>> stage+1
579 p.i_data : StageInput, shaped according to ispec
581 p.o_data : StageOutput, shaped according to ospec
583 r_data : input_shape according to ispec
584 A temporary (buffered) copy of a prior (valid) input.
585 This is HELD if the output is not ready. It is updated
587 result: output_shape according to ospec
588 The output of the combinatorial logic. it is updated
589 COMBINATORIALLY (no clock dependence).
592 def __init__(self
, stage
):
593 ControlBase
.__init
__(self
)
595 self
._data
_valid
= Signal()
597 # set up the input and output data
598 self
.p
.i_data
= stage
.ispec() # input type
599 self
.n
.o_data
= stage
.ospec() # output type
601 def elaborate(self
, platform
):
604 r_data
= self
.stage
.ispec() # input type
605 result
= self
.stage
.ospec() # output data
606 if hasattr(self
.stage
, "setup"):
607 self
.stage
.setup(m
, r_data
)
609 p_i_valid
= Signal(reset_less
=True)
610 m
.d
.comb
+= p_i_valid
.eq(self
.p
.i_valid_logic())
611 m
.d
.comb
+= eq(result
, self
.stage
.process(r_data
))
612 m
.d
.comb
+= self
.n
.o_valid
.eq(self
._data
_valid
)
613 m
.d
.comb
+= self
.p
.o_ready
.eq(~self
._data
_valid | self
.n
.i_ready
)
614 m
.d
.sync
+= self
._data
_valid
.eq(p_i_valid | \
615 (~self
.n
.i_ready
& self
._data
_valid
))
616 with m
.If(self
.p
.i_valid
& self
.p
.o_ready
):
617 m
.d
.sync
+= eq(r_data
, self
.p
.i_data
)
618 m
.d
.comb
+= eq(self
.n
.o_data
, result
)
622 class ExamplePipeline(UnbufferedPipeline
):
623 """ an example of how to use the combinatorial pipeline.
627 UnbufferedPipeline
.__init
__(self
, ExampleStage
)
630 if __name__
== '__main__':
631 dut
= ExampleBufPipe()
632 vl
= rtlil
.convert(dut
, ports
=dut
.ports())
633 with
open("test_bufpipe.il", "w") as f
:
636 dut
= ExamplePipeline()
637 vl
= rtlil
.convert(dut
, ports
=dut
.ports())
638 with
open("test_combpipe.il", "w") as f
: