1 """ nmigen implementation of buffered pipeline stage, based on zipcpu:
2 https://zipcpu.com/blog/2017/08/14/strategies-for-pipelining.html
3
4 this module requires quite a bit of thought to understand how it works
5 (and why it is needed in the first place). reading the above is
6 *strongly* recommended.
7
8 unlike john dawson's IEEE754 FPU STB/ACK signalling, which requires
9 the STB / ACK signals to raise and lower (on separate clocks) before
10 data may proceeed (thus only allowing one piece of data to proceed
11 on *ALTERNATE* cycles), the signalling here is a true pipeline
12 where data will flow on *every* clock when the conditions are right.
13
    input acceptance conditions are when:
        * incoming previous-stage strobe (p.i_valid) is HIGH
        * outgoing previous-stage ready  (p.o_ready) is HIGH

    output transmission conditions are when:
        * outgoing next-stage strobe (n.o_valid) is HIGH
        * outgoing next-stage ready  (n.i_ready) is HIGH

    the tricky bit is when the input has valid data and the output is not
    ready to accept it.  if it wasn't for the clock synchronisation, it
    would be possible to tell the input "hey don't send that data, we're
    not ready".  unfortunately, it's not possible to "change the past":
    the previous stage *has no choice* but to pass on its data.

    therefore, the incoming data *must* be accepted - and stored: that
    is the responsibility / contract that this stage *must* accept.
    on the same clock, it's possible to tell the input that it must
    not send any more data.  this is the "stall" condition.

    we now effectively have *two* possible pieces of data to "choose" from:
    the buffered data, and the incoming data.  the decision as to which
    to process and output is based on whether we are in "stall" or not.
    i.e. when the next stage is once again ready, the output comes from
    the buffer if a stall had previously occurred, otherwise it comes
    direct from processing the input.

    this allows us to respect a synchronous "travelling STB" with what
    Dan calls a "buffered handshake".

    it's quite a complex state machine!
"""

from nmigen import Signal, Cat, Const, Mux, Module
from nmigen.cli import verilog, rtlil
from nmigen.hdl.rec import Record, Layout

from collections.abc import Sequence


class PrevControl:
    """ contains signals that come *from* the previous stage (both in and out)
        * i_valid: input from previous stage indicating incoming data is valid
        * o_ready: output to previous stage indicating readiness to accept data
        * i_data : an input - added by the user of this class
    """

    def __init__(self):
        self.i_valid = Signal(name="p_i_valid") # prev   >>in  self
        self.o_ready = Signal(name="p_o_ready") # prev   <<out self

    def connect_in(self, prev):
        """ helper function to connect stage to an input source.  do not
            use to connect stage-to-stage!
        """
        return [self.i_valid.eq(prev.i_valid),
                prev.o_ready.eq(self.o_ready),
                eq(self.i_data, prev.i_data),
               ]


class NextControl:
    """ contains the signals that go *to* the next stage (both in and out)
        * o_valid: output indicating to next stage that data is valid
        * i_ready: input from next stage indicating that it can accept data
        * o_data : an output - added by the user of this class
    """
    def __init__(self):
        self.o_valid = Signal(name="n_o_valid") # self out>>  next
        self.i_ready = Signal(name="n_i_ready") # self <<in   next

    def connect_to_next(self, nxt):
        """ helper function to connect to the next stage data/valid/ready.
            data/valid is passed *TO* nxt, and ready comes *IN* from nxt.
        """
        return [nxt.i_valid.eq(self.o_valid),
                self.i_ready.eq(nxt.o_ready),
                eq(nxt.i_data, self.o_data),
               ]

    def connect_out(self, nxt):
        """ helper function to connect stage to an output source.  do not
            use to connect stage-to-stage!
        """
        return [nxt.o_valid.eq(self.o_valid),
                self.i_ready.eq(nxt.i_ready),
                eq(nxt.o_data, self.o_data),
               ]


def eq(o, i):
    """ makes signals equal: a helper routine which identifies if it is being
        passed a list (or tuple) of objects, and calls the objects' eq
        function.

        complex objects (classes) can be used: they must follow the
        convention of having an eq member function, which takes the
        responsibility of further calling eq and returning a list of
        eq assignments

        Record is a special (unusual, recursive) case, where the input
        is specified as a dictionary (which may contain further dictionaries,
        recursively), where the field names of the dictionary must match
        the Record's field spec.
    """
    if not isinstance(o, Sequence):
        o, i = [o], [i]
    res = []
    for (ao, ai) in zip(o, i):
        #print ("eq", ao, ai)
        if isinstance(ao, Record):
            for idx, (field_name, field_shape, _) in enumerate(ao.layout):
                if isinstance(field_shape, Layout):
                    rres = eq(ao.fields[field_name], ai.fields[field_name])
                else:
                    rres = eq(ao.fields[field_name], ai[field_name])
                res += rres
        else:
            res.append(ao.eq(ai))
    return res

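# a minimal usage sketch for eq() (illustrative only: "m" and the signal
# names below are hypothetical, not part of this module):
#
#   a, b, c, d = Signal(16), Signal(16), Signal(16), Signal(16)
#   m.d.comb += eq(a, c)             # single object: equivalent to [a.eq(c)]
#   m.d.comb += eq([a, b], [c, d])   # sequences are zipped up pairwise
#
# when the left-hand side is a Record, the fields are walked one by one
# (recursively for nested Layouts), as described in the docstring above.
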

class PipelineBase:
    """ Common functions for Pipeline API
    """
    def __init__(self, stage):
        """ pass in a "stage" which may be either a static class or a class
            instance, which has three functions:
            * ispec: returns input signals according to the input specification
            * ospec: returns output signals according to the output
              specification
            * process: takes an input instance and returns processed data

            User must also:
            * add i_data member to PrevControl and
            * add o_data member to NextControl
        """
        self.stage = stage

        # set up input and output IO ACK (prev/next ready/valid)
        self.p = PrevControl()
        self.n = NextControl()

    def connect_to_next(self, nxt):
        """ helper function to connect to the next stage data/valid/ready.
        """
        return self.n.connect_to_next(nxt.p)

    def connect_in(self, prev):
        """ helper function to connect stage to an input source.  do not
            use to connect stage-to-stage!
        """
        return self.p.connect_in(prev.p)

    def connect_out(self, nxt):
        """ helper function to connect stage to an output source.  do not
            use to connect stage-to-stage!
        """
        return self.n.connect_out(nxt.n)

    def set_input(self, i):
        """ helper function to set the input data
        """
        return eq(self.p.i_data, i)

    def ports(self):
        return [self.p.i_valid, self.n.i_ready,
                self.n.o_valid, self.p.o_ready,
                self.p.i_data, self.n.o_data   # XXX need flattening!
               ]


class BufferedPipeline(PipelineBase):
    """ buffered pipeline stage.  data and strobe signals travel in sync.
        if ever the input is ready and the output is not, processed data
        is stored in a temporary register.

        stage-1   p.i_valid >>in   stage   n.o_valid out>>   stage+1
        stage-1   p.o_ready <<out  stage   n.i_ready <<in    stage+1
        stage-1   p.i_data  >>in   stage   n.o_data  out>>   stage+1
                              |             |
                              process --->----^
                              |             |
                              +-- r_data ->-+

        input data p.i_data is read (only), is processed and goes into an
        intermediate result store [process()].  this is updated
        combinatorially.

        in a non-stall condition, the intermediate result will go into the
        output (update_output).  however if ever there is a stall, it goes
        into r_data instead [update_buffer()].

        when the non-stall condition is released, r_data is the first
        to be transferred to the output [flush_buffer()], and the stall
        condition cleared.

        on the next cycle (as long as stall is not raised again) the
        input may begin to be processed and transferred directly to output.
    """
    def __init__(self, stage):
        PipelineBase.__init__(self, stage)

        # set up the input and output data
        self.p.i_data = stage.ispec() # input type
        self.n.o_data = stage.ospec()

    def elaborate(self, platform):
        m = Module()

        result = self.stage.ospec()
        r_data = self.stage.ospec()
        if hasattr(self.stage, "setup"):
            self.stage.setup(m, self.p.i_data)

        # establish some combinatorial temporaries
        o_n_validn = Signal(reset_less=True)
        i_p_valid_o_p_ready = Signal(reset_less=True)
        m.d.comb += [o_n_validn.eq(~self.n.o_valid),
                     i_p_valid_o_p_ready.eq(self.p.i_valid & self.p.o_ready),
                    ]

        # store result of processing in combinatorial temporary
        #with m.If(self.p.i_valid): # input is valid: process it
        m.d.comb += eq(result, self.stage.process(self.p.i_data))
        # if not in stall condition, update the temporary register
        with m.If(self.p.o_ready): # not stalled
            m.d.sync += eq(r_data, result) # update buffer

        #with m.If(self.p.i_rst): # reset
        #    m.d.sync += self.n.o_valid.eq(0)
        #    m.d.sync += self.p.o_ready.eq(0)
        with m.If(self.n.i_ready): # next stage is ready
            with m.If(self.p.o_ready): # not stalled
                # nothing in buffer: send (processed) input direct to output
                m.d.sync += [self.n.o_valid.eq(self.p.i_valid),
                             eq(self.n.o_data, result), # update output
                            ]
            with m.Else(): # p.o_ready is false, and something is in buffer.
                # Flush the [already processed] buffer to the output port.
                m.d.sync += [self.n.o_valid.eq(1),
                             eq(self.n.o_data, r_data), # flush buffer
                             # clear stall condition, declare register empty.
                             self.p.o_ready.eq(1),
                            ]
                # ignore input, since p.o_ready is also false.

        # (n.i_ready) is false here: next stage is *not* ready
        with m.Elif(o_n_validn): # no output is pending (n.o_valid is low)
            m.d.sync += [self.n.o_valid.eq(self.p.i_valid),
                         self.p.o_ready.eq(1), # Keep the buffer empty
                         # set the output data (from comb result)
                         eq(self.n.o_data, result),
                        ]
        # (n.i_ready) false and (n.o_valid) true:
        with m.Elif(i_p_valid_o_p_ready):
            # input has just been accepted (and captured into r_data, above)
            # but the output cannot be sent: raise the stall (clear o_ready)
            m.d.sync += self.p.o_ready.eq(~(self.p.i_valid & self.n.o_valid))

        return m


class ExampleAddStage:
    """ an example of how to use the buffered pipeline, as a class instance
    """

    def ispec(self):
        """ returns a tuple of input signals which will be the incoming data
        """
        return (Signal(16), Signal(16))

    def ospec(self):
        """ returns an output signal which will happen to contain the sum
            of the two inputs
        """
        return Signal(16)

    def process(self, i):
        """ process the input data (sums the values in the tuple) and returns it
        """
        return i[0] + i[1]


class ExampleBufPipeAdd(BufferedPipeline):
    """ an example of how to use the buffered pipeline, using a class instance
    """

    def __init__(self):
        addstage = ExampleAddStage()
        BufferedPipeline.__init__(self, addstage)


class ExampleStage:
    """ an example of how to use the buffered pipeline, in a static class
        fashion
    """

    def ispec():
        return Signal(16)

    def ospec():
        return Signal(16)

    def process(i):
        """ process the input data and returns it (adds 1)
        """
        return i + 1


class ExampleBufPipe(BufferedPipeline):
    """ an example of how to use the buffered pipeline.
    """

    def __init__(self):
        BufferedPipeline.__init__(self, ExampleStage)
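
# a usage sketch for the example pipelines above (illustrative only: the
# module "m" and the constant input values are hypothetical, and would live
# inside some enclosing elaborate()).  ExampleAddStage.ispec() returns a
# 2-tuple, so set_input() takes a matching 2-tuple and eq() pairs the
# elements up.  pipes whose ospec matches the next pipe's ispec (e.g. two
# ExampleBufPipe instances) can be chained with connect_to_next():
#
#   pipe = ExampleBufPipeAdd()
#   m.submodules.pipe = pipe
#   m.d.comb += pipe.set_input((Const(5), Const(3)))  # drives p.i_data
#
#   p1, p2 = ExampleBufPipe(), ExampleBufPipe()
#   m.submodules += [p1, p2]
#   m.d.comb += p1.connect_to_next(p2)    # p1's n.* drives p2's p.*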


class CombPipe(PipelineBase):
    """A simple pipeline stage containing combinatorial logic that can execute
    completely in one clock cycle.

    Parameters:
    -----------
    stage : class or instance
        the "stage" object: provides ispec(), ospec() and process()
        (see PipelineBase)

    Attributes:
    -----------
    p : PrevControl
        handshake to/from the previous stage (i_valid, o_ready, i_data)
    n : NextControl
        handshake to/from the next stage (o_valid, i_ready, o_data)
    """

    def __init__(self, stage):
        PipelineBase.__init__(self, stage)
        self._data_valid = Signal()

        # set up the input and output data
        self.p.i_data = stage.ispec() # input type
        self.n.o_data = stage.ospec() # output type

    def elaborate(self, platform):
        m = Module()

        r_data = self.stage.ispec() # input type
        result = self.stage.ospec() # output data
        if hasattr(self.stage, "setup"):
            self.stage.setup(m, r_data)

        m.d.comb += eq(result, self.stage.process(r_data))
        m.d.comb += self.n.o_valid.eq(self._data_valid)
        m.d.comb += self.p.o_ready.eq(~self._data_valid | self.n.i_ready)
        m.d.sync += self._data_valid.eq(self.p.i_valid |
                                        (~self.n.i_ready & self._data_valid))
        with m.If(self.p.i_valid & self.p.o_ready):
            m.d.sync += eq(r_data, self.p.i_data)
        m.d.comb += eq(self.n.o_data, result)
        return m


class ExampleCombPipe(CombPipe):
    """ an example of how to use the combinatorial pipeline.
    """

    def __init__(self):
        CombPipe.__init__(self, ExampleStage)


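# a minimal simulation sketch (illustrative only).  it is written as a plain
# generator in the style accepted by nmigen's generator-based simulators;
# the function used to run it (e.g. nmigen.compat.sim.run_simulation) is an
# assumption, and is therefore only shown commented-out below the generator.

def example_buf_pipe_sim(dut):
    """ drives a single value through an ExampleBufPipe instance.  the exact
        latency depends on the pipe's current state, so a few spare cycles
        are allowed before reading the output rather than asserting a fixed
        cycle count.
    """
    yield dut.n.i_ready.eq(1)        # downstream stays ready throughout
    yield dut.p.i_valid.eq(1)        # present a valid input...
    yield dut.p.i_data.eq(5)         # ...with the value 5
    yield                            # clock edge: input can be accepted
    yield dut.p.i_valid.eq(0)        # withdraw the input
    for _ in range(3):               # give the result time to reach the output
        yield
    result = yield dut.n.o_data      # ExampleStage adds 1, so 6 is expected
    print("o_data =", result)

# how it might be run (an assumption - adjust to the simulator API in use):
#
#   from nmigen.compat.sim import run_simulation
#   dut = ExampleBufPipe()
#   run_simulation(dut, example_buf_pipe_sim(dut), vcd_name="bufpipe.vcd")

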
if __name__ == '__main__':
    dut = ExampleBufPipe()
    vl = rtlil.convert(dut, ports=dut.ports())
    with open("test_bufpipe.il", "w") as f:
        f.write(vl)

    dut = ExampleCombPipe()
    vl = rtlil.convert(dut, ports=dut.ports())
    with open("test_combpipe.il", "w") as f:
        f.write(vl)