INSTRUCTION
stringlengths 1
8.43k
| RESPONSE
stringlengths 75
104k
|
|---|---|
Do a reverse search. Args: lat lon. REVERSE 48.1234 2.9876
|
def do_REVERSE(self, latlon):
    """Do a reverse search. Args: lat lon.
    REVERSE 48.1234 2.9876"""
    lat, lon = latlon.split()
    template = '{} ({} | {} km | {})'
    for result in reverse(float(lat), float(lon)):
        print(template.format(white(result), blue(result.score),
                              blue(result.distance), blue(result._id)))
|
Print the distance score between two strings. Use | as separator. STRDISTANCE rue des lilas|porte des lilas
|
def do_STRDISTANCE(self, s):
    """Print the distance score between two strings. Use | as separator.
    STRDISTANCE rue des lilas|porte des lilas"""
    parts = s.split('|')
    if len(parts) != 2:
        # exactly two strings are required
        print(red('Malformed string. Use | between the two strings.'))
        return
    print(white(compare_str(parts[0], parts[1])))
|
Inspect loaded Addok config. Output all config without argument. CONFIG [ CONFIG_KEY ]
|
def do_CONFIG(self, name):
    """Inspect loaded Addok config. Output all config without argument.
    CONFIG [CONFIG_KEY]"""
    if not name:
        # no key given: dump every known config key recursively
        for key in self.complete_CONFIG():
            self.do_CONFIG(key)
        return
    value = getattr(config, name.upper(), 'Not found.')
    print(blue(name), white(format_config(value)))
|
Run a Lua script. Takes the raw Redis arguments. SCRIPT script_name number_of_keys key1 key2… arg1 arg2
|
def do_SCRIPT(self, args):
    """Run a Lua script. Takes the raw Redis arguments.
    SCRIPT script_name number_of_keys key1 key2… arg1 arg2
    """
    try:
        # at least a script name and a key count are required
        name, keys_count, *rest = args.split()
    except ValueError:
        print(red('Not enough arguments'))
        return
    try:
        keys_count = int(keys_count)
    except ValueError:
        print(red('You must pass the number of keys as first argument'))
        self.do_HELP('SCRIPT')
        return
    keys = rest[:keys_count]
    script_args = rest[keys_count:]
    try:
        output = getattr(scripts, name)(keys=keys, args=script_args)
    except AttributeError:
        print(red('No script named {}'.format(name)))
        return
    except DB.Error as e:
        print(red(e))
        return
    if not isinstance(output, list):
        # Script may return just an integer.
        output = [output]
    for line in output:
        print(white(line))
|
Just sends the request using its send method and returns its response.
|
def send(r, stream=False):
    """Send the request *r* via its own ``send`` method and return the
    resulting response object.

    :param r: request-like object exposing ``send`` and ``response``
    :param stream: passed through to ``r.send``
    """
    r.send(stream=stream)
    response = r.response
    return response
|
Concurrently converts a list of Requests to Responses.
|
def map(requests, stream=True, pool=None, size=1, exception_handler=None):
    """Concurrently converts a list of Requests to Responses.
    :param requests: a collection of Request objects.
    :param stream: If False, the content will not be downloaded immediately.
    :param pool: an existing worker pool to reuse; when None a new one is
        created here and closed before returning.
    :param size: Specifies the number of workers to run at a time. If 1, no parallel processing.
    :param exception_handler: Callback function, called when exception occured. Params: Request, Exception
    :return: list of responses (None, or the handler's result, for failures)
    """
    # Remember whether the pool is ours so we only close what we created.
    own_pool = pool is None
    if own_pool:
        pool = Pool(size)
    requests = list(requests)
    requests = pool.map(send, requests)
    ret = []
    for request in requests:
        if request.response is not None:
            ret.append(request.response)
        elif exception_handler and hasattr(request, 'exception'):
            ret.append(exception_handler(request, request.exception))
        else:
            ret.append(None)
    # BUG FIX: the old `if not pool: pool.close()` ran after `pool` had been
    # reassigned to a real Pool, so it was always false and an internally
    # created pool was never closed.
    if own_pool:
        pool.close()
    return ret
|
Concurrently converts a generator object of Requests to a generator of Responses.
|
def imap(requests, stream=True, pool=None, size=2, exception_handler=None):
    """Concurrently converts a generator object of Requests to
    a generator of Responses.
    :param requests: a generator of Request objects.
    :param stream: If False, the content will not be downloaded immediately.
    :param pool: an existing worker pool to reuse; when None a new one is
        created here and closed on exhaustion.
    :param size: Specifies the number of requests to make at a time. default is 2
    :param exception_handler: Callback function, called when exception occured. Params: Request, Exception
    """
    def send(r):
        return r.send(stream=stream)

    # Remember whether we created the pool: only a pool we own gets closed.
    own_pool = pool is None
    if own_pool:
        pool = Pool(size)
    for request in pool.imap(send, requests):
        if request.response is not None:
            yield request.response
        elif exception_handler:
            exception_handler(request, request.exception)
    # BUG FIX: `if not pool` was always false after the reassignment above,
    # so an internally created pool leaked.
    if own_pool:
        pool.close()
|
Concurrently converts a generator object of Requests to a generator of Responses.
|
def imap_unordered(requests, stream=True, pool=None, size=2, exception_handler=None):
    """Concurrently converts a generator object of Requests to
    a generator of Responses.
    :param requests: a generator of Request objects.
    :param stream: If False, the content will not be downloaded immediately.
    :param pool: an existing worker pool to reuse; when None a new one is
        created here and closed on exhaustion.
    :param size: Specifies the number of requests to make at a time. default is 2
    :param exception_handler: Callback function, called when exception occured. Params: Request, Exception
    """
    def send(r):
        return r.send(stream=stream)

    # BUG FIX: the old code built `Pool(size)` unconditionally inside
    # `contextlib.closing`, silently ignoring a caller-supplied pool, and its
    # trailing `if not pool: pool.close()` could never execute. Reuse a given
    # pool and close only the one we created ourselves.
    own_pool = pool is None
    if own_pool:
        pool = Pool(size)
    try:
        for request in pool.imap_unordered(send, requests):
            if request.response is not None:
                yield request.response
            elif exception_handler:
                exception_handler(request, request.exception)
    finally:
        if own_pool:
            pool.close()
|
Gets value of bits between selected range from memory
|
def getBits_from_array(array, wordWidth, start, end,
                       reinterpretElmToType=None):
    """
    Gets value of bits between selected range from memory

    :param array: word-indexable memory representation
    :param wordWidth: bit width of a single word in the array
    :param start: bit address of start of bit of bits
    :param end: bit address of first bit behind bits
    :param reinterpretElmToType: optional type each word is reinterpreted to
        before its bits are extracted
    :return: instance of BitsVal (derived from SimBits type) which contains
        copy of selected bits
    """
    inPartOffset = 0
    value = Bits(end - start, None).fromPy(None)
    while start != end:
        assert start < end, (start, end)
        dataWordIndex = start // wordWidth
        v = array[dataWordIndex]
        if reinterpretElmToType is not None:
            v = v._reinterpret_cast(reinterpretElmToType)
        endOfWord = (dataWordIndex + 1) * wordWidth
        # how many bits of the current word belong to the selection
        width = min(end, endOfWord) - start
        offset = start % wordWidth
        val = selectBitRange(v.val, offset, width)
        vldMask = selectBitRange(v.vldMask, offset, width)
        updateTime = v.updateTime
        m = mask(width)
        value.val |= (val & m) << inPartOffset
        value.vldMask |= (vldMask & m) << inPartOffset
        # BUG FIX: the max() result was assigned to a non-existent
        # `updateMask` attribute; the intent is to keep the newest
        # `updateTime` over all words touched.
        value.updateTime = max(value.updateTime, updateTime)
        inPartOffset += width
        start += width
    return value
|
Cast HArray signal or value to signal or value of type Bits
|
def reinterptet_harray_to_bits(typeFrom, sigOrVal, bitsT):
    """
    Cast HArray signal or value to signal or value of type Bits

    :raise TypeConversionErr: when total bit width of the array differs
        from the width of the target Bits type
    """
    elementCnt = int(typeFrom.size)
    elementWidth = typeFrom.elmType.bit_length()
    targetWidth = bitsT.bit_length()
    if elementCnt * elementWidth != targetWidth:
        raise TypeConversionErr(
            "Size of types is different", elementCnt * elementWidth, targetWidth)
    elementT = Bits(elementWidth)
    # cast each element, then concatenate (last element ends up highest)
    chunks = [item._reinterpret_cast(elementT) for item in sigOrVal]
    return Concat(*reversed(chunks))._reinterpret_cast(bitsT)
|
convert python slice to value of SLICE hdl type
|
def slice_to_SLICE(sliceVals, width):
    """Convert a python slice to a value of the SLICE hdl type.

    :param sliceVals: python slice object; step must be None
    :param width: used as the start bound when the slice start is None
    :raise NotImplementedError: when the slice has a step
    """
    if sliceVals.step is not None:
        raise NotImplementedError()
    # Dead pre-assignments of start/stop removed: both variables were
    # unconditionally overwritten by the branches below.
    if sliceVals.start is None:
        start = INT.fromPy(width)
    else:
        start = toHVal(sliceVals.start)
    if sliceVals.stop is None:
        stop = INT.fromPy(0)
    else:
        stop = toHVal(sliceVals.stop)
    if isinstance(start, Value) and isinstance(stop, Value):
        updateTime = max(start.updateTime, stop.updateTime)
    else:
        # at least one bound is not a constant value
        updateTime = -1
    return Slice.getValueCls()((start, stop), SLICE, 1, updateTime)
|
: return: bit range which contains data of this part on bus data signal
|
def getBusWordBitRange(self) -> Tuple[int, int]:
    """
    :return: bit range which contains data of this part on bus data signal
    """
    lowBit = self.startOfPart % self.parent.wordWidth
    highBit = lowBit + self.bit_length()
    return (highBit, lowBit)
|
: return: bit range which contains data of this part on interface of field
|
def getFieldBitRange(self) -> Tuple[int, int]:
    """
    :return: bit range which contains data of this part on interface
        of field
    """
    low = self.inFieldOffset
    high = low + self.bit_length()
    return (high, low)
|
Apply enclosure on list of statements ( fill all unused code branches with assignments from value specified by enclosure )
|
def fill_stm_list_with_enclosure(parentStm: Optional[HdlStatement],
                                 current_enclosure: Set[RtlSignalBase],
                                 statements: List["HdlStatement"],
                                 do_enclose_for: List[RtlSignalBase],
                                 enclosure: Dict[RtlSignalBase, Union[Value, RtlSignalBase]])\
        -> List["HdlStatement"]:
    """
    Apply enclosure on list of statements
    (fill all unused code branches with assignments from value specified by enclosure)

    :param parentStm: optional parent statement where this list is some branch
    :param current_enclosure: list of signals for which this statement list is enclosed
    :param statements: list of statements
    :param do_enclose_for: selected signals for which enclosure should be used
    :param enclosure: enclosure values for signals
    :attention: original statements parameter can be modified
    :return: new statements
    """
    if statements is None:
        statements = []
    for e_sig in do_enclose_for:
        # already enclosed by an outer scope, nothing to do for this signal
        if e_sig in current_enclosure:
            continue
        enclosed = False
        # prefer letting an existing driver of this signal fill its own
        # unused branches with the enclosure value
        for stm in statements:
            if e_sig in stm._outputs:
                if e_sig not in stm._enclosed_for:
                    stm._fill_enclosure(enclosure)
                enclosed = True
                break
        # any statement was not related with this signal,
        # so append a plain default assignment from the enclosure value
        if not enclosed:
            e = enclosure[e_sig]
            a = Assignment(e, e_sig)
            statements.append(a)
            if parentStm is not None:
                a._set_parent_stm(parentStm)
    return statements
|
Find files by pattern in directory
|
def find_files(directory, pattern, recursive=True):
    """
    Find files by pattern in directory

    :param directory: root directory to search in
    :param pattern: fnmatch-style file name pattern
    :param recursive: when True, descend into subdirectories
    :return: generator of matching file paths
    :raise IOError: when directory is missing or is not a directory
    """
    if not os.path.isdir(directory):
        if os.path.exists(directory):
            raise IOError(directory + ' is not directory')
        else:
            raise IOError(directory + " does not exists")
    if recursive:
        for root, _, names in os.walk(directory):
            for name in names:
                if fnmatch.fnmatch(name, pattern):
                    yield os.path.join(root, name)
    else:
        for name in os.listdir(directory):
            if fnmatch.fnmatch(name, pattern):
                path = os.path.join(directory, name)
                # skip subdirectories in non-recursive mode
                if os.path.isfile(path):
                    yield path
|
Generate if tree for cases like ( syntax shugar for large elifs )
|
def SwitchLogic(cases, default=None):
    """
    Generate an if-tree for cases (syntax sugar for large elif chains)

    .. code-block:: python

        if cond0:
            statements0
        elif cond1:
            statements1
        else:
            default

    :param cases: iterable of tuples (condition, statements)
    :param default: default statements
    """
    result = default if default is not None else []
    # build the tree bottom-up: the last case ends up innermost
    for condition, body in reversed(cases):
        result = If(condition, body).Else(result)
    return result
|
Hdl convertible in operator check if any of items in iterable equals sigOrVal
|
def In(sigOrVal, iterable):
    """
    Hdl convertible in operator, check if any of items
    in "iterable" equals "sigOrVal"
    """
    result = None
    for item in iterable:
        eq = sigOrVal._eq(toHVal(item))
        # OR together the equality comparisons for every item
        result = eq if result is None else (result | eq)
    assert result is not None, "Parameter iterable is empty"
    return result
|
Generate for loop for static items
|
def StaticForEach(parentUnit, items, bodyFn, name=""):
    """
    Generate for loop for static items

    :param parentUnit: unit where this code should be instantiated
    :param items: items which this "for" itering on
    :param bodyFn: function which fn(item, index) or fn(item)
        returns (statementList, ack).
        It's content is performed in every iteration.
        When ack is high loop will fall to next iteration
    :param name: optional name prefix for the generated index/ack signals
    """
    items = list(items)
    itemsCnt = len(items)
    if itemsCnt == 0:
        # if there are no items there is nothing to generate
        return []
    elif itemsCnt == 1:
        # if there is only one item do not generate counter logic generate
        return bodyFn(items[0], 0)
    else:
        # if there is multiple items we have to generate counter logic
        # counter register wide enough to address itemsCnt states
        index = parentUnit._reg(name + "for_index",
                                Bits(log2ceil(itemsCnt + 1), signed=False),
                                defVal=0)
        ackSig = parentUnit._sig(name + "for_ack")
        statementLists = []
        for i, (statementList, ack) in [(i, bodyFn(item, i))
                                        for i, item in enumerate(items)]:
            # each case also drives the shared ack signal from its body
            statementLists.append(statementList + [(ackSig(ack)), ])
        # advance (with wrap-around) the index whenever the body acknowledges
        If(ackSig,
           If(index._eq(itemsCnt - 1),
              index(0)
           ).Else(
               index(index + 1)
           )
        )
        # one switch case per item; default mirrors item 0 with ack forced high
        return Switch(index)\
            .addCases(
                enumerate(statementLists)
            ).Default(
                bodyFn(items[0], 0)[0],
                ackSig(True)
            )
|
Connect src ( signals/ interfaces/ values ) to all destinations
|
def connect(src, *destinations, exclude: set=None, fit=False):
    """
    Connect src (signals/interfaces/values) to all destinations

    :param exclude: interfaces on any level on src or destinations
        which should be excluded from connection process
    :param fit: auto fit source width to destination width
    :return: list of created assignments
    """
    assignments = []
    if isinstance(src, HObjList):
        # connect lists of interfaces item by item
        for dst in destinations:
            assert len(src) == len(dst), (src, dst)
        dstIterators = [iter(d) for d in destinations]
        for srcItem in src:
            dstItems = [next(it) for it in dstIterators]
            assignments.append(
                connect(srcItem, *dstItems, exclude=exclude, fit=fit))
    else:
        for dst in destinations:
            assignments.append(_connect(src, dst, exclude, fit))
    return assignments
|
Rotate left
|
def rol(sig, howMany) -> RtlSignalBase:
    """Rotate left by ``howMany`` bits."""
    width = sig._dtype.bit_length()
    split = width - howMany
    # high part moves down, low part wraps to the top
    return sig[split:]._concat(sig[:split])
|
Logical shift left
|
def sll(sig, howMany) -> RtlSignalBase:
    """Logical shift left by ``howMany`` bits (low bits filled with zeros)."""
    keep = sig._dtype.bit_length() - howMany
    return sig[keep:]._concat(vec(0, howMany))
|
Returns the number of bits required to store x-1; for example x=8 returns 3
|
def log2ceil(x):
    """
    Returns number of bits required to store x-1,
    for example x=8 returns 3
    """
    if not isinstance(x, (int, float)):
        # coerce HDL integer constants and similar objects
        x = int(x)
    bits = 1 if x in (0, 1) else math.ceil(math.log2(x))
    return hInt(bits)
|
Check if number or constant is power of two
|
def isPow2(num) -> bool:
    """
    Check if number or constant is power of two
    """
    if not isinstance(num, int):
        num = int(num)
    # a power of two has exactly one bit set
    return num != 0 and num & (num - 1) == 0
|
Add multiple case statements from an iterable of tuples (caseVal, statements)
|
def addCases(self, tupesValStmnts):
    """
    Add multiple case statements from an iterable of tuples
    (caseVal, statements)

    :return: the switch object returned by the last Case() call
    """
    sw = self
    for caseVal, statements in tupesValStmnts:
        sw = sw.Case(caseVal, statements)
    return sw
|
c - like case of switch statement
|
def Case(self, caseVal, *statements):
    """c-like case of switch statement

    :param caseVal: value activating this case; converted to the type of
        the switched signal and required to be fully valid
    :param statements: statements executed in this branch
    :return: self (to allow chaining)
    """
    # cases may only be added to a top-level statement
    assert self.parentStm is None
    caseVal = toHVal(caseVal, self.switchOn._dtype)
    assert isinstance(caseVal, Value), caseVal
    assert caseVal._isFullVld(), "Cmp with invalid value"
    assert caseVal not in self._case_value_index, (
        "Switch statement already has case for value ", caseVal)
    self.rank += 1
    case = []
    # remember position of this value for duplicate detection / lookup
    self._case_value_index[caseVal] = len(self.cases)
    self.cases.append((caseVal, case))
    # the equality comparison becomes an input of this statement
    cond = self.switchOn._eq(caseVal)
    self._inputs.append(cond)
    cond.endpoints.append(self)
    self._register_stements(statements, case)
    return self
|
c - like default of switch statement
|
def Default(self, *statements):
    """c-like default of switch statement

    :return: self (to allow chaining)
    """
    # default may only be added to a top-level statement
    assert self.parentStm is None
    branch = []
    self.default = branch
    self._register_stements(statements, branch)
    self.rank += 1
    return self
|
: param stateFrom: apply when FSM is in this state: param condAndNextState: tupes ( condition newState ) last does not to have condition
|
def Trans(self, stateFrom, *condAndNextState):
    """
    :param stateFrom: apply when FSM is in this state
    :param condAndNextState: tupes (condition, newState),
        last does not to have condition
    :attention: transitions has priority, first has the biggest
    :attention: if stateFrom is None it is evaluated as default
    """
    top = []
    last = True
    # walk transitions in reverse to build the if/elif tree bottom-up,
    # which gives earlier transitions higher priority
    for cAndS in reversed(condAndNextState):
        if last is True:
            last = False
            # if this is last trans. it does not have to condition
            try:
                condition, newvalue = cAndS
            except TypeError:
                # unconditional last transition: it becomes the innermost
                # (default) branch of the tree
                top = self.stateReg(cAndS)
                continue
            top = []
        else:
            condition, newvalue = cAndS
        # building decision tree
        top = \
            If(condition,
               self.stateReg(newvalue)
            ).Else(
                top
            )
    if stateFrom is None:
        return Switch.Default(self, top)
    else:
        return Switch.Case(self, stateFrom, top)
|
: return: ( vcd type name vcd width )
|
def vcdTypeInfoForHType(t) -> Tuple[str, int, Callable[[RtlSignalBase, Value], str]]:
    """
    :return: tuple (vcd type name, vcd width, value formatter)
    :raise ValueError: for unsupported HDL types
    """
    if isinstance(t, (SimBitsT, Bits, HBool)):
        return (VCD_SIG_TYPE.WIRE, t.bit_length(), vcdBitsFormatter)
    if isinstance(t, HEnum):
        # enums are traced as a single "real" value
        return (VCD_SIG_TYPE.REAL, 1, vcdEnumFormatter)
    raise ValueError(t)
|
Register signals from interfaces for Interface or Unit instances
|
def vcdRegisterInterfaces(self, obj: Union[Interface, Unit],
                          parent: Optional[VcdVarWritingScope]):
    """
    Register signals from interfaces for Interface or Unit instances

    :param obj: interface or unit to walk recursively
    :param parent: scope to register into; None means the writer root
    :return: the scope created for obj, when obj has sub-interfaces
    """
    if hasattr(obj, "_interfaces") and obj._interfaces:
        name = obj._name
        parent_ = self.vcdWriter if parent is None else parent
        subScope = parent_.varScope(name)
        self._obj2scope[obj] = subScope
        with subScope:
            # register all subinterfaces
            for chIntf in obj._interfaces:
                self.vcdRegisterInterfaces(chIntf, subScope)
            if isinstance(obj, (Unit, SimModel)):
                # register interfaces from all subunits
                for u in obj._units:
                    self.vcdRegisterInterfaces(u, subScope)
        return subScope
    else:
        # leaf: a plain signal-like object with a HDL type
        t = obj._dtype
        if isinstance(t, self.supported_type_classes):
            tName, width, formatter = vcdTypeInfoForHType(t)
            try:
                parent.addVar(obj, getSignalName(obj),
                              tName, width, formatter)
            except VarAlreadyRegistered:
                # the same signal may be reachable via several paths
                pass
|
This method is called before first step of simulation.
|
def beforeSim(self, simulator, synthesisedUnit):
    """
    This method is called before first step of simulation.
    Writes the VCD header and registers every signal for tracing.
    """
    writer = self.vcdWriter
    writer.date(datetime.now())
    writer.timescale(1)
    self.vcdRegisterInterfaces(synthesisedUnit, None)
    self.vcdRegisterRemainingSignals(synthesisedUnit)
    writer.enddefinitions()
|
This method is called for every value change of any signal.
|
def logChange(self, nowTime, sig, nextVal):
    """
    This method is called for every value change of any signal.
    Changes of signals unknown to the writer are silently ignored.
    """
    writer = self.vcdWriter
    try:
        writer.logChange(nowTime, sig, nextVal)
    except KeyError:
        # not every signal has to be registered
        pass
|
Serialize HWProcess instance
|
def HWProcess(cls, proc, ctx):
    """
    Serialize HWProcess instance

    :param proc: process to serialize; its name is uniquified in ctx scope
    :param ctx: serializer context carrying indentation and name scope
    """
    childCtx = ctx.withIndent()
    serializedStatements = [cls.asHdl(stm, childCtx)
                            for stm in proc.statements]
    proc.name = ctx.scope.checkedName(proc.name, proc)
    return cls.methodTmpl.render(
        indent=getIndent(ctx.indent),
        name=proc.name,
        statements=serializedStatements
    )
|
Trim or extend scope; lvl = 1 -> only one scope (global)
|
def setLevel(self, lvl):
    """
    Trim or extend scope
    lvl = 1 -> only one scope (global)
    """
    # pop scopes that are too deep, push new items until depth matches
    while len(self) > lvl:
        self.pop()
    while len(self) < lvl:
        self.append(NameScopeItem(len(self)))
|
: return: how many bits is this slice selecting
|
def _size(self):
    """
    :return: how many bits is this slice selecting
    """
    assert isinstance(self, Value)
    bounds = self.val
    # val holds the (start, stop) bounds of the slice
    return int(bounds[0]) - int(bounds[1])
|
Walk all interfaces on unit and instantiate agent for every interface.
|
def autoAddAgents(unit):
    """
    Walk all interfaces on unit and instantiate agent for every interface.

    :return: all monitor/driver functions which should be added to simulation
        as processes
    """
    processes = []
    for intf in unit._interfaces:
        if not intf._isExtern:
            # only externally visible interfaces get simulation agents
            continue
        intf._initSimAgent()
        assert intf._ag is not None, intf
        agent = intf._ag
        if intf._direction == INTF_DIRECTION.MASTER:
            agentProcesses = agent.getMonitors()
        elif intf._direction == INTF_DIRECTION.SLAVE:
            agentProcesses = agent.getDrivers()
        else:
            raise NotImplementedError("intf._direction %r for %r" % (
                intf._direction, intf))
        processes.extend(agentProcesses)
    return processes
|
Iterable of values to ints ( nonvalid = None )
|
def valuesToInts(values):
    """
    Iterable of values to ints (nonvalid = None)
    """
    # ints pass through untouched, everything else is converted
    return [d if isinstance(d, int) else valToInt(d)
            for d in values]
|
If interface has associated rst(_n) return it; otherwise try to find rst(_n) on parent recursively
|
def _getAssociatedRst(self):
"""
If interface has associated rst(_n) return it otherwise
try to find rst(_n) on parent recursively
"""
a = self._associatedRst
if a is not None:
return a
p = self._parent
assert p is not None
if isinstance(p, UnitBase):
return getRst(p)
else:
return p._getAssociatedRst()
|
If interface has associated clk return it otherwise try to find clk on parent recursively
|
def _getAssociatedClk(self):
"""
If interface has associated clk return it otherwise
try to find clk on parent recursively
"""
a = self._associatedClk
if a is not None:
return a
p = self._parent
assert p is not None
if isinstance(p, UnitBase):
return getClk(p)
else:
return p._getAssociatedClk()
|
: return: list of extra discovered processes
|
def Architecture_var(cls, v, serializerVars, extraTypes,
                     extraTypes_serialized, ctx, childCtx):
    """
    Serialize one architecture variable and append it to ``serializerVars``.

    :param v: variable to serialize; its name is uniquified in ctx scope
    :param serializerVars: output list the serialized declaration is
        appended to
    :param extraTypes: not used by this implementation
    :param extraTypes_serialized: not used by this implementation
    :param ctx: serializer context owning the name scope
    :param childCtx: context used for the variable declaration itself
    """
    v.name = ctx.scope.checkedName(v.name, v)
    serializedVar = cls.SignalItem(v, childCtx, declaration=True)
    serializerVars.append(serializedVar)
|
uniq operation with key selector
|
def distinctBy(iterable, fn):
    """
    Uniq operation with key selector: yield only items whose
    fn(item) key has not been seen before.
    """
    seen = set()
    for item in iterable:
        key = fn(item)
        if key in seen:
            continue
        seen.add(key)
        yield item
|
Get value from iterable where fn ( item ) and check if there is not fn ( other item )
|
def single(iterable, fn):
    """
    Get value from iterable where fn(item) and check
    if there is not fn(other item)

    :raise DuplicitValueExc: when there are multiple items satisfying fn()
    :raise NoValueExc: when no value satisfying fn(item) found
    """
    match = None
    matched = False
    for item in iterable:
        if not fn(item):
            continue
        if matched:
            # a second match means the predicate is not unique
            raise DuplicitValueExc(item)
        matched = True
        match = item
    if not matched:
        raise NoValueExc()
    return match
|
: return: generator of first n items from iterrable
|
def take(iterrable, howMay):
    """
    :return: generator of first n items from iterrable
    """
    assert howMay >= 0
    if howMay == 0:
        # nothing requested; do not touch the iterable at all
        return
    for i, item in enumerate(iterrable):
        yield item
        if i + 1 == howMay:
            return
|
: return: generator of tuples ( isLastFlag item )
|
def iter_with_last(iterable):
    """
    :return: generator of tuples (isLastFlag, item);
        yields nothing for an empty iterable
    """
    # Ensure it's an iterator and get the first item
    it = iter(iterable)
    # BUG FIX: a bare next() on an empty iterable raised StopIteration,
    # which PEP 479 converts into RuntimeError inside a generator.
    try:
        prev = next(it)
    except StopIteration:
        return
    for item in it:
        # lag by one item so we know we are not at the end
        yield False, prev
        prev = item
    # last item
    yield True, prev
|
same like itertools. groupby
|
def groupedby(collection, fn):
    """
    Group items of collection by the key fn(item), like itertools.groupby.

    :note: unlike itertools.groupby no pre-sorting is required
    :attention: Order of pairs is not deterministic.
    :return: generator of (key, items) pairs
    """
    groups = {}
    for item in collection:
        groups.setdefault(fn(item), []).append(item)
    yield from groups.items()
|
Flatten nested lists tuples generators and maps
|
def flatten(iterables, level=inf):
    """
    Flatten nested lists, tuples, generators and maps

    :param level: maximum depth of flattening
    """
    isContainer = isinstance(iterables, (list, tuple, GeneratorType,
                                         map, zip))
    if level >= 0 and isContainer:
        # recurse one level deeper into each child
        for item in iterables:
            yield from flatten(item, level=level - 1)
    else:
        yield iterables
|
Doc on parent class: meth: HdlStatement. _cut_off_drivers_of
|
def _cut_off_drivers_of(self, sig: RtlSignalBase):
    """
    Doc on parent class :meth:`HdlStatement._cut_off_drivers_of`

    Cut off drivers of ``sig`` from every branch of this if-statement.
    :param sig: signal whose drivers should be separated out
    :return: self when this statement drives only ``sig``; otherwise a new
        statement of the same class holding the cut-off parts
    """
    if len(self._outputs) == 1 and sig in self._outputs:
        # this statement drives only sig: detach and move it out whole
        self.parentStm = None
        return self
    # try to cut off all statements which are drivers of specified signal
    # in all branches
    child_keep_mask = []
    newIfTrue = []
    all_cut_off = True
    all_cut_off &= self._cut_off_drivers_of_list(
        sig, self.ifTrue, child_keep_mask, newIfTrue)
    self.ifTrue = list(compress(self.ifTrue, child_keep_mask))
    newElifs = []
    anyElifHit = False
    for cond, stms in self.elIfs:
        newCase = []
        child_keep_mask.clear()
        all_cut_off &= self._cut_off_drivers_of_list(
            sig, stms, child_keep_mask, newCase)
        # keep surviving statements in place (the list object is shared)
        _stms = list(compress(stms, child_keep_mask))
        stms.clear()
        stms.extend(_stms)
        if newCase:
            anyElifHit = True
        newElifs.append((cond, newCase))
    newIfFalse = None
    if self.ifFalse:
        newIfFalse = []
        child_keep_mask.clear()
        all_cut_off &= self._cut_off_drivers_of_list(
            sig, self.ifFalse, child_keep_mask, newIfFalse)
        self.ifFalse = list(compress(self.ifFalse, child_keep_mask))
    assert not all_cut_off, "everything was cut of but this should be already known at start"
    # NOTE(review): `newIfFalse` appears twice in this condition; one
    # occurrence was probably meant to be `newElifs` — confirm upstream.
    if newIfTrue or newIfFalse or anyElifHit or newIfFalse:
        # parts were cut off
        # generate new statement for them
        cond_sig = self.cond
        n = self.__class__(cond_sig, newIfTrue)
        for c, stms in newElifs:
            assert len(c) == 1
            c_sig = c[0]
            n.Elif(c_sig, stms)
        if newIfFalse is not None:
            n.Else(newIfFalse)
        if self.parentStm is None:
            # register the new top-level statement in the rtl context
            ctx = n._get_rtl_context()
            ctx.statements.add(n)
        # update io of this
        self._inputs.clear()
        self._inputs.append(cond_sig)
        for c, _ in self.elIfs:
            self._inputs.extend(c)
        # NOTE(review): cond_sig is appended a second time here — verify
        # whether this duplication is intentional.
        self._inputs.append(cond_sig)
        self._outputs.clear()
        out_add = self._outputs.append
        in_add = self._inputs.append
        # rebuild io sets from the statements which remained in self
        for stm in self._iter_stms():
            for inp in stm._inputs:
                in_add(inp)
            for outp in stm._outputs:
                out_add(outp)
        if self._sensitivity is not None or self._enclosed_for is not None:
            raise NotImplementedError(
                "Sensitivity and enclosure has to be cleaned first")
    return n
|
Doc on parent class: meth: HdlStatement. _discover_enclosure
|
def _discover_enclosure(self):
    """
    Doc on parent class :meth:`HdlStatement._discover_enclosure`

    Compute, per branch, the set of outputs that branch fully drives and
    derive from them the set of outputs enclosed by the whole statement.
    """
    outputs = self._outputs
    self._ifTrue_enclosed_for = self._discover_enclosure_for_statements(
        self.ifTrue, outputs)
    elif_encls = self._elIfs_enclosed_for = []
    for _, stms in self.elIfs:
        e = self._discover_enclosure_for_statements(
            stms, outputs)
        elif_encls.append(e)
    self._ifFalse_enclosed_for = self._discover_enclosure_for_statements(
        self.ifFalse, outputs)
    assert self._enclosed_for is None
    encl = self._enclosed_for = set()
    # a signal is enclosed by this statement only when every branch
    # (if, all elifs and else) encloses it
    for s in self._ifTrue_enclosed_for:
        enclosed = True
        for elif_e in elif_encls:
            if s not in elif_e:
                enclosed = False
                break
        if enclosed and s in self._ifFalse_enclosed_for:
            encl.add(s)
|
Doc on parent class: meth: HdlStatement. _discover_sensitivity
|
def _discover_sensitivity(self, seen: set) -> None:
    """
    Doc on parent class :meth:`HdlStatement._discover_sensitivity`

    Collect the sensitivity of this if-statement into
    ``self._sensitivity``; collection stops early once an event
    dependency is discovered.
    :param seen: set of signals already visited
    """
    assert self._sensitivity is None, self
    ctx = self._sensitivity = SensitivityCtx()
    self._discover_sensitivity_sig(self.cond, seen, ctx)
    if ctx.contains_ev_dependency:
        # event-dependent condition: branch contents add nothing further
        return
    for stm in self.ifTrue:
        stm._discover_sensitivity(seen)
        ctx.extend(stm._sensitivity)
    # elifs
    for cond, stms in self.elIfs:
        if ctx.contains_ev_dependency:
            break
        self._discover_sensitivity_sig(cond, seen, ctx)
        if ctx.contains_ev_dependency:
            break
        for stm in stms:
            if ctx.contains_ev_dependency:
                break
            stm._discover_sensitivity(seen)
            ctx.extend(stm._sensitivity)
    if not ctx.contains_ev_dependency and self.ifFalse:
        # else
        for stm in self.ifFalse:
            stm._discover_sensitivity(seen)
            ctx.extend(stm._sensitivity)
    else:
        # an event-dependent condition can not have an else branch,
        # because the event can not be negated
        assert not self.ifFalse, "can not negate event"
|
Doc on parent class: meth: HdlStatement. _iter_stms
|
def _iter_stms(self):
"""
Doc on parent class :meth:`HdlStatement._iter_stms`
"""
yield from self.ifTrue
for _, stms in self.elIfs:
yield from stms
if self.ifFalse is not None:
yield from self.ifFalse
|
Doc on parent class: meth: HdlStatement. _try_reduce
|
def _try_reduce(self) -> Tuple[bool, List[HdlStatement]]:
    """
    Doc on parent class :meth:`HdlStatement._try_reduce`

    :return: tuple (resulting statements, True when io of this statement
        changed)
    """
    # flag if IO of statement has changed
    io_change = False
    self.ifTrue, rank_decrease, _io_change = self._try_reduce_list(
        self.ifTrue)
    self.rank -= rank_decrease
    io_change |= _io_change
    new_elifs = []
    for cond, statements in self.elIfs:
        _statements, rank_decrease, _io_change = self._try_reduce_list(
            statements)
        self.rank -= rank_decrease
        io_change |= _io_change
        new_elifs.append((cond, _statements))
    # BUG FIX: the reduced elif branches were collected but never stored
    # back, leaving self.elIfs stale.
    self.elIfs = new_elifs
    if self.ifFalse is not None:
        # BUG FIX: the result flag was bound to `_io_update_required`
        # while the stale `_io_change` from the loop above was OR-ed in,
        # so an io change detected in the else branch was lost.
        self.ifFalse, rank_decrease, _io_change = self._try_reduce_list(
            self.ifFalse)
        self.rank -= rank_decrease
        io_change |= _io_change
    reduce_self = not self.condHasEffect(
        self.ifTrue, self.ifFalse, self.elIfs)
    if reduce_self:
        # the condition does not matter: replace self with its body
        res = self.ifTrue
    else:
        res = [self, ]
    self._on_reduce(reduce_self, io_change, res)
    # try merge nested ifs as elifs
    if self.ifFalse is not None and len(self.ifFalse) == 1:
        child = self.ifFalse[0]
        if isinstance(child, IfContainer):
            self._merge_nested_if_from_else(child)
    return res, io_change
|
Merge nested IfContainer from else branch to this IfContainer as elif and else branches
|
def _merge_nested_if_from_else(self, ifStm: "IfContainer"):
"""
Merge nested IfContarner form else branch to this IfContainer
as elif and else branches
"""
self.elIfs.append((ifStm.cond, ifStm.ifTrue))
self.elIfs.extend(ifStm.elIfs)
self.ifFalse = ifStm.ifFalse
|
: attention: statements has to be mergable ( to check use _is_mergable method )
|
def _merge_with_other_stm(self, other: "IfContainer") -> None:
"""
:attention: statements has to be mergable (to check use _is_mergable method)
"""
merge = self._merge_statement_lists
self.ifTrue = merge(self.ifTrue, other.ifTrue)
new_elifs = []
for ((c, elifA), (_, elifB)) in zip(self.elIfs, other.elIfs):
new_elifs.append((c, merge(elifA, elifB)))
self.elIfs = new_elifs
self.ifFalse = merge(self.ifFalse, other.ifFalse)
other.ifTrue = []
other.elIfs = []
other.ifFalse = None
self._on_merge(other)
|
: return: True if other has same meaning as this statement
|
def isSame(self, other: HdlStatement) -> bool:
    """
    :return: True if other has same meaning as this statement
    """
    if self is other:
        return True
    if self.rank != other.rank:
        # different number of sub-statements: can not be the same
        return False
    if isinstance(other, IfContainer):
        # conditions must be the very same signal object
        if self.cond is other.cond:
            # NOTE(review): len(self.ifFalse) raises TypeError when
            # ifFalse is None — confirm callers guarantee lists here.
            if len(self.ifTrue) == len(other.ifTrue) \
                    and len(self.ifFalse) == len(other.ifFalse) \
                    and len(self.elIfs) == len(other.elIfs):
                if not isSameStatementList(self.ifTrue,
                                           other.ifTrue) \
                        or not isSameStatementList(self.ifFalse,
                                                   other.ifFalse):
                    return False
                for (ac, astms), (bc, bstms) in zip(self.elIfs,
                                                    other.elIfs):
                    if not (ac == bc) or\
                            not isSameStatementList(astms, bstms):
                        return False
                return True
    return False
|
If signal is not driving anything remove it
|
def removeUnconnectedSignals(netlist):
    """
    If signal is not driving anything remove it

    Iteratively removes endpoint-less signals together with their driver
    operators/statements; operands of removed drivers are re-examined in
    the following round because they may have become unconnected too.
    """
    toDelete = set()
    toSearch = netlist.signals
    while toSearch:
        _toSearch = set()
        for sig in toSearch:
            if not sig.endpoints:
                try:
                    if sig._interface is not None:
                        # skip interfaces before we want to check them,
                        # they should not be optimized out from design
                        continue
                except AttributeError:
                    pass
                for e in sig.drivers:
                    # drivers of this signal are useless rm them
                    if isinstance(e, Operator):
                        inputs = e.operands
                        if e.result is sig:
                            e.result = None
                    else:
                        inputs = e._inputs
                        netlist.statements.discard(e)
                    for op in inputs:
                        if not isinstance(op, Value):
                            try:
                                op.endpoints.remove(e)
                            except KeyError:
                                # this operator has 2x+ same operand
                                continue
                            # operand may be unconnected now, re-check it
                            _toSearch.add(op)
                toDelete.add(sig)
        if toDelete:
            for sig in toDelete:
                if sig.ctx == netlist:
                    netlist.signals.remove(sig)
                _toSearch.discard(sig)
            toDelete = set()
        toSearch = _toSearch
|
check if process is just unconditional assignments and it is useless to merge them
|
def checkIfIsTooSimple(proc):
    """Check if process is just a single unconditional assignment,
    in which case it is useless to merge it with others."""
    try:
        onlyStm, = proc.statements
    except ValueError:
        # zero or more than one statement
        return False
    return isinstance(onlyStm, Assignment)
|
Try merge procB into procA
|
def tryToMerge(procA: HWProcess, procB: HWProcess):
    """
    Try merge procB into procA

    :raise IncompatibleStructure: if merge is not possible
    :attention: procA is now result if merge has succeed
    :return: procA which is now result of merge
    """
    # merging is refused for trivial processes, for feedback between the
    # two processes, or for structurally incompatible statement lists
    mergeable = not (
        checkIfIsTooSimple(procA)
        or checkIfIsTooSimple(procB)
        or areSetsIntersets(procA.outputs, procB.sensitivityList)
        or areSetsIntersets(procB.outputs, procA.sensitivityList)
        or not HdlStatement._is_mergable_statement_list(
            procA.statements, procB.statements))
    if not mergeable:
        raise IncompatibleStructure()
    procA.statements = HdlStatement._merge_statement_lists(
        procA.statements, procB.statements)
    procA.outputs.extend(procB.outputs)
    procA.inputs.extend(procB.inputs)
    procA.sensitivityList.extend(procB.sensitivityList)
    return procA
|
Try to merge processes as much is possible
|
def reduceProcesses(processes):
    """
    Try to merge processes as much as possible

    :param processes: list of HWProcess instances; items merged away are
        overwritten by None in place during iteration
    :return: generator of the remaining (possibly merged) processes
    """
    # sort to make order of merging deterministic
    processes.sort(key=lambda x: (x.name, maxStmId(x)), reverse=True)

    # now try to reduce processes with nearly same structure of statements into one
    # to minimize number of processes; only processes of the same rank
    # are compared against each other
    for _, procs in groupedby(processes, lambda p: p.rank):
        for iA, pA in enumerate(procs):
            if pA is None:
                # this process was already merged into an earlier one
                continue
            for iB, pB in enumerate(islice(procs, iA + 1, None)):
                if pB is None:
                    continue
                try:
                    pA = tryToMerge(pA, pB)
                except IncompatibleStructure:
                    continue
                # pB was merged into pA, mark its slot as consumed
                procs[iA + 1 + iB] = None
                # procs[iA] = pA
        for p in procs:
            if p is not None:
                yield p
|
on writeReqRecieved in monitor mode
|
def onWriteReq(self, sim, addr, data):
    """
    Callback for a received write request while in monitor mode;
    the request is recorded for later processing.
    """
    req = (WRITE, addr, data)
    self.requests.append(req)
|
Convert object to HDL string
|
def asHdl(cls, obj, ctx: HwtSerializerCtx):
    """
    Convert object to its HDL string representation

    :param obj: object to serialize
    :param ctx: HwtSerializerCtx instance
    :raise SerializerException: when no serialization method is known for obj
    """
    if isinstance(obj, RtlSignalBase):
        return cls.SignalItem(obj, ctx)
    if isinstance(obj, Value):
        return cls.Value(obj, ctx)

    # object may carry its own serialization method
    serFn = getattr(obj, "asHwt", None)
    if serFn is not None:
        return serFn(cls, ctx)

    # fall back to a serializer method named after the object's class
    serFn = getattr(cls, obj.__class__.__name__, None)
    if serFn is not None:
        return serFn(obj, ctx)

    raise SerializerException("Not implemented for %r" % (obj))
|
Entity is just forward declaration of Architecture it is not used in most HDL languages as there is no recursion in hierarchy
|
def Entity(cls, ent: Entity, ctx: HwtSerializerCtx):
    """
    Entity is just forward declaration of Architecture, it is not used
    in most HDL languages as there is no recursion in hierarchy

    :param ent: Entity instance to serialize
    :param ctx: HwtSerializerCtx instance
    :return: rendered module header string
    """
    cls.Entity_prepare(ent, ctx, serialize=False)
    # register the entity name in the global scope
    ent.name = ctx.scope.checkedName(ent.name, ent, isGlobal=True)
    ports = [(p.name, cls.HdlType(p._dtype, ctx)) for p in ent.ports]
    return unitHeadTmpl.render(
        name=ent.name,
        ports=ports,
    )
|
Convert unit to RTL using specified serializer
|
def toRtl(unitOrCls: Unit, name: str=None,
          serializer: GenericSerializer=VhdlSerializer,
          targetPlatform=DummyPlatform(), saveTo: str=None):
    """
    Convert unit to RTL using specified serializer

    :param unitOrCls: unit instance or class, which should be converted
    :param name: name override of top unit (if is None name is derived
        form class name)
    :param serializer: serializer which should be used for to RTL conversion
    :param targetPlatform: metainformatins about target platform, distributed
        on every unit under _targetPlatform attribute
        before Unit._impl() is called
    :param saveTo: directory where files should be stored
        If None RTL is returned as string.
    :return: if saveTo is None returns RTL string, else returns list of
        file names which were created
    :attention: NOTE(review): the default for targetPlatform is a shared
        mutable DummyPlatform() instance created at import time — confirm
        it is stateless
    """
    # instantiate the unit if a class was passed
    if not isinstance(unitOrCls, Unit):
        u = unitOrCls()
    else:
        u = unitOrCls

    u._loadDeclarations()
    if name is not None:
        assert isinstance(name, str)
        u._name = name

    globScope = serializer.getBaseNameScope()
    mouduleScopes = {}
    # unitCls : unitobj
    serializedClasses = {}
    # (unitCls, paramsValues) : unitObj
    # where paramsValues are dict name:value
    serializedConfiguredUnits = {}

    doSerialize = True
    createFiles = saveTo is not None
    if createFiles:
        os.makedirs(saveTo, exist_ok=True)
        files = UniqList()
    else:
        codeBuff = []

    for obj in u._toRtl(targetPlatform):
        # decide whether this object should be serialized
        # (duplicates of already serialized configurations are skipped)
        doSerialize = serializer.serializationDecision(
            obj,
            serializedClasses,
            serializedConfiguredUnits)
        if doSerialize:
            if isinstance(obj, Entity):
                # create a fresh name scope for this module;
                # the Architecture that follows reuses it via mouduleScopes
                s = globScope.fork(1)
                s.setLevel(2)
                ctx = serializer.getBaseContext()
                ctx.scope = s
                mouduleScopes[obj] = ctx
                ctx.currentUnit = obj.origin

                sc = serializer.Entity(obj, ctx)
                if createFiles:
                    fName = obj.name + serializer.fileExtension
                    fileMode = 'w'
            elif isinstance(obj, Architecture):
                try:
                    ctx = mouduleScopes[obj.entity]
                except KeyError:
                    raise SerializerException(
                        "Entity should be serialized"
                        " before architecture of %s"
                        % (obj.getEntityName()))
                sc = serializer.Architecture(obj, ctx)
                if createFiles:
                    # append to the file created for the entity
                    fName = obj.getEntityName() + serializer.fileExtension
                    fileMode = 'a'
            else:
                if hasattr(obj, "_hdlSources"):
                    # object with externally provided HDL sources,
                    # just copy the source files
                    # NOTE(review): assumes saveTo is set when _hdlSources
                    # objects are present — confirm
                    for fn in obj._hdlSources:
                        if isinstance(fn, str):
                            shutil.copy2(fn, saveTo)
                            files.append(fn)
                    continue
                else:
                    sc = serializer.asHdl(obj)

            if sc:
                if createFiles:
                    fp = os.path.join(saveTo, fName)
                    files.append(fp)

                    with open(fp, fileMode) as f:
                        if fileMode == 'a':
                            f.write("\n")
                        f.write(
                            serializer.formatter(sc)
                        )
                else:
                    codeBuff.append(sc)

        elif not createFiles:
            # serialization was skipped; leave a note about it in the output
            try:
                name = '"%s"' % obj.name
            except AttributeError:
                name = ""

            codeBuff.append(serializer.comment(
                "Object of class %s, %s was not serialized as specified" % (
                    obj.__class__.__name__, name)))

    if createFiles:
        return files
    else:
        return serializer.formatter(
            "\n".join(codeBuff)
        )
|
Resolve name for process and mark outputs of statemens as not hidden
|
def name_for_process_and_mark_outputs(statements: List[HdlStatement])\
        -> str:
    """
    Resolve a name for the process from the names of its output signals.

    :return: alphabetically first non-generic output name,
        or "" when no such output exists
    """
    candidates = [
        out.name
        for stm in statements
        for out in stm._outputs
        if not out.hasGenericName
    ]
    return min(candidates) if candidates else ""
|
Cut off drivers from statements
|
def cut_off_drivers_of(dstSignal, statements):
    """
    Cut off drivers of dstSignal from the statements.

    :return: tuple (statements which still drive something else,
        statements separated out as drivers of dstSignal)
    """
    separated = []
    keep_flags = []
    for stm in statements:
        stm._clean_signal_meta()
        cut = stm._cut_off_drivers_of(dstSignal)
        if cut is not None:
            separated.append(cut)
        # keep the statement unless it was entirely consumed by the cut
        keep_flags.append(cut is not stm)

    kept = list(compress(statements, keep_flags))
    return kept, separated
|
Pack statements into HWProcess instances: * for each out signal resolve its drivers and collect them * split statements if there is a combinational loop * merge statements if possible * resolve sensitivity lists * wrap into HWProcess instance * for every IO of process generate a name if the signal has none
|
def statements_to_HWProcesses(statements: List[HdlStatement])\
        -> Generator[HWProcess, None, None]:
    """
    Pack statements into HWProcess instances,

    * for each out signal resolve its drivers and collect them
    * split statements if there is a combinational loop
    * merge statements if it is possible
    * resolve sensitivity lists
    * wrap into HWProcess instance
    * for every IO of process generate a name if the signal has none
    """
    # create copy because this set will be reduced
    statements = copy(statements)
    # process ranks = how many assignments is probably in process
    # used to minimize number of merge tries
    processes = []
    while statements:
        stm = statements.pop()
        proc_statements = [stm, ]
        ps = _statements_to_HWProcesses(proc_statements, True)
        processes.extend(ps)

    # merge compatible processes together before yielding
    yield from reduceProcesses(processes)
|
* check if all signals are driven by something * mark signals with hidden = False if they are connecting statements or if they are external interface
|
def markVisibilityOfSignals(ctx, ctxName, signals, interfaceSignals):
    """
    * check if all signals are driven by something
    * mark signals with hidden = False if they are connecting statements
      or if they are external interface

    :param ctx: netlist context of the signals (used for error reporting)
    :param ctxName: name of the context, used in error messages
    :param signals: iterable of signals to check
    :param interfaceSignals: set of signals which are part of the external
        interface
    :raise MultipleDriversErr: when a signal has more than one
        combinational driver
    :raise NoDriverErr: when a non-interface signal has no driver and
        no fully valid default value
    """
    for sig in signals:
        driver_cnt = len(sig.drivers)
        has_comb_driver = False
        if driver_cnt > 1:
            # driven from multiple places -> must be a real (visible) signal
            sig.hidden = False
            for d in sig.drivers:
                if not isinstance(d, Operator):
                    # driven by a statement, not an expression
                    sig.hidden = False

                is_comb_driver = False
                if isinstance(d, PortItem):
                    is_comb_driver = True
                elif not d._now_is_event_dependent:
                    # statement driver: combinational if any whole-signal
                    # assignment is not event dependent
                    for a in walk_assignments(d, sig):
                        if not a.indexes\
                                and not a._is_completly_event_dependent:
                            is_comb_driver = True
                            break

                if has_comb_driver and is_comb_driver:
                    raise MultipleDriversErr(
                        "%s: Signal %r has multiple combinational drivers" %
                        (ctx.getDebugScopeName(), sig))
                has_comb_driver |= is_comb_driver
        elif driver_cnt == 1:
            if not isinstance(sig.drivers[0], Operator):
                sig.hidden = False
        else:
            # no driver at all
            sig.hidden = False
            if sig not in interfaceSignals:
                if not sig.defVal._isFullVld():
                    raise NoDriverErr(
                        sig, "Signal without any driver or valid value in ", ctxName)
                # undriven signal with a fully valid default value
                # behaves as a constant
                sig._const = True
|
Create new signal in this context
|
def sig(self, name, dtype=BIT, clk=None, syncRst=None, defVal=None):
    """
    Create new signal in this context

    :param name: name of the signal
    :param dtype: HdlType of the signal
    :param clk: clk signal, if specified signal is synthesized
        as SyncSignal
    :param syncRst: synchronous reset signal
    :param defVal: default (reset) value of the signal
    :raise SigLvlConfErr: on inconsistent clk/syncRst/defVal combination
    :return: the created signal
    """
    # normalize defVal into a Value of the requested dtype
    if isinstance(defVal, RtlSignal):
        assert defVal._const, \
            "Initial value of register has to be constant"
        _defVal = defVal._auto_cast(dtype)
    elif isinstance(defVal, Value):
        _defVal = defVal._auto_cast(dtype)
    elif isinstance(defVal, InterfaceBase):
        _defVal = defVal._sig
    else:
        _defVal = dtype.fromPy(defVal)

    if clk is not None:
        # clocked signal -> register
        s = RtlSyncSignal(self, name, dtype, _defVal)
        if syncRst is not None and defVal is None:
            raise SigLvlConfErr(
                "Probably forgotten default value on sync signal %s", name)
        if syncRst is not None:
            # on reset load the default value, otherwise the next value
            r = If(syncRst._isOn(),
                   RtlSignal.__call__(s, _defVal)
                   ).Else(
                RtlSignal.__call__(s, s.next)
            )
        else:
            r = [RtlSignal.__call__(s, s.next)]

        # update the register on rising edge of clk
        If(clk._onRisingEdge(),
           r
           )
    else:
        # plain combinational signal; reset makes no sense without a clock
        if syncRst:
            raise SigLvlConfErr(
                "Signal %s has reset but has no clk" % name)
        s = RtlSignal(self, name, dtype, defVal=_defVal)

    self.signals.add(s)
    return s
|
Build Entity and Architecture instance out of netlist representation
|
def synthesize(self, name, interfaces, targetPlatform):
    """
    Build Entity and Architecture instance out of netlist representation

    :param name: name of the resulting entity
    :param interfaces: iterable of signals which should become ports
    :param targetPlatform: platform whose callbacks are applied before
        HDL architecture generation
    :return: list [Entity, Architecture]
    """
    ent = Entity(name)
    ent._name = name + "_inst"  # instance name

    # create generics
    for _, v in self.params.items():
        ent.generics.append(v)

    # interface set for faster lookup
    if isinstance(interfaces, set):
        intfSet = interfaces
    else:
        intfSet = set(interfaces)

    # create ports
    for s in interfaces:
        pi = portItemfromSignal(s, ent)
        pi.registerInternSig(s)
        ent.ports.append(pi)
        # port signals are always visible
        s.hidden = False

    removeUnconnectedSignals(self)
    markVisibilityOfSignals(self, name, self.signals, intfSet)

    # platform-specific pre-processing hooks
    for proc in targetPlatform.beforeHdlArchGeneration:
        proc(self)

    arch = Architecture(ent)
    for p in statements_to_HWProcesses(self.statements):
        arch.processes.append(p)

    # add signals, variables etc. in architecture
    for s in self.signals:
        if s not in intfSet and not s.hidden:
            arch.variables.append(s)

    # instantiate subUnits in architecture
    for u in self.subUnits:
        arch.componentInstances.append(u)

    # add components in architecture
    for su in distinctBy(self.subUnits, lambda x: x.name):
        arch.components.append(su)

    self.synthesised = True

    return [ent, arch]
|
Convert python or hdl value/ signal object to hdl value/ signal object
|
def toHVal(op: Any, suggestedType: Optional[HdlType]=None):
    """
    Convert a Python or HDL value/signal object to an HDL value/signal object.

    :param op: object to convert
    :param suggestedType: optional HdlType which should be used for
        the conversion of a Python int
    :raise TypeError: for out-of-range ints or unknown Python types
    """
    # already an HDL value or signal
    if isinstance(op, (Value, SignalItem)):
        return op
    if isinstance(op, InterfaceBase):
        return op._sig

    if isinstance(op, int):
        if suggestedType is not None:
            return suggestedType.fromPy(op)
        # without an explicit type the int must fit into 32 bits
        if op >= 1 << 31:
            raise TypeError(
                "Number %d is too big to fit in 32 bit integer of HDL"
                " use Bits type instead" % op)
        if op < -(1 << 31):
            raise TypeError(
                "Number %d is too small to fit in 32 bit integer"
                " of HDL use Bits type instead" % op)

    hType = defaultPyConversions.get(type(op))
    if hType is None:
        raise TypeError("Unknown hardware type for %s" % (op.__class__))
    return hType.fromPy(op)
|
: param dst: is signal connected with value: param val: value object can be instance of Signal or Value
|
def Value(cls, val, ctx: SerializerCtx):
    """
    :param dst: is signal connected with value
    :param val: value object, can be instance of Signal or Value
    :raise SerializerException: when the value type has no known
        serialization method
    """
    if isinstance(val, RtlSignalBase):
        return cls.SignalItem(val, ctx)

    c = cls.Value_try_extract_as_const(val, ctx)
    if c:
        return c

    t = val._dtype
    # dispatch on the HDL type of the value; order matters because
    # some types may be subclasses of others
    dispatch = (
        (Slice, cls.Slice_valAsHdl),
        (HArray, cls.HArrayValAsHdl),
        (Bits, cls.Bits_valAsHdl),
        (HBool, cls.Bool_valAsHdl),
        (HEnum, cls.HEnumValAsHdl),
        (Integer, cls.Integer_valAsHdl),
        (String, cls.String_valAsHdl),
    )
    for hdlT, serFn in dispatch:
        if isinstance(t, hdlT):
            return serFn(t, val, ctx)

    raise SerializerException(
        "can not resolve value serialization for %r"
        % (val))
|
Get maximum _instId from all assigments in statement
|
def getMaxStmIdForStm(stm):
    """
    Get maximum _instId from all assignments in the statement (recursively).
    """
    if isinstance(stm, Assignment):
        return stm._instId
    if isinstance(stm, WaitStm):
        # wait statements carry no assignment id
        return 0

    maxId = 0
    for child in stm._iter_stms():
        childMax = getMaxStmIdForStm(child)
        if childMax > maxId:
            maxId = childMax
    return maxId
|
get max statement id used for sorting of processes in architecture
|
def maxStmId(proc):
    """
    Get max statement id in the process,
    used for sorting of processes in architecture.
    """
    return max(
        (getMaxStmIdForStm(stm) for stm in proc.statements),
        default=0)
|
Collect data from interface
|
def monitor(self, sim):
    """Collect data from the interface (simulation agent process)."""
    enabled = self.notReset(sim) and self._enabled
    if not enabled:
        # deassert read request and do nothing this cycle
        self.wrRd(sim.write, 0)
        return

    self.wrRd(sim.write, 1)
    # wait until combinational logic settles, then sample
    yield sim.waitOnCombUpdate()
    self.data.append(self.doRead(sim))
|
write data to interface
|
def doWrite(self, sim, data):
    """Write *data* onto the data signal of the interface."""
    target = self.intf.data
    sim.write(data, target)
|
Push data to interface
|
def driver(self, sim):
    """
    Push data to interface (simulation agent process).

    Drives the next queued item (or an invalid value when the queue is
    empty), then waits for the ready signal to complete the handshake.
    """
    r = sim.read
    # load next item if the current one was consumed
    if self.actualData is NOP and self.data:
        self.actualData = self.data.popleft()

    do = self.actualData is not NOP
    if do:
        self.doWrite(sim, self.actualData)
    else:
        # nothing to send, drive invalid value
        self.doWrite(sim, None)

    en = self.notReset(sim) and self._enabled
    if not (en and do):
        # no transaction can happen in this cycle
        return

    # wait until combinational logic settles before sampling ready
    yield sim.waitOnCombUpdate()
    rd = self.isRd(r)
    if en:
        assert rd.vldMask, (
            ("%r: ready signal for interface %r is in invalid state,"
             " this would cause desynchronization") %
            (sim.now, self.intf))
    if rd.val:
        # receiver accepted the data, advance to the next item
        if self._debugOutput is not None:
            self._debugOutput.write("%s, wrote, %d: %r\n" % (
                self.intf._getFullName(),
                sim.now, self.actualData))
        if self.data:
            self.actualData = self.data.popleft()
        else:
            self.actualData = NOP
|
:param val: value of python type int or None; :param typeObj: instance of Integer; :param vldMask: if None, vldMask is resolved from val; if 0, the value is invalidated; if 1, the value has to be valid
|
def fromPy(cls, val, typeObj, vldMask=None):
    """
    :param val: value of python type int or None
    :param typeObj: instance of Integer
    :param vldMask: None means vldMask is resolved from val,
        0 invalidates the value,
        1 requires the value to be valid
    """
    assert isinstance(typeObj, Integer)
    if val is None:
        # invalid value
        assert vldMask is None or vldMask == 0
        return cls(0, typeObj, 0)
    if vldMask == 0:
        # explicitly invalidated
        return cls(False, typeObj, 0)
    return cls(int(val), typeObj, 1)
|
Note that this interface will be master
|
def _m(self):
    """
    Mark this interface as master.

    :return: self
    """
    assert not hasattr(self, "_interfaces") or not self._interfaces, \
        "Too late to change direction of interface"
    # interface direction is the opposite of the master direction
    opposite = DIRECTION.opposite(self._masterDir)
    self._direction = DIRECTION.asIntfDirection(opposite)
    return self
|
load declaratoins from _declr method This function is called first for parent and then for children
|
def _loadDeclarations(self):
    """
    Load declarations from the _declr method.
    This function is called first for parent and then for children.
    """
    if not hasattr(self, "_interfaces"):
        self._interfaces = []
    # collect sub-interfaces declared by _declr()
    self._setAttrListener = self._declrCollector
    self._declr()
    self._setAttrListener = None
    for i in self._interfaces:
        # children inherit the extern flag from this interface
        i._isExtern = self._isExtern
        i._loadDeclarations()

    # parameters are frozen after declaration
    for p in self._params:
        p.setReadOnly()
    if self._isExtern:
        # direction from inside of unit (reversed compared to outside direction)
        if self._direction == INTF_DIRECTION.UNKNOWN:
            self._direction = INTF_DIRECTION.MASTER
        self._setDirectionsLikeIn(self._direction)
|
Remove all signals from this interface ( used after unit is synthesized and its parent is connecting its interface to this unit )
|
def _clean(self, rmConnetions=True, lockNonExternal=True):
"""
Remove all signals from this interface (used after unit is synthesized
and its parent is connecting its interface to this unit)
"""
if self._interfaces:
for i in self._interfaces:
i._clean(rmConnetions=rmConnetions,
lockNonExternal=lockNonExternal)
else:
self._sigInside = self._sig
del self._sig
if lockNonExternal and not self._isExtern:
self._isAccessible = False
|
generate _sig for each interface which has no subinterface if already has _sig return it instead
|
def _signalsForInterface(self, context, prefix='', typeTransform=None):
    """
    generate _sig for each interface which has no subinterface
    if already has _sig return it instead

    :param context: instance of RtlNetlist where signals should be created
    :param prefix: name prefix for created signals
    :param typeTransform: optional function (type) returns modified type
        for signal
    :return: list of created (or already existing) signals
    """
    sigs = []
    if self._interfaces:
        # composite interface: recurse into sub-interfaces
        for intf in self._interfaces:
            sigs.extend(
                intf._signalsForInterface(context, prefix,
                                          typeTransform=typeTransform))
    else:
        if hasattr(self, '_sig'):
            # signal already exists, reuse it
            sigs = [self._sig]
        else:
            t = self._dtype
            if typeTransform is not None:
                t = typeTransform(t)

            s = context.sig(prefix + self._getPhysicalName(), t)
            s._interface = self
            self._sig = s

            if hasattr(self, '_boundedEntityPort'):
                # connect the fresh signal to the bound entity port
                self._boundedEntityPort.connectSig(self._sig)
            sigs = [s]

    return sigs
|
Get name in HDL
|
def _getPhysicalName(self):
"""Get name in HDL """
if hasattr(self, "_boundedEntityPort"):
return self._boundedEntityPort.name
else:
return self._getFullName().replace('.', self._NAME_SEPARATOR)
|
Replace parameter on this interface ( in configuration stage )
|
def _replaceParam(self, p, newP):
"""
Replace parameter on this interface (in configuration stage)
:ivar pName: actual name of param on me
:ivar newP: new Param instance by which should be old replaced
"""
i = self._params.index(p)
pName = p._scopes[self][1]
assert i > -1
self._params[i] = newP
del p._scopes[self] # remove reference from old param
newP._registerScope(pName, self)
object.__setattr__(self, pName, newP)
|
: note: doc in: func: ~hwt. synthesizer. interfaceLevel. propDeclCollector. _updateParamsFrom
|
def _updateParamsFrom(self, otherObj, updater=_default_param_updater,
                      exclude=None, prefix=""):
    """
    :note: doc in :func:`~hwt.synthesizer.interfaceLevel.propDeclCollector._updateParamsFrom`
    """
    # delegate to the shared implementation on PropDeclrCollector
    PropDeclrCollector._updateParamsFrom(
        self, otherObj, updater, exclude, prefix)
|
Sum of all width of interfaces in this interface
|
def _bit_length(self):
"""Sum of all width of interfaces in this interface"""
try:
interfaces = self._interfaces
except AttributeError:
interfaces = None
if interfaces is None:
# not loaded interface
_intf = self._clone()
_intf._loadDeclarations()
interfaces = _intf._interfaces
if interfaces:
w = 0
for i in interfaces:
w += i._bit_length()
return w
else:
return self._dtype.bit_length()
|
connect to another interface interface ( on rtl level ) works like self < = master in VHDL
|
def _connectTo(self, master, exclude=None, fit=False):
"""
connect to another interface interface (on rtl level)
works like self <= master in VHDL
"""
return list(self._connectToIter(master, exclude, fit))
|
get sensitivity type for operator
|
def sensitivityByOp(op):
    """
    Get sensitivity type for an edge operator.

    :raise TypeError: when op is not an edge operator
    """
    if op == AllOps.FALLING_EDGE:
        return SENSITIVITY.FALLING
    if op == AllOps.RISING_EDGE:
        return SENSITIVITY.RISING
    raise TypeError()
|
Load all operands and process them by self. _evalFn
|
def eval(self, operator, simulator=None):
    """Load all operand values and process them by self._evalFn."""
    def unwrap(v):
        # dig through signal wrappers down to the actual Value
        while not isinstance(v, Value):
            v = v._val
        return v

    operands = [unwrap(o) for o in operator.operands]
    if isEventDependentOp(operator.operator):
        # event-dependent operators also need the current simulation time
        operands.append(simulator.now)
    elif operator.operator == AllOps.IntToBits:
        # conversion needs the target type
        operands.append(operator.result._dtype)
    return self._evalFn(*operands)
|
Cast signed - unsigned to int or bool
|
def convertBits(self, sigOrVal, toType):
    """
    Cast signed-unsigned, to int or bool

    :param sigOrVal: signal or value of this Bits type
    :param toType: target HdlType
    """
    if isinstance(sigOrVal, Value):
        # constant: evaluate the cast right away
        return convertBits__val(self, sigOrVal, toType)
    elif isinstance(toType, HBool):
        if self.bit_length() == 1:
            # 1-bit -> bool comparison against the active value
            # NOTE(review): assumes `negated` marks active-low encoding
            # — confirm
            v = 0 if sigOrVal._dtype.negated else 1
            return sigOrVal._eq(self.getValueCls().fromPy(v, self))
    elif isinstance(toType, Bits):
        if self.bit_length() == toType.bit_length():
            # same width: only signedness changes
            return sigOrVal._convSign(toType.signed)
    elif toType == INT:
        return Operator.withRes(AllOps.BitsToInt, [sigOrVal], toType)

    # fall back to the default cast resolution
    return default_auto_cast_fn(self, sigOrVal, toType)
|
Reinterpret signal of type Bits to signal of type HStruct
|
def reinterpret_bits_to_hstruct(sigOrVal, hStructT):
    """
    Reinterpret signal of type Bits to signal of type HStruct

    :param sigOrVal: Bits signal or value to reinterpret
    :param hStructT: target HStruct type
    :return: container of hStructT with the fields filled in
    """
    res = hStructT.fromPy(None)
    bitAddr = 0
    for field in hStructT.fields:
        fieldT = field.dtype
        width = fieldT.bit_length()
        if field.name is not None:
            # unnamed fields are padding and are skipped
            bits = sigOrVal[(width + bitAddr):bitAddr]
            setattr(res, field.name, bits._reinterpret_cast(fieldT))
        bitAddr += width

    return res
|
Cast object of same bit size between to other type ( f. e. bits to struct union or array )
|
def reinterpretBits(self, sigOrVal, toType):
    """
    Cast object of same bit size between to other type
    (f.e. bits to struct, union or array)

    :param sigOrVal: signal or value of this Bits type
    :param toType: target HdlType
    :raise NotImplementedError: for reinterpretation to HUnion
    """
    if isinstance(sigOrVal, Value):
        return reinterpretBits__val(self, sigOrVal, toType)
    elif isinstance(toType, Bits):
        return fitTo_t(sigOrVal, toType)
    elif sigOrVal._dtype.bit_length() == toType.bit_length():
        if isinstance(toType, HStruct):
            # BUGFIX: was `raise reinterpret_bits_to_hstruct(...)`,
            # which would be a TypeError at runtime because the result
            # is not an exception; the converted value must be returned
            return reinterpret_bits_to_hstruct(sigOrVal, toType)
        elif isinstance(toType, HUnion):
            raise NotImplementedError()
        elif isinstance(toType, HArray):
            # BUGFIX: the converted array was computed but never returned
            return reinterpret_bits_to_harray(sigOrVal, toType)

    return default_auto_cast_fn(self, sigOrVal, toType)
|
Sort items from iterators (generators) by always selecting the item with the lowest value (min first)
|
def iterSort(iterators, cmpFn):
    """
    Sort items from iterators (generators) by always selecting the item
    with the lowest value (min first)

    :attention: presumably each input iterator is already sorted by the
        same criteria — confirm with callers
    :param cmpFn: binary predicate cmpFn(a, b) -> True when a sorts
        before b
    :return: generator of tuples (origin index, item) where origin index
        is index of iterator in "iterators" from where item commes from
    """
    # prime each iterator with its first item, dropping empty ones
    actual = []
    _iterators = []
    for i, it in enumerate(iterators):
        try:
            a = next(it)
            _iterators.append((i, it))
            actual.append(a)
        except StopIteration:
            continue

    while True:
        if not _iterators:
            return
        elif len(_iterators) == 1:
            # single source left, drain it directly
            originIndex, it = _iterators[0]
            yield originIndex, actual[0]
            for item in it:
                yield originIndex, item
            return

        # select minimum and iterator from where it comes from
        minimum = None
        minimumIndex = None
        secondMin = None
        for i, val in enumerate(actual):
            skipSecMinCheck = False
            if minimum is None:
                minimum = val
                minimumIndex = i
            elif cmpFn(val, minimum):
                secondMin = minimum
                minimum = val
                minimumIndex = i
                skipSecMinCheck = True
            elif not skipSecMinCheck and (
                    secondMin is None or cmpFn(val, secondMin)):
                # NOTE(review): skipSecMinCheck is always False when this
                # branch is evaluated (it is reset at the top of each
                # iteration and only set in the mutually exclusive branch
                # above), so this guard is dead code
                secondMin = val

        actualI, actualIt = _iterators[minimumIndex]
        # consume from actual iterator while its items stay below the
        # second smallest head
        while not cmpFn(secondMin, minimum):
            yield (actualI, minimum)
            try:
                minimum = next(actualIt)
            except StopIteration:
                minimum = None
                break

        if minimum is None:
            # iterator exhausted, drop it and its head slot
            del _iterators[minimumIndex]
            del actual[minimumIndex]
        else:
            # minimum is not minimum anymore
            actual[minimumIndex] = minimum
|
: param splitsOnWord: list of lists of parts ( fields splited on word boundaries ): return: generators of ChoicesOfFrameParts for each word which are not crossing word boundaries
|
def groupIntoChoices(splitsOnWord, wordWidth: int, origin: OneOfTransaction):
    """
    :param splitsOnWord: list of lists of parts (fields splited on word
        boundaries)
    :param wordWidth: width of the data word in bits
    :param origin: OneOfTransaction from which the parts come
    :return: generators of ChoicesOfFrameParts for each word
        which are not crossing word boundaries
    """
    # order parts by the index of the word they belong to
    def cmpWordIndex(a, b):
        return a.startOfPart // wordWidth < b.startOfPart // wordWidth

    actual = None
    itCnt = len(splitsOnWord)
    for i, item in iterSort(splitsOnWord, cmpWordIndex):
        _actualW = item.startOfPart // wordWidth
        if actual is None:
            # first pass: open the first word with one choice slot
            # per possible transaction
            actual = ChoicesOfFrameParts(item.startOfPart, origin)
            actual.extend(
                ChoiceOfFrameParts(actual,
                                   origin.possibleTransactions[_i])
                for _i in range(itCnt))
            actualW = _actualW
        elif _actualW > actualW:
            # item belongs to a later word: flush the current word
            # and open a new one
            actual.resolveEnd()
            yield actual
            actual = ChoicesOfFrameParts(item.startOfPart, origin)
            actual.extend(
                ChoiceOfFrameParts(actual,
                                   origin.possibleTransactions[_i])
                for _i in range(itCnt))
            actualW = _actualW
        # i is the index of the source iterator == index of the choice
        actual[i].append(item)

    if actual is not None:
        # flush the last word
        actual.setIsLast(True)
        actual.resolveEnd()
        yield actual
|
Count of complete words between two addresses
|
def fullWordCnt(self, start: int, end: int):
    """Count of complete words between two bit addresses."""
    assert end >= start, (start, end)
    # bits from start up to the next word boundary do not count
    span = (end - start) - (start % self.wordWidth)
    if span < 0:
        span = 0
    return span // self.wordWidth
|
Group transaction parts splited on words to words
|
def groupByWordIndex(self, transaction: 'TransTmpl', offset: int):
    """
    Group transaction parts split on words by their word index.

    :param transaction: TransTmpl instance whose parts
        should be grouped into words
    :param offset: bit address offset passed to splitOnWords
    :return: generator of tuples (wordIndex, list of transaction parts
        in this word)
    """
    wordWidth = self.wordWidth
    currentW = None
    buff = []
    for part in self.splitOnWords(transaction, offset):
        w = part.startOfPart // wordWidth
        if currentW is None:
            # first part seen
            currentW = w
        elif w > currentW:
            # moved into a later word, flush the collected parts
            yield (currentW, buff)
            buff = []
            currentW = w
        buff.append(part)

    if buff:
        yield (currentW, buff)
|
: return: generator of TransPart instance
|
def splitOnWords(self, transaction, addrOffset=0):
    """
    Split a transaction template into word-sized parts.

    :param transaction: TransTmpl to split
    :param addrOffset: bit address offset of the transaction start
    :return: generator of TransPart instance
    """
    wordWidth = self.wordWidth
    end = addrOffset
    for tmp in transaction.walkFlatten(offset=addrOffset):
        if isinstance(tmp, OneOfTransaction):
            # union-like part: split each possible transaction
            # and group the results into choices per word
            split = [self.splitOnWords(ch, end)
                     for ch in tmp.possibleTransactions]
            yield from groupIntoChoices(split, wordWidth, tmp)
            end = addrOffset + tmp.possibleTransactions[0].bitAddrEnd
        elif isinstance(tmp, StreamTransaction):
            ch_len = tmp.child.bit_length()
            if end % self.wordWidth != 0 or ch_len != self.wordWidth:
                # assert start, end is aligned
                raise NotImplementedError(tmp)
            else:
                s = StreamOfFramePars(end, tmp)
                s.extend(self.splitOnWords(tmp.child, end))
                s.setIsLast(True)
                s.resolveEnd()
                yield s
                end = addrOffset + tmp.child.bitAddrEnd
        else:
            # plain field: cut it on word boundaries
            (base, end), tmpl = tmp
            startOfPart = base
            while startOfPart != end:
                wordIndex = startOfPart // wordWidth
                endOfWord = (wordIndex + 1) * wordWidth
                endOfPart = min(endOfWord, end)
                inFieldOffset = startOfPart - base
                yield TransPart(self, tmpl, startOfPart, endOfPart,
                                inFieldOffset)
                startOfPart = endOfPart
|
: param val: value of python type bool or None: param typeObj: instance of HdlType: param vldMask: None vldMask is resolved from val if is 0 value is invalidated if is 1 value has to be valid
|
def fromPy(cls, val, typeObj, vldMask=None):
    """
    :param val: value of python type bool or None
    :param typeObj: instance of HdlType
    :param vldMask: None means vldMask is resolved from val,
        0 invalidates the value,
        1 requires the value to be valid
    """
    if val is None:
        # invalid value
        assert vldMask is None or vldMask == 0
        return cls(False, typeObj, 0)
    if vldMask == 0:
        # explicitly invalidated
        return cls(False, typeObj, 0)
    return cls(bool(val), typeObj, 1)
|
Pretty print interface
|
def pprintInterface(intf, prefix="", indent=0, file=sys.stdout):
    """
    Pretty print interface

    :param intf: interface or HObjList of interfaces to print
    :param prefix: string printed in front of the interface name
    :param indent: current indentation level
    :param file: output stream
    """
    # BUGFIX: was `if s is not "":` — identity comparison against a str
    # literal (SyntaxWarning, implementation-dependent, and `!=` would
    # invoke the signal's overloaded comparison); build the suffix
    # directly from the presence of _sig instead
    try:
        s = " " + repr(intf._sig)
    except AttributeError:
        s = ""
    file.write("".join([getIndent(indent), prefix, repr(intf._getFullName()),
                        s]))
    file.write("\n")

    if isinstance(intf, HObjList):
        for p in intf:
            # interfaces have already name of this array and index in it's name
            pprintInterface(p, prefix=prefix, indent=indent + 1, file=file)
    else:
        for i in intf._interfaces:
            pprintInterface(i, indent=indent + 1, file=file)
|
Convert transaction template into FrameTmpls
|
def framesFromTransTmpl(transaction: 'TransTmpl',
                        wordWidth: int,
                        maxFrameLen: Union[int, float]=inf,
                        maxPaddingWords: Union[int, float]=inf,
                        trimPaddingWordsOnStart: bool=False,
                        trimPaddingWordsOnEnd: bool=False) -> Generator[
        'FrameTmpl', None, None]:
    """
    Convert transaction template into FrameTmpls

    :param transaction: transaction template used which are FrameTmpls
        created from
    :param wordWidth: width of data signal in target interface
        where frames will be used
    :param maxFrameLen: maximum length of frame in bits,
        if exceeded another frame will be created
    :param maxPaddingWords: maximum of continual padding words in frame,
        if exceed frame is split and words are cut of
    :attention: if maxPaddingWords<inf trimPaddingWordsOnEnd
        or trimPaddingWordsOnStart has to be True
        to decide where padding should be trimmed
    :param trimPaddingWordsOnStart: trim padding from start of frame
        at word granularity
    :param trimPaddingWordsOnEnd: trim padding from end of frame
        at word granularity
    """
    isFirstInFrame = True
    partsPending = False
    startOfThisFrame = 0

    assert maxFrameLen > 0
    assert maxPaddingWords >= 0
    if maxPaddingWords < inf:
        assert trimPaddingWordsOnStart or trimPaddingWordsOnEnd, \
            "Padding has to be cut off somewhere"

    it = TransTmplWordIterator(wordWidth)
    lastWordI = 0
    endOfThisFrame = maxFrameLen
    parts = []
    for wordI, word in it.groupByWordIndex(transaction, 0):
        if wordI * wordWidth >= endOfThisFrame:
            # now in first+ word behind the frame
            # cut off padding at end of frame
            paddingWords = wordI - lastWordI
            if trimPaddingWordsOnEnd and paddingWords > maxPaddingWords:
                # cut off padding and align end of frame to word
                _endOfThisFrame = (lastWordI + 1) * wordWidth
            else:
                _endOfThisFrame = wordI * wordWidth

            yield FrameTmpl(transaction,
                            wordWidth,
                            startOfThisFrame,
                            _endOfThisFrame,
                            parts)

            # prepare for start of new frame
            parts = []
            isFirstInFrame = True
            partsPending = False
            # start on new word
            startOfThisFrame = _endOfThisFrame
            endOfThisFrame = startOfThisFrame + maxFrameLen
            lastWordI = wordI

        # check if padding at potential end of frame can be cut off
        if (not isFirstInFrame
                and trimPaddingWordsOnEnd
                and wordI - lastWordI > 1):
            # there is too much continual padding,
            # cut it out and start new frame
            _endOfThisFrame = (lastWordI + 1) * wordWidth
            yield FrameTmpl(transaction,
                            wordWidth,
                            startOfThisFrame,
                            _endOfThisFrame,
                            parts)

            # prepare for start of new frame
            parts = []
            isFirstInFrame = True
            partsPending = False
            # start on new word
            startOfThisFrame = _endOfThisFrame
            endOfThisFrame = startOfThisFrame + maxFrameLen
            lastWordI = wordI - 1

        if isFirstInFrame:
            partsPending = True
            isFirstInFrame = False
            # cut off padding at start of frame
            paddingWords = wordI - lastWordI
            if trimPaddingWordsOnStart and paddingWords > maxPaddingWords:
                startOfThisFrame += paddingWords * wordWidth
            endOfThisFrame = startOfThisFrame + maxFrameLen

        # resolve end of this part
        parts.extend(word)
        lastWordI = wordI

    # reminder in "parts" after last iteration
    endOfThisFrame = transaction.bitAddrEnd
    withPadding = not (trimPaddingWordsOnEnd or trimPaddingWordsOnStart)
    if partsPending or (withPadding
                        and endOfThisFrame != startOfThisFrame):
        # cut off padding at end of frame
        endOfLastWord = (lastWordI + 1) * wordWidth
        if endOfThisFrame < endOfLastWord:
            endOfThisFrame = endOfLastWord
        else:
            paddingWords = it.fullWordCnt(endOfLastWord, endOfThisFrame)
            if trimPaddingWordsOnEnd and paddingWords > maxPaddingWords:
                endOfThisFrame -= paddingWords * wordWidth
                # align end of frame to word
                endOfThisFrame = min(startOfThisFrame +
                                     maxFrameLen, endOfThisFrame)

        yield FrameTmpl(transaction,
                        wordWidth,
                        startOfThisFrame,
                        endOfThisFrame,
                        parts)

        parts = []
        startOfThisFrame = endOfThisFrame

    # final padding on the end
    while withPadding and startOfThisFrame < transaction.bitAddrEnd:
        endOfThisFrame = min(startOfThisFrame +
                             maxFrameLen, transaction.bitAddrEnd)

        yield FrameTmpl(transaction,
                        wordWidth,
                        startOfThisFrame,
                        endOfThisFrame,
                        [])

        startOfThisFrame = endOfThisFrame
|
Walk enumerated words in this frame
|
def walkWords(self, showPadding: bool=False):
    """
    Walk enumerated words in this frame.

    Groups the frame's TransParts by the bus word they fall into and
    yields one (wordIndex, parts) tuple per word that contains anything.

    :attention: not all indexes has to be present, only words
        with items will be generated when not showPadding
    :param showPadding: padding TransParts are also present
        (synthesized on the fly with origin=None to fill the gaps
        between real parts and up to the end of the last word)
    :return: generator of tuples (wordIndex, list of TransParts
        in this word)
    """
    wIndex = 0
    # bit address where the previously emitted part/padding ended
    lastEnd = self.startBitAddr
    # parts collected for the word currently being assembled
    parts = []
    for p in self.parts:
        end = p.startOfPart
        if showPadding and end != lastEnd:
            # insert padding
            # there is a gap between the previous part and this one;
            # fill it word-by-word with padding TransParts
            while end != lastEnd:
                assert end >= lastEnd, (end, lastEnd)
                # bit address of the end of the word containing lastEnd
                # (the +1 makes a word-aligned lastEnd advance to the
                # NEXT word boundary)
                endOfWord = ceil(
                    (lastEnd + 1) / self.wordWidth) * self.wordWidth
                # padding piece never crosses a word boundary
                endOfPadding = min(endOfWord, end)
                _p = TransPart(self, None, lastEnd, endOfPadding, 0)
                parts.append(_p)

                if endOfPadding >= endOfWord:
                    # word completely filled by padding -> flush it
                    yield (wIndex, parts)
                    wIndex += 1
                    parts = []

                lastEnd = endOfPadding

        if self._wordIndx(lastEnd) != self._wordIndx(p.startOfPart):
            # this part starts in a later word than the collected ones;
            # flush the current word before switching to the new one
            yield (wIndex, parts)

            wIndex += 1
            parts = []
            lastEnd = p.endOfPart

        parts.append(p)
        lastEnd = p.endOfPart
        if lastEnd % self.wordWidth == 0:
            # part ends exactly on a word boundary -> word is complete
            yield (wIndex, parts)

            wIndex += 1
            parts = []

    if showPadding and (parts
                        or lastEnd != self.endBitAddr
                        or lastEnd % self.wordWidth != 0):
        # align end to end of last word
        # pad from the last real part up to the word-aligned frame end
        end = ceil(self.endBitAddr / self.wordWidth) * self.wordWidth
        while end != lastEnd:
            assert end >= lastEnd, (end, lastEnd)
            endOfWord = ((lastEnd // self.wordWidth) + 1) * self.wordWidth
            endOfPadding = min(endOfWord, end)
            _p = TransPart(self, None, lastEnd, endOfPadding, 0)
            _p.parent = self
            parts.append(_p)

            if endOfPadding >= endOfWord:
                yield (wIndex, parts)
                wIndex += 1
                parts = []

            lastEnd = endOfPadding

    if parts:
        # in the case end of frame is not aligned to end of word
        yield (wIndex, parts)
|
Construct a dictionary {StructField: value} for faster lookup of field values
|
def fieldToDataDict(dtype, data, res):
    """
    Construct dictionary {StructField: value} for faster lookup of values
    for fields.

    :param dtype: struct type whose fields are walked
    :param data: dict {fieldName: value} with values for the fields,
        or None when no data is available
    :param res: dict where resolved {StructField: value} pairs are
        accumulated (modified in place and also returned)
    :return: res
    """
    # data may be None or a (possibly incomplete) dict; a missing field
    # is treated the same as an absent value.
    # BUGFIX: the original only caught KeyError, so data=None crashed
    # with an uncaught TypeError on data[f.name].
    for f in dtype.fields:
        if data is None:
            fVal = None
        else:
            try:
                fVal = data[f.name]
            except KeyError:
                fVal = None

        if isinstance(f.dtype, Bits):
            if fVal is not None:
                assert isinstance(fVal, int)
                res[f] = fVal
        elif isinstance(f.dtype, HStruct):
            # recurse into nested struct value
            if fVal:
                FrameTmpl.fieldToDataDict(f.dtype, fVal, res)
        elif isinstance(f.dtype, HArray):
            # array values are stored as-is for later per-item resolution
            if fVal:
                res[f] = fVal

    return res
|
Pack data into list of BitsVal of specified dataWidth
|
def packData(self, data):
    """
    Pack data into a sequence of BitsVal of specified dataWidth.

    :param data: dict of values for struct fields {fieldName: value}
    :return: generator of BitsVal which are representing values of words
    """
    typeOfWord = simBitsT(self.wordWidth, None)
    fieldToVal = self._fieldToTPart
    if fieldToVal is None:
        # NOTE(review): this cache is built from the first `data` seen
        # and reused on later calls even if `data` differs — confirm
        # packData is only ever called with one data set per template
        fieldToVal = self._fieldToTPart = self.fieldToDataDict(
            self.origin.dtype,
            data,
            {})

    for _, transParts in self.walkWords(showPadding=True):
        # accumulators for the current bus word: value bits and
        # valid-mask bits
        actualVldMask = 0
        actualVal = 0
        for tPart in transParts:
            high, low = tPart.getBusWordBitRange()
            fhigh, flow = tPart.getFieldBitRange()
            if not tPart.isPadding:
                val = fieldToVal.get(tPart.tmpl.origin, None)
            else:
                val = None

            if val is None:
                # padding or field without a supplied value:
                # zero bits, marked invalid
                newBits = 0
                vld = 0
            else:
                newBits = selectBitRange(val, flow, fhigh - flow)
                # BUGFIX: mask must not be pre-shifted by `low`;
                # setBitRange() shifts newBits to bitsStart itself, so the
                # old `mask(high - low) << low` was shifted twice and then
                # truncated by the length mask
                vld = mask(high - low)

            actualVal = setBitRange(actualVal, low, high - low, newBits)
            # BUGFIX: accumulate into actualVldMask (the original passed
            # actualVal here, destroying the mask of earlier parts and
            # mixing data bits into the valid mask)
            actualVldMask = setBitRange(actualVldMask, low, high - low, vld)

        yield typeOfWord.getValueCls()(actualVal, typeOfWord,
                                       actualVldMask, -1)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.