@@ -1976,6 +1976,57 @@ def __call__(cls, shape=None, src_loc_at=0, **kwargs):
         return signal
 
 
+# also used for MemoryData.Init
+def _get_init_value(init, shape, what="signal"):
+    orig_init = init
+    orig_shape = shape
+    shape = Shape.cast(shape)
+    if isinstance(orig_shape, ShapeCastable):
+        try:
+            init = Const.cast(orig_shape.const(init))
+        except Exception:
+            raise TypeError(f"Initial value must be a constant initializer of {orig_shape!r}")
+        if init.shape() != Shape.cast(shape):
+            raise ValueError(f"Constant returned by {orig_shape!r}.const() must have the shape "
+                             f"that it casts to, {shape!r}, and not {init.shape()!r}")
+        return init.value
+    else:
+        if init is None:
+            init = 0
+        try:
+            init = Const.cast(init)
+        except TypeError:
+            raise TypeError("Initial value must be a constant-castable expression, not {!r}"
+                            .format(orig_init))
+        # Avoid false positives for all-zeroes and all-ones
+        if orig_init is not None and not (isinstance(orig_init, int) and orig_init in (0, -1)):
+            if init.shape().signed and not shape.signed:
+                warnings.warn(
+                    message=f"Initial value {orig_init!r} is signed, "
+                            f"but the {what} shape is {shape!r}",
+                    category=SyntaxWarning,
+                    stacklevel=2)
+            elif (init.shape().width > shape.width or
+                  init.shape().width == shape.width and
+                  shape.signed and not init.shape().signed):
+                warnings.warn(
+                    message=f"Initial value {orig_init!r} will be truncated to "
+                            f"the {what} shape {shape!r}",
+                    category=SyntaxWarning,
+                    stacklevel=2)
+
+        if isinstance(orig_shape, range) and orig_init is not None and orig_init not in orig_shape:
+            if orig_init == orig_shape.stop:
+                raise SyntaxError(
+                    f"Initial value {orig_init!r} equals the non-inclusive end of the {what} "
+                    f"shape {orig_shape!r}; this is likely an off-by-one error")
+            else:
+                raise SyntaxError(
+                    f"Initial value {orig_init!r} is not within the {what} shape {orig_shape!r}")
+
+        return Const(init.value, shape).value
+
+
 @final
 class Signal(Value, DUID, metaclass=_SignalMeta):
     """A varying integer value.
@@ -2046,54 +2097,9 @@ def __init__(self, shape=None, *, name=None, init=None, reset=None, reset_less=F
                           DeprecationWarning, stacklevel=2)
             init = reset
 
-        orig_init = init
-        if isinstance(orig_shape, ShapeCastable):
-            try:
-                init = Const.cast(orig_shape.const(init))
-            except Exception:
-                raise TypeError("Initial value must be a constant initializer of {!r}"
-                                .format(orig_shape))
-            if init.shape() != Shape.cast(orig_shape):
-                raise ValueError("Constant returned by {!r}.const() must have the shape that "
-                                 "it casts to, {!r}, and not {!r}"
-                                 .format(orig_shape, Shape.cast(orig_shape),
-                                         init.shape()))
-        else:
-            if init is None:
-                init = 0
-            try:
-                init = Const.cast(init)
-            except TypeError:
-                raise TypeError("Initial value must be a constant-castable expression, not {!r}"
-                                .format(orig_init))
-            # Avoid false positives for all-zeroes and all-ones
-            if orig_init is not None and not (isinstance(orig_init, int) and orig_init in (0, -1)):
-                if init.shape().signed and not self._signed:
-                    warnings.warn(
-                        message="Initial value {!r} is signed, but the signal shape is {!r}"
-                                .format(orig_init, shape),
-                        category=SyntaxWarning,
-                        stacklevel=2)
-                elif (init.shape().width > self._width or
-                      init.shape().width == self._width and
-                      self._signed and not init.shape().signed):
-                    warnings.warn(
-                        message="Initial value {!r} will be truncated to the signal shape {!r}"
-                                .format(orig_init, shape),
-                        category=SyntaxWarning,
-                        stacklevel=2)
-        self._init = Const(init.value, shape).value
+        self._init = _get_init_value(init, unsigned(1) if orig_shape is None else orig_shape)
         self._reset_less = bool(reset_less)
 
-        if isinstance(orig_shape, range) and orig_init is not None and orig_init not in orig_shape:
-            if orig_init == orig_shape.stop:
-                raise SyntaxError(
-                    f"Initial value {orig_init!r} equals the non-inclusive end of the signal "
-                    f"shape {orig_shape!r}; this is likely an off-by-one error")
-            else:
-                raise SyntaxError(
-                    f"Initial value {orig_init!r} is not within the signal shape {orig_shape!r}")
-
         self._attrs = OrderedDict(() if attrs is None else attrs)
 
         if isinstance(orig_shape, ShapeCastable):
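The call site keeps the old default: when no shape is given, the initial value is now validated against an explicit `unsigned(1)` instead of the previously computed width, and `self._init` remains a plain integer normalized to the signal shape. A rough check of the preserved behaviour, again an illustrative sketch that assumes the public `amaranth.hdl` exports and the `.init` property from the same release line:

# Illustrative sketch (not part of the commit); assumes amaranth.hdl exports
# Signal/signed/unsigned and that Signal exposes the `.init` property.
from amaranth.hdl import Signal, signed, unsigned

s = Signal()                      # no shape -> validated against unsigned(1)
assert s.shape().width == 1 and not s.shape().signed
assert Signal(init=1).init == 1   # still fits the 1-bit default
assert Signal(8, init=3).init == 3
assert Signal(signed(4), init=-2).init == -2   # normalized via Const(..., shape)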