diff --git a/compiler/semtypes.nim b/compiler/semtypes.nim
index 3fa4493345..8ddc2196a6 100644
--- a/compiler/semtypes.nim
+++ b/compiler/semtypes.nim
@@ -17,7 +17,7 @@ const
   errWrongNumberOfVariables = "wrong number of variables"
   errInvalidOrderInEnumX = "invalid order in enum '$1'"
   errOrdinalTypeExpected = "ordinal type expected"
-  errSetTooBig = "set is too large"
+  errSetTooBig = "set is too large; use `std/sets` for ordinal types with more than 2^16 elements"
   errBaseTypeMustBeOrdinal = "base type of a set must be an ordinal"
   errInheritanceOnlyWithNonFinalObjects = "inheritance only works with non-final objects"
   errXExpectsOneTypeParam = "'$1' expects one type parameter"
diff --git a/doc/sets_fragment.txt b/doc/sets_fragment.txt
index bc81897322..3e91c85f7e 100644
--- a/doc/sets_fragment.txt
+++ b/doc/sets_fragment.txt
@@ -13,9 +13,10 @@ range `0 .. MaxSetElements-1` where `MaxSetElements` is currently always
 The reason is that sets are implemented as high performance bit vectors.
 Attempting to declare a set with a larger type will result in an error:
 
-  ```nim
-  var s: set[int64] # Error: set is too large
-  ```
+  ```nim
+  var s: set[int64] # Error: set is too large; use `std/sets` for ordinal types
+                    # with more than 2^16 elements
+  ```
 
 **Note:** Nim also offers `hash sets <sets.html>`_ (which you need to import
 with `import sets`), which have no such restrictions.
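
For reference, a minimal sketch (not part of this PR) of the `std/sets` alternative that the new error message points to; the variable name and value are illustrative only:

```nim
# Sketch of the std/sets alternative suggested by the new error message.
# HashSet stores elements by hash rather than one bit per possible value,
# so it has no 2^16 restriction on the element type.
import std/sets

var s = initHashSet[int64]()     # fine: int64 is hashable
s.incl(123_456_789_012'i64)
assert 123_456_789_012'i64 in s
```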