@@ -158,28 +158,10 @@ class Multiset:
 
     @overload
     def __init__(self, dimensions: int, *, device=None, dtype=None):
-        # """Creates an empty multiset of dimensions
-
-        # Args:
-        #     dim (int): number of dimensions of the multiset.
-        # Examples::
-
-        #     >>> M = structures.Multiset(dimensions=10000)
-        # """
         ...
 
     @overload
     def __init__(self, input: Tensor, *, size=0):
-        # """Creates an empty multiset of dimensions
-
-        # Args:
-        #     input (Tensor): tensor representing a multiset.
-        # Examples::
-
-        #     >>> letters = list(string.ascii_lowercase)
-        #     >>> letters_hv = functional.random_hv(len(letters), 10000)
-        #     >>> M = structures.Multiset(input=letters_hv[0])
-        # """
         ...
 
     def __init__(self, dim_or_input: Any, **kwargs):
@@ -467,15 +449,15 @@ class Sequence:
 
     Args:
         input (Tensor): tensor representing a sequence.
-        length (int, optional): the length of the sequence provided as input. Default: ``0``.
+        size (int, optional): the length of the sequence provided as input. Default: ``0``.
 
     Examples::
 
         >>> S = structures.Sequence(10000)
 
         >>> letters = list(string.ascii_lowercase)
         >>> letters_hv = functional.random_hv(len(letters), 10000)
-        >>> S = structures.Sequence(letters_hv[0], length=1)
+        >>> S = structures.Sequence(letters_hv[0], size=1)
 
     """
 
@@ -484,11 +466,11 @@ def __init__(self, dimensions: int, *, device=None, dtype=None):
         ...
 
     @overload
-    def __init__(self, input: Tensor, *, length=0):
+    def __init__(self, input: Tensor, *, size=0):
         ...
 
     def __init__(self, dim_or_input: int, **kwargs):
-        self.length = kwargs.get("length", 0)
+        self.size = kwargs.get("size", 0)
         if torch.is_tensor(dim_or_input):
             self.value = dim_or_input
         else:
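
Note for callers: after this rename the `Sequence` constructor (and `DistinctSequence` below) only understands the `size=` keyword; passing `length=` would be silently ignored and the stored size would fall back to 0, because the value is read via `kwargs.get`. A minimal usage sketch under the new keyword; the import path is an assumption, since the docstring examples only reference the `functional` and `structures` modules:

    from torchhd import functional, structures

    letters_hv = functional.random_hv(26, 10000)        # one 10,000-d hypervector per letter
    S_empty = structures.Sequence(10000)                 # empty sequence, size starts at 0
    S_one = structures.Sequence(letters_hv[0], size=1)   # wrap an existing hypervector, declaring its size
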
@@ -511,7 +493,7 @@ def append(self, input: Tensor) -> None:
         """
         rotated_value = functional.permute(self.value, shifts=1)
         self.value = functional.bundle(input, rotated_value)
-        self.length += 1
+        self.size += 1
 
     def appendleft(self, input: Tensor) -> None:
         """Appends the input tensor to the left of the sequence.
@@ -526,7 +508,7 @@ def appendleft(self, input: Tensor) -> None:
         """
         rotated_input = functional.permute(input, shifts=len(self))
         self.value = functional.bundle(self.value, rotated_input)
-        self.length += 1
+        self.size += 1
 
     def pop(self, input: Tensor) -> None:
         """Pops the input tensor from the right of the sequence.
@@ -539,7 +521,7 @@ def pop(self, input: Tensor) -> None:
             >>> S.pop(letters_hv[0])
 
         """
-        self.length -= 1
+        self.size -= 1
         self.value = functional.bundle(self.value, -input)
         self.value = functional.permute(self.value, shifts=-1)
 
@@ -554,7 +536,7 @@ def popleft(self, input: Tensor) -> None:
             >>> S.popleft(letters_hv[1])
 
         """
-        self.length -= 1
+        self.size -= 1
         rotated_input = functional.permute(input, shifts=len(self))
         self.value = functional.bundle(self.value, -rotated_input)
 
@@ -571,10 +553,10 @@ def replace(self, index: int, old: Tensor, new: Tensor) -> None:
             >>> S.replace(0, letters_hv[0], letters_hv[1])
 
         """
-        rotated_old = functional.permute(old, shifts=-self.length + index + 1)
+        rotated_old = functional.permute(old, shifts=self.size - index - 1)
         self.value = functional.bundle(self.value, -rotated_old)
 
-        rotated_new = functional.permute(new, shifts=-self.length + index + 1)
+        rotated_new = functional.permute(new, shifts=self.size - index - 1)
         self.value = functional.bundle(self.value, rotated_new)
 
     def concat(self, seq: "Sequence") -> "Sequence":
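
This sign change is the substantive fix in the hunk: every later `append` permutes the existing bundle by one more position, so the element at position `index` sits in the bundle rotated by `size - index - 1` steps, and that is the rotation `replace` must apply to `old` before subtracting it (and to `new` before adding it). The previous code rotated in the opposite direction and therefore subtracted a vector that was never part of the bundle. A rough consistency check, reusing the assumed imports from the sketch above and plain PyTorch for the similarity:

    import torch
    from torchhd import functional, structures

    letters_hv = functional.random_hv(26, 10000)
    S = structures.Sequence(10000)
    for i in range(3):
        S.append(letters_hv[i])        # element i ends up stored rotated by size - i - 1

    S.replace(0, letters_hv[0], letters_hv[5])
    # With the corrected rotation, decoding position 0 should now match letters_hv[5]
    sims = torch.nn.functional.cosine_similarity(S[0].unsqueeze(0), letters_hv)
    print(sims.argmax())               # expected: tensor(5)
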
@@ -591,7 +573,7 @@ def concat(self, seq: "Sequence") -> "Sequence":
         """
         value = functional.permute(self.value, shifts=len(seq))
         value = functional.bundle(value, seq.value)
-        return Sequence(value, length=len(self) + len(seq))
+        return Sequence(value, size=len(self) + len(seq))
 
     def __getitem__(self, index: int) -> Tensor:
         """Gets the approximate value from given index.
@@ -605,7 +587,7 @@ def __getitem__(self, index: int) -> Tensor:
             tensor([ 1., -1., 1., ..., -1., 1., -1.])
 
         """
-        return functional.permute(self.value, shifts=-self.length + index + 1)
+        return functional.permute(self.value, shifts=-self.size + index + 1)
 
     def __len__(self) -> int:
         """Returns the length of the sequence.
@@ -616,7 +598,7 @@ def __len__(self) -> int:
             0
 
         """
-        return self.length
+        return self.size
 
     def clear(self) -> None:
         """Empties the sequence.
@@ -627,7 +609,7 @@ def clear(self) -> None:
 
         """
         self.value.fill_(0.0)
-        self.length = 0
+        self.size = 0
 
     @classmethod
     def from_tensor(cls, input: Tensor):
@@ -659,7 +641,7 @@ class DistinctSequence:
 
     Args:
         input (Tensor): tensor representing a distinct sequence.
-        length (int, optional): the size of the distinct sequence provided as input. Default: ``0``.
+        size (int, optional): the length of the distinct sequence provided as input. Default: ``0``.
 
     Examples::
 
@@ -671,11 +653,11 @@ def __init__(self, dimensions: int, *, device=None, dtype=None):
         ...
 
     @overload
-    def __init__(self, input: Tensor, *, length=0):
+    def __init__(self, input: Tensor, *, size=0):
         ...
 
     def __init__(self, dim_or_input: int, **kwargs):
-        self.length = kwargs.get("length", 0)
+        self.size = kwargs.get("size", 0)
         if torch.is_tensor(dim_or_input):
             self.value = dim_or_input
         else:
@@ -698,7 +680,7 @@ def append(self, input: Tensor) -> None:
         """
         rotated_value = functional.permute(self.value, shifts=1)
         self.value = functional.bind(input, rotated_value)
-        self.length += 1
+        self.size += 1
 
     def appendleft(self, input: Tensor) -> None:
         """Appends the input tensor to the left of the sequence.
@@ -713,7 +695,7 @@ def appendleft(self, input: Tensor) -> None:
         """
         rotated_input = functional.permute(input, shifts=len(self))
         self.value = functional.bind(self.value, rotated_input)
-        self.length += 1
+        self.size += 1
 
     def pop(self, input: Tensor) -> None:
         """Pops the input tensor from the right of the sequence.
@@ -726,7 +708,7 @@ def pop(self, input: Tensor) -> None:
             >>> DS.pop(letters_hv[0])
 
         """
-        self.length -= 1
+        self.size -= 1
         self.value = functional.bind(self.value, input)
         self.value = functional.permute(self.value, shifts=-1)
 
@@ -741,7 +723,7 @@ def popleft(self, input: Tensor) -> None:
             >>> DS.popleft(letters_hv[1])
 
         """
-        self.length -= 1
+        self.size -= 1
         rotated_input = functional.permute(input, shifts=len(self))
         self.value = functional.bind(self.value, rotated_input)
 
@@ -759,22 +741,22 @@ def replace(self, index: int, old: Tensor, new: Tensor) -> None:
             >>> DS.concat(DS1)
 
         """
-        rotated_old = functional.permute(old, shifts=-self.length + index + 1)
+        rotated_old = functional.permute(old, shifts=self.size - index - 1)
         self.value = functional.bind(self.value, rotated_old)
 
-        rotated_new = functional.permute(new, shifts=-self.length + index + 1)
+        rotated_new = functional.permute(new, shifts=self.size - index - 1)
         self.value = functional.bind(self.value, rotated_new)
 
     def __len__(self) -> int:
-        """Returns the size of the sequence.
+        """Returns the length of the sequence.
 
         Examples::
 
             >>> len(DS)
             0
 
         """
-        return self.length
+        return self.size
 
     def clear(self) -> None:
         """Empties the sequence.
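
`DistinctSequence.replace` uses the same corrected rotation, but elements are combined with `bind` instead of `bundle`, so the old element is removed by binding it in again (for the bipolar hypervectors used in the examples, binding a vector with itself cancels out) rather than by subtraction. A tiny sketch of the intended behaviour, reusing the hypothetical setup from the earlier snippets:

    DS = structures.DistinctSequence(10000)
    DS.append(letters_hv[0])
    DS.append(letters_hv[1])
    DS.replace(1, letters_hv[1], letters_hv[2])   # unbind the old element, bind the new one at the same rotation
    print(len(DS))                                # replace leaves the size unchanged: 2
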
@@ -785,7 +767,7 @@ def clear(self) -> None:
 
         """
         self.value.fill_(0.0)
-        self.length = 0
+        self.size = 0
 
     @classmethod
     def from_tensor(cls, input: Tensor):
@@ -812,13 +794,13 @@ class Graph:
 
     Args:
         dimensions (int): number of dimensions of the graph.
-        directed (bool): decidies if the graph will be directed or not.
+        directed (bool): decides if the graph will be directed or not.
         dtype (``torch.dtype``, optional): the desired data type of returned tensor. Default: if ``None``, uses a global default (see ``torch.set_default_tensor_type()``).
         device (``torch.device``, optional): the desired device of returned tensor. Default: if ``None``, uses the current device for the default tensor type (see torch.set_default_tensor_type()). ``device`` will be the CPU for CPU tensor types and the current CUDA device for CUDA tensor types.
 
     Args:
         input (Tensor): tensor representing a graph hypervector.
-        directed (bool): decidies if the graph will be directed or not.
+        directed (bool): decides if the graph will be directed or not.
 
     Examples::
 
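
The `Graph` changes are only docstring typo fixes, but for completeness, a constructor call matching the documented argument list might look like the line below (a sketch; the exact keyword handling is assumed from the overloads, not shown in this diff):

    G = structures.Graph(10000, directed=True)   # directed graph over 10,000-dimensional hypervectors
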