@@ -86,7 +86,7 @@ class DataFile(
8686 The possible mime-types of data files represented by each matching filename pattern in
8787 :py:attr:`Datafile.pattern`.
8888 patterns (list):
89- A list of filename extenion glob patterns that matrches the expected filename patterns for a DataFile
89+	            A list of filename extension glob patterns that matches the expected filename patterns for a DataFile
9090 (*.txt and *.dat")
9191 priority (int):
9292 Used to indicathe order in which subclasses of :py:class:`DataFile` are tried when loading data. A higher
@@ -96,11 +96,11 @@ class DataFile(
9696 shape (tuple of integers):
9797 Returns the shape of the data (rows,columns) - equivalent to self.data.shape.
9898 records (numpy record array):
99- Returns the data in the form of a list of yuples where each tuple maps to the columsn names.
99+	            Returns the data in the form of a list of tuples where each tuple maps to the column names.
100100 clone (DataFile):
101101 Creates a deep copy of the :py:class`DataFile` object.
102102 dict_records (array of dictionaries):
103- View the data as an array or dictionaries where each dictionary represnets one row with keys dervied
103+ View the data as an array or dictionaries where each dictionary represents one row with keys derived
104104 from column headers.
105105 dims (int):
106106 When data columns are set as x,y,z etc. returns the number of dimensions implied in the data set
@@ -257,7 +257,7 @@ def __init__(self, *args, **kargs):
257257 # ============================================================================================
258258
259259 def _init_single (self , * args , ** kargs ):
260- """Handle constructor with 1 arguement - called from __init__."""
260+ """Handle constructor with 1 argument - called from __init__."""
261261 arg = args [0 ]
262262 inits = {
263263 path_types + (bool , bytes , io .IOBase ): self ._init_load ,
@@ -396,7 +396,7 @@ def _init_list(self, arg, **kargs):
396396 raise TypeError (f"Unable to construct DataFile from a { type (arg )} " )
397397
398398 # ============================================================================================
399- ############################ Speical Methods ###############################################
399+ ############################ Special Methods ###############################################
400400 # ============================================================================================
401401
402402 def __call__ (self , * args , ** kargs ):
@@ -448,7 +448,7 @@ def __deepcopy__(self, memo):
448448 return result
449449
450450 def __dir__ (self ):
451- """Reeturns the attributes of the current object.
451+ """Returns the attributes of the current object.
452452
453453 Augmenting the keys of self.__dict__ with the attributes that __getattr__ will handle."""
454454 attr = dir (type (self ))
@@ -638,7 +638,7 @@ def _load(self, filename, *args, **kargs):
638638 Raised if the first row does not start with 'TDI Format 1.5' or 'TDI Format=1.0'.
639639
640640 Note:
641- The *_load* methods shouldbe overidden in each child class to handle the process of loading data from
641+	            The *_load* methods should be overridden in each child class to handle the process of loading data from
642642 disc. If they encounter unexpected data, then they should raise StonerLoadError to signal this, so that
643643 the loading class can try a different sub-class instead.
644644 """
@@ -916,7 +916,7 @@ def add_column(self, column_data, header=None, index=None, func_args=None, repla
916916
917917 Returns:
918918 self:
919- The :py:class:`DataFile` instance with the additonal column inserted.
919+ The :py:class:`DataFile` instance with the additional column inserted.
920920
921921 Note:
922922 Like most :py:class:`DataFile` methods, this method operates in-place in that it also modifies
@@ -991,7 +991,7 @@ def add_column(self, column_data, header=None, index=None, func_args=None, repla
991991
992992 # If not replacing, then add extra columns to existing data.
993993 if not replace :
994- colums = copy .copy (self .column_headers )
994+ columns = copy .copy (self .column_headers )
995995 old_setas = self .setas .clone
996996 if index == self .data .shape [1 ]: # appending column
997997 self .data = DataArray (np .append (self .data , np_data , axis = 1 ), setas = self .setas .clone )
@@ -1003,10 +1003,10 @@ def add_column(self, column_data, header=None, index=None, func_args=None, repla
10031003 setas = self .setas .clone ,
10041004 )
10051005 for ix in range (0 , index ):
1006- self .column_headers [ix ] = colums [ix ]
1006+ self .column_headers [ix ] = columns [ix ]
10071007 self .setas [ix ] = old_setas [ix ]
10081008 for ix in range (index , dc ):
1009- self .column_headers [ix + cw ] = colums [ix ]
1009+ self .column_headers [ix + cw ] = columns [ix ]
10101010 self .setas [ix + cw ] = old_setas [ix ]
10111011 # Check that we don't need to expand to overwrite with the new data
10121012 if index + cw > self .shape [1 ]:
@@ -1069,7 +1069,7 @@ def del_column(self, col=None, duplicates=False):
10691069 - If duplicates is True and col is None then all duplicate columns are removed,
10701070 - if col is not None and duplicates is True then all duplicates of the specified column are removed.
10711071 - If duplicates is False and *col* is either None or False then all masked coplumns are deleeted. If
1072- *col* is True, then all columns that are not set i the :py:attr:`setas` attrobute are delted .
1072+	              *col* is True, then all columns that are not set in the :py:attr:`setas` attribute are deleted.
10731073 - If col is a list (duplicates should not be None) then the all the matching columns are found.
10741074 - If col is an iterable of booleans, then all columns whose elements are False are deleted.
10751075 - If col is None and duplicates is None, then all columns with at least one elelemtn masked
@@ -1142,7 +1142,7 @@ def del_nan(self, col=None, clone=False):
11421142 else : # Not cloning so ret is self
11431143 ret = self
11441144
1145- if col is None : # If col is still None, use all columsn that are set to any value in self.setas
1145+ if col is None : # If col is still None, use all columns that are set to any value in self.setas
11461146 col = [ix for ix , col in enumerate (self .setas ) if col != "." ]
11471147 if not isLikeList (col ): # If col isn't a list, make it one now
11481148 col = [col ]
@@ -1163,13 +1163,13 @@ def del_rows(self, col=None, val=None, invert=False):
11631163
11641164 Args:
11651165 col (list,slice,int,string, re, callable or None):
1166- Column containg values to search for.
1166+ Column containing values to search for.
11671167 val (float or callable):
11681168 Specifies rows to delete. Maybe:
11691169 - None - in which case the *col* argument is used to identify rows to be deleted,
11701170 - a float in which case rows whose columncol = val are deleted
11711171 - or a function - in which case rows where the function evaluates to be true are deleted.
1172- - a tuple, in which case rows where column col takes value between the minium and maximum of
1172+ - a tuple, in which case rows where column col takes value between the minimum and maximum of
11731173 the tuple are deleted.
11741174
11751175 Keyword Arguments:
@@ -1247,7 +1247,7 @@ def del_rows(self, col=None, val=None, invert=False):
12471247 return self
12481248
12491249 def dir (self , pattern = None ):
1250- """Return a list of keys in the metadata, filtering wiht a regular expression if necessary.
1250+ """Return a list of keys in the metadata, filtering with a regular expression if necessary.
12511251
12521252 Keyword Arguments:
12531253 pattern (string or re):
@@ -1271,7 +1271,7 @@ def filter(self, func=None, cols=None, reset=True):
12711271
12721272 Args:
12731273 func (callable):
1274- is a callable object that should take a single list as a p[arameter representing one row.
1274+	                is a callable object that should take a single list as a parameter representing one row.
12751275 cols (list):
12761276 a list of column indices that are used to form the list of values passed to func.
12771277 reset (bool):
@@ -1357,7 +1357,7 @@ def load(cls, *args, **kargs):
13571357
13581358 Each subclass is scanned in turn for a class attribute priority which governs the order in which they
13591359 are tried. Subclasses which can make an early positive determination that a file has the correct format
1360- can have higher priority levels. Classes should return a suitable expcetion if they fail to load the file.
1360+ can have higher priority levels. Classes should return a suitable exception if they fail to load the file.
13611361
13621362 If no class can load a file successfully then a StonerUnrecognisedFormat exception is raised.
13631363 """
@@ -1569,7 +1569,7 @@ def to_pandas(self):
15691569 Notes:
15701570 In addition to transferring the numerical data, the DataFrame's columns are set to
15711571 a multi-level index of the :py:attr:`Stoner.Data.column_headers` and :py:attr:`Stoner.Data.setas`
1572- calues . A pandas DataFrame extension attribute, *metadata* is registered and is used to store
1572+	            values. A pandas DataFrame extension attribute, *metadata* is registered and is used to store
15731573 the metada from the :py:class:1Stoner.Data` object. This pandas extension attribute is in fact a trivial
15741574 subclass of the :py:class:`Stoner.core.typeHintedDict`.
15751575
0 commit comments