Fix typos found by codespell in docs, docstrings, and comments.
where
- * parser is the :class:`OptionParser` instance the group will be insterted in
- to
+ * parser is the :class:`OptionParser` instance the group will be inserted
+ into
* title is the group title
* description, optional, is a long description of the group
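For reference, a minimal sketch of how these three arguments fit together (the option names here are illustrative, not taken from the patched file)::

    from optparse import OptionParser, OptionGroup

    parser = OptionParser()
    group = OptionGroup(parser, "Dangerous Options",
                        "Caution: use these options at your own risk.")
    group.add_option("--force", action="store_true", help="force the operation")
    parser.add_option_group(group)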
.. Things will get more complicated for future Linux versions, since
ALSA is in the standard kernel as of 2.5.x. Presumably if you
use ALSA, you'll have to make sure its OSS compatibility layer
- is active to use ossaudiodev, but you're gonna need it for the vast
+ is active to use ossaudiodev, but you're going to need it for the vast
majority of Linux audio apps anyway.
Sounds like things are also complicated for other BSDs. In response
microphone input::
mixer.setrecsrc (1 << ossaudiodev.SOUND_MIXER_MIC)
-
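A minimal sketch of selecting the recording source, assuming a Linux system where the OSS mixer device is present::

    import ossaudiodev

    mixer = ossaudiodev.openmixer()
    # recording sources are chosen with a bit mask of SOUND_MIXER_* constants
    mixer.setrecsrc(1 << ossaudiodev.SOUND_MIXER_MIC)
    mixer.close()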
In documentation the archive is also referred to as the "base" and the shared
library object is referred to as the "member".
-For dlopen() on AIX (read initAndLoad()) the calls are similiar.
+For dlopen() on AIX (read initAndLoad()) the calls are similar.
Default activity occurs when no path information is provided. When path
-information is provided dlopen() does not search any other directories.
+information is provided, dlopen() does not search any other directories.
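The default-search versus explicit-path distinction is visible from ctypes as well; a sketch, where the explicit path is a hypothetical example::

    import ctypes
    from ctypes.util import find_library

    # no path: dlopen() performs its default search (find_library may
    # return None on systems without a locatable libc)
    libc = ctypes.CDLL(find_library("c"))
    # explicit path: dlopen() loads exactly this file, searching nowhere else
    # libm = ctypes.CDLL("/usr/lib/libm.so")   # hypothetical path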
if re.match("[0-9]", line):
info.append(line)
else:
- # blank line (seperator), consume line and end for loop
+ # blank line (separator), consume line and end for loop
break
return info
Using the decoded list of parameters and values, format them according to
the RFC rules, including using RFC2231 encoding if the value cannot be
- expressed in 'encoding' and/or the paramter+value is too long to fit within
- 'maxlen'.
+ expressed in 'encoding' and/or the parameter+value is too long to fit
+ within 'maxlen'.
"""
# Special case for RFC2231 encoding: start from decoded values and use
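The behavior described in the docstring can be observed through the public email API; a sketch, assuming a parameter value that cannot be expressed in ASCII::

    from email.message import EmailMessage

    msg = EmailMessage()
    msg['Content-Disposition'] = 'attachment'
    msg.set_param('filename', 'résumé.txt',
                  header='Content-Disposition', charset='utf-8')
    print(msg['Content-Disposition'])
    # attachment; filename*=utf-8''r%C3%A9sum%C3%A9.txt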
Changing any of the font vars invokes var_changed_font, which
adds all 3 font options to changes and calls set_samples.
Set_samples applies a new font constructed from the font vars to
- font_sample and to highlight_sample on the hightlight page.
+ font_sample and to highlight_sample on the highlight page.
Tabs: Enable users to change spaces entered for indent tabs.
Changing indent_scale value with the mouse sets Var space_num,
Called on font initialization and change events.
Accesses font_name, font_size, and font_bold Variables.
- Updates font_sample and hightlight page highlight_sample.
+ Updates font_sample and highlight page highlight_sample.
"""
font_name = self.font_name.get()
font_weight = tkFont.BOLD if self.font_bold.get() else tkFont.NORMAL
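Outside of IDLE, the same (family, size, weight) font construction can be sketched with plain Tk; the widget and values here are assumptions::

    import tkinter as tk
    from tkinter import font as tkFont

    root = tk.Tk()
    sample = tk.Label(root, text='AaBbCc 0123')
    sample.pack()
    # a font tuple built the same way the dialog builds one
    sample['font'] = ('Courier', 12, tkFont.BOLD)
    root.mainloop()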
def_op('BUILD_TUPLE', 102) # Number of tuple items
def_op('BUILD_LIST', 103) # Number of list items
def_op('BUILD_SET', 104) # Number of set items
-def_op('BUILD_MAP', 105) # Number of dict entries (upto 255)
+def_op('BUILD_MAP', 105) # Number of dict entries
name_op('LOAD_ATTR', 106) # Index in name list
def_op('COMPARE_OP', 107) # Comparison operator
hascompare.append(107)
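These opcodes can be inspected with the dis module; a sketch (the exact opcodes emitted vary across CPython versions)::

    import dis

    dis.dis("{k: v}")    # may show BUILD_MAP with oparg 1 (one dict entry)
    dis.dis("a == b")    # may show COMPARE_OP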
# Make sure comparison doesn't forget microseconds, and isn't done
# via comparing a float timestamp (an IEEE double doesn't have enough
- # precision to span microsecond resolution across years 1 thru 9999,
+ # precision to span microsecond resolution across years 1 through 9999,
# so comparing via timestamp necessarily calls some distinct values
# equal).
dt1 = self.theclass(MAXYEAR, 12, 31, 23, 59, 59, 999998)
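A sketch of the precision loss the comment is guarding against, using an aware datetime so that .timestamp() is well defined::

    from datetime import datetime, timezone

    a = datetime(9999, 12, 31, 23, 59, 59, 999998, tzinfo=timezone.utc)
    b = a.replace(microsecond=999999)
    print(a == b)                           # False: distinct datetimes
    print(a.timestamp() == b.timestamp())   # likely True: the double rounds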
Test for the function completedefault():
>>> mycmd.completedefault()
- This is the completedefault methode
+ This is the completedefault method
>>> mycmd.completenames("a")
['add']
print("Hello from postloop")
def completedefault(self, *ignored):
- print("This is the completedefault methode")
+ print("This is the completedefault method")
def complete_command(self):
print("complete command")
queue_management_thread = executor._queue_management_thread
del executor
- # Make sure that all the executor ressources were properly cleaned by
+ # Make sure that all the executor resources were properly cleaned by
# the shutdown process
queue_management_thread.join()
for p in processes.values():
# extensive testing for deadlock caused by crashes in a pool.
self.executor.shutdown(wait=True)
crash_cases = [
- # Check problem occuring while pickling a task in
+ # Check problem occurring while pickling a task in
# the task_handler thread
(id, (ErrorAtPickle(),), PicklingError, "error at task pickle"),
- # Check problem occuring while unpickling a task on workers
+ # Check problem occurring while unpickling a task on workers
(id, (ExitAtUnpickle(),), BrokenProcessPool,
"exit at task unpickle"),
(id, (ErrorAtUnpickle(),), BrokenProcessPool,
"error at task unpickle"),
(id, (CrashAtUnpickle(),), BrokenProcessPool,
"crash at task unpickle"),
- # Check problem occuring during func execution on workers
+ # Check problem occurring during func execution on workers
(_crash, (), BrokenProcessPool,
"crash during func execution on worker"),
(_exit, (), SystemExit,
"exit during func execution on worker"),
(_raise_error, (RuntimeError, ), RuntimeError,
"error during func execution on worker"),
- # Check problem occuring while pickling a task result
+ # Check problem occurring while pickling a task result
# on workers
(_return_instance, (CrashAtPickle,), BrokenProcessPool,
"crash during result pickle on worker"),
"exit during result pickle on worker"),
(_return_instance, (ErrorAtPickle,), PicklingError,
"error during result pickle on worker"),
- # Check problem occuring while unpickling a task in
+ # Check problem occurring while unpickling a task in
# the result_handler thread
(_return_instance, (ErrorAtUnpickle,), BrokenProcessPool,
"error during result unpickle in result_handler"),
succs[final].remove(corner)
add_to_successors(this)
- # Generate moves 3 thru m*n-1.
+ # Generate moves 3 through m*n-1.
def advance(len=len):
# If some successor has only one exit, must take it.
# Else favor successors with fewer exits.
yield i
add_to_successors(i)
- # Generate moves 3 thru m*n-1. Alternative version using a
+ # Generate moves 3 through m*n-1. Alternative version using a
# stronger (but more expensive) heuristic to order successors.
# Since the # of backtracking levels is m*n, a poor move early on
# can take eons to undo. Smallest square board for which this
# has to keep looping to find the first object we delete.
objs.reverse()
- # Turn on mutation in C.__eq__. The first time thru the loop,
+ # Turn on mutation in C.__eq__. The first time through the loop,
# under the iterkeys() business the first comparison will delete
# the last item iterkeys() would see, and that causes a
# RuntimeError: dictionary changed size during iteration
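The modern equivalent of that failure is easy to demonstrate::

    d = {1: 'a', 2: 'b', 3: 'c'}
    try:
        for k in d:
            del d[k]    # mutate while iterating
    except RuntimeError as exc:
        print(exc)      # dictionary changed size during iteration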
['a_directory', 'test_directory', 'test_directory2'])
# load_tests should have been called once with loader, tests and pattern
- # (but there are no tests in our stub module itself, so thats [] at the
- # time of call.
+ # (but there are no tests in our stub module itself, so that is [] at
+ # the time of call).
self.assertEqual(Module.load_tests_args,
[(loader, [], 'test*')])
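For context, the load_tests protocol this assertion exercises looks like the following in a test module; a minimal sketch::

    import unittest

    def load_tests(loader, tests, pattern):
        # invoked by the loader with the standard (loader, tests, pattern) args
        suite = unittest.TestSuite()
        suite.addTests(tests)
        return suite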
return _is_coroutine(coro);
}
- /* either an error has occured or
+ /* either an error has occurred or
type(coro) is in iscoroutine_typecache
*/
return has_it;
PyErr_SetString(PyExc_ValueError, "fromutc: tz.dst() gave"
"inconsistent results; cannot convert");
- /* fall thru to failure */
+ /* fall through to failure */
Fail:
Py_XDECREF(off);
Py_XDECREF(dst);
* This macro generates constructor function definitions for specific
* hash algorithms. These constructors are much faster than calling
* the generic one passing it a python string and are noticeably
- * faster than calling a python new() wrapper. Thats important for
+ * faster than calling a python new() wrapper. That is important for
* code that wants to make hashes of a bunch of small strings.
* The first call will lazy-initialize, which reports an exception
* if initialization fails.
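The speed difference that motivates the macro is visible from Python; a sketch (timings are indicative only)::

    import hashlib
    import timeit

    direct = timeit.timeit(lambda: hashlib.md5(b'small'), number=100000)
    generic = timeit.timeit(lambda: hashlib.new('md5', b'small'), number=100000)
    print(direct, generic)   # direct construction is typically faster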
if(_Pickler_CommitFrame(self)) {
return -1;
}
- /* Flush the content of the commited frame to the underlying
+ /* Flush the content of the committed frame to the underlying
* file and reuse the pickler buffer for the next frame so as
* to limit memory usage when dumping large complex objects to
* a file.
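The frames being committed here can be seen with pickletools; a sketch, assuming protocol 4, where framing was introduced::

    import pickle
    import pickletools

    data = pickle.dumps(list(range(10)), protocol=4)
    pickletools.dis(data)   # a FRAME opcode appears right after PROTO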
md5->curlen = 0;
}
- /* pad upto 56 bytes of zeroes */
+ /* pad up to 56 bytes of zeroes */
while (md5->curlen < 56) {
md5->buf[md5->curlen++] = (unsigned char)0;
}
}
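The padding rule can be stated in a few lines; a sketch of the standard MD5 scheme (one 0x80 byte, zeros until the length is 56 mod 64, then the 64-bit bit length)::

    def md5_padding(msg_len):
        # 0x80, zeros to reach 56 (mod 64), then the message length in bits
        pad = b'\x80' + b'\x00' * ((55 - msg_len) % 64)
        return pad + (msg_len * 8).to_bytes(8, 'little')

    assert (12 + len(md5_padding(12))) % 64 == 0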
#if defined(__APPLE__) && defined(AI_NUMERICSERV)
if ((flags & AI_NUMERICSERV) && (pptr == NULL || (pptr[0] == '0' && pptr[1] == 0))) {
- /* On OSX upto at least OSX 10.8 getaddrinfo crashes
+ /* On OSX up to at least OSX 10.8 getaddrinfo crashes
* if AI_NUMERICSERV is set and the servname is NULL or "0".
* This workaround avoids a segfault in libsystem.
*/
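From Python the flag is reached through the socket module; a sketch (the crash only affected a NULL or "0" service on the OSX versions noted above)::

    import socket

    # a numeric service with AI_NUMERICSERV resolves normally
    info = socket.getaddrinfo('localhost', '80', flags=socket.AI_NUMERICSERV)
    print(info[0][4])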
* checks.
*
- * Return 0 if the memory debug hooks are not installed or no statistics was
- * writen into out, return 1 otherwise.
+ * Return 0 if the memory debug hooks are not installed or no statistics were
+ * written into out, return 1 otherwise.
*/
int
_PyObject_DebugMallocStats(FILE *out)
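The same statistics are reachable from Python through an internal helper; a sketch (CPython-specific, output goes to stderr)::

    import sys

    sys._debugmallocstats()   # returns None; details depend on the allocator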
-/* The addend 82520, was selected from the range(0, 1000000) for
+/* The addend 82520 was selected from the range(0, 1000000) for
generating the greatest number of prime multipliers for tuples
- upto length eight:
+ up to length eight:
1082527, 1165049, 1082531, 1165057, 1247581, 1330103, 1082533,
1330111, 1412633, 1165069, 1247599, 1495177, 1577699
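The primality claim in this comment is easy to spot-check; a sketch using trial division::

    def is_prime(n):
        if n < 2:
            return False
        i = 2
        while i * i <= n:
            if n % i == 0:
                return False
            i += 1
        return True

    multipliers = [1082527, 1165049, 1082531, 1165057, 1247581, 1330103,
                   1082533, 1330111, 1412633, 1165069, 1247599, 1495177,
                   1577699]
    print(all(is_prime(m) for m in multipliers))   # expected: True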
/* PyType_Ready is the closest thing we have to a choke point
* for type objects, so is the best place I can think of to try
* to get type objects into the doubly-linked list of all objects.
- * Still, not all type objects go thru PyType_Ready.
+ * Still, not all type objects go through PyType_Ready.
*/
_Py_AddToAllObjects((PyObject *)type, 0);
#endif
}
}
- /* We are into optional args, skip thru to any remaining
+ /* We are into optional args, skip through to any remaining
* keyword args */
msg = skipitem(&format, p_va, flags);
if (msg) {
return cleanreturn(1, &freelist);
}
- /* We are into optional args, skip thru to any remaining
+ /* We are into optional args, skip through to any remaining
* keyword args */
msg = skipitem(&format, p_va, flags);
assert(msg == NULL);