@@ -155,7 +155,7 @@ extern int mmap_rnd_compat_bits __read_mostly;
/* This function must be updated when the size of struct page grows above 80
* or reduces below 56. The idea that compiler optimizes out switch()
* statement, and only leaves move/store instructions. Also the compiler can
- * combine write statments if they are both assignments and can be reordered,
+ * combine write statements if they are both assignments and can be reordered,
* this can result in several of the writes here being dropped.
*/
#define mm_zero_struct_page(pp) __mm_zero_struct_page(pp)
@@ -554,7 +554,7 @@ static void lru_deactivate_file_fn(struct page *page, struct lruvec *lruvec)
} else {
/*
* The page's writeback ends up during pagevec
- * We moves tha page into tail of inactive.
+ * We moves that page into tail of inactive.
*/
add_page_to_lru_list_tail(page, lruvec);
__count_vm_events(PGROTATED, nr_pages);
tha -> that in mm/swap.c
statments -> statements in include/linux/mm.h

Signed-off-by: Hyeonggon Yoo <42.hyeyoo@gmail.com>
---
 include/linux/mm.h | 2 +-
 mm/swap.c          | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
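
Note for context (not part of the commit): the comment touched in include/linux/mm.h
describes a zeroing helper built around a switch() on the structure size, where the
compiler is expected to drop the switch and keep only plain stores. Below is a minimal
standalone sketch of that idea under invented names; struct demo_page, its 64-byte
size, and the main() harness are all made up for illustration and are not the kernel's
__mm_zero_struct_page.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct demo_page {
	uint64_t words[8];	/* 64 bytes, standing in for struct page */
};

static inline void demo_zero_struct_page(struct demo_page *p)
{
	uint64_t *w = p->words;

	/*
	 * sizeof() is a compile-time constant, so only one case survives
	 * optimization; the fallthroughs collapse into a run of 8-byte stores.
	 */
	switch (sizeof(struct demo_page)) {
	case 64:
		w[7] = 0;
		/* fall through */
	case 56:
		w[6] = 0;
		w[5] = 0;
		w[4] = 0;
		w[3] = 0;
		w[2] = 0;
		w[1] = 0;
		w[0] = 0;
		break;
	default:
		memset(p, 0, sizeof(*p));
		break;
	}
}

int main(void)
{
	struct demo_page p;

	memset(&p, 0xff, sizeof(p));
	demo_zero_struct_page(&p);
	printf("words[0] = %llu\n", (unsigned long long)p.words[0]);
	return 0;
}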