学号尾号为7、8、9、0的同学完成本题:对《聊斋》文本进行分词,统计并输出出现次数最高的20个词。
# -*- coding: utf-8 -*-
"""
Created on Sat Dec 23 18:00:49 2023
@author: 86135

Segment the text of "聊斋" (Strange Tales from a Chinese Studio) with jieba,
count word frequencies, and write the 20 most frequent multi-character words
to top_20_words.txt, one per line as "<word><count>" (word left-aligned in 10
columns, count right-aligned in 5).
"""
from collections import Counter

import jieba

# Read the whole source text; the context manager guarantees the file handle
# is closed even if reading raises.
path = "C:\\Users\\86135\\Desktop\\聊斋.txt"
with open(path, "r", encoding="utf-8") as file:
    text = file.read()

# Segment with jieba, then count frequencies. Words of length 1 are skipped:
# single characters are mostly particles and function words, not meaningful
# vocabulary for this exercise.
words = jieba.lcut(text)
counts = Counter(word for word in words if len(word) > 1)

# Counter.most_common(20) returns the entries sorted by count, descending,
# and caps the result at 20 even when fewer distinct words exist — this
# replaces the manual list/sort/min(20, len) dance.
with open("top_20_words.txt", "w", encoding="utf-8") as output_file:
    for word, count in counts.most_common(20):
        print(f"{word:<10}{count:>5}", file=output_file)