import wordcloud
import matplotlib.pyplot as plt
from imageio import imread
# Load the mask image that shapes the word cloud.
# NOTE(review): imageio.imread is deprecated as of imageio v3; prefer
# `import imageio.v2 as imageio` when upgrading — confirm installed version.
img = imread(r'F:\PyCharm\test\bg.jpg')

# Text to build the word cloud from (a paper abstract).
# Cleaned of PDF-extraction artifacts: the per-line numbers ("001"…"010")
# are removed so they do not appear as words in the cloud, and words split
# by hyphenated line breaks ("super- ficially") are rejoined.
text = '''
Why can pre-trained language models (PLMs)
learn universal representations and effectively
adapt to broad NLP tasks differing a lot superficially?
In this work, we empirically find evidence
indicating that the adaptations of PLMs
to various few-shot tasks can be reparameterized
as optimizing only a few free parameters
in a unified low-dimensional intrinsic task
subspace, which may help us understand why
PLMs could easily adapt to various NLP tasks
with small-scale data. To find such a subspace
and examine its universality, we propose an
analysis pipeline called intrinsic prompt tuning (IPT).
'''


# Configure the word cloud renderer.
wc = wordcloud.WordCloud(
    background_color="white",  # canvas background colour
    width=800, height=400,      # canvas size in pixels
    margin=5,                   # spacing between words
    # NOTE(review): per the wordcloud docs, width/height are ignored when a
    # mask is supplied — the mask image's dimensions are used instead; verify.
    mask=img
)


# Compute word frequencies from the text and lay out the cloud.
wc.generate(text)

# Render the word cloud with matplotlib.
plt.imshow(wc, interpolation="bilinear")
plt.axis("off")  # hide the axes
plt.show()