#!/usr/bin/python3
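"""Sync release announcements from the Discourse forum into the website tree.

Existing entries under content/<project>/news/*.yaml are refreshed from their
"origin" topic, then any topic in the /c/news category that is tagged
"release" plus a known project tag is imported as a new YAML entry.

Assumed invocation (not documented in this file): run from the root of the
website checkout with a Discourse API key in the API_KEY environment variable.
"""
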
from bs4 import BeautifulSoup as BS
import dateutil.parser
import glob
import json
import os
from urllib.request import urlopen, Request
import ruamel.yaml
import time

API_URL = "https://discuss.linuxcontainers.org"
API_USERNAME = "system"
API_KEY = os.environ["API_KEY"]
PROJECTS = ("incus", "lxc", "lxcfs", "distrobuilder")

def load_json(url):
    # Throttle requests a little to stay friendly with the forum.
    time.sleep(1)

    # Accept either a bare path or a full forum URL (the "origin" field);
    # rebuild it as an authenticated JSON API request against API_URL.
    url = url.split(API_URL)[-1]
    url = "%s/%s.json" % (API_URL, url)

    headers = {}
    headers["Api-Key"] = API_KEY
    headers["Api-Username"] = API_USERNAME
    headers["Content-Type"] = "application/json"

    req = Request(url, headers=headers)
    return json.loads(urlopen(req).read().decode())
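
# Illustrative use (hypothetical topic ID 12345): load_json("/t/12345") and
# load_json("%s/t/some-slug/12345" % API_URL) both end up requesting the
# topic's JSON representation and return the decoded document.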

# Update all existing posts
updated = []
for project in PROJECTS:
    for post in glob.glob("content/%s/news/*.yaml" % project):
        yaml = ruamel.yaml.YAML()
        with open(post, "r") as fd:
            old_entry = yaml.load(fd)

        # Only entries that came from the forum carry an "origin" URL.
        if "origin" not in old_entry:
            continue

        try:
            dis_topic = load_json(old_entry["origin"])
            dis_post = load_json("/posts/%s" %
                                 dis_topic["post_stream"]["posts"][0]["id"])
        except Exception:
            print("Failed to load: %s" % old_entry["origin"])
            continue

        # Template preserving the field order and block style of the entries.
        entry = yaml.load("""
title: foo
date: bar
origin: baz
content: |-
    blah""")

        entry["content"] = dis_post["raw"]

        # Rewrite Discourse "upload://<sha1>.png" references in the raw
        # Markdown to the resolved image URLs found in the cooked HTML.
        soup = BS(dis_post["cooked"], features="html.parser")
        for imgtag in soup.find_all('img'):
            try:
                img_hash = imgtag['data-base62-sha1']
                img_src = imgtag['src']
                entry["content"] = entry["content"].replace(
                    "upload://%s.png" % img_hash, img_src)
            except KeyError:
                pass

        entry["title"] = dis_topic["title"]

        date = dateutil.parser.parse(dis_topic["created_at"])
        entry["date"] = date.strftime("%Y/%m/%d %H:%M")

        entry["origin"] = "%s/t/%s/%s" % (API_URL, dis_topic["slug"],
                                          dis_topic["id"])

        # Only rewrite the file when something actually changed.
        if entry != old_entry:
            with open(post, "w+") as fd:
                yaml.dump(entry, fd)
            print("%s => updated" % dis_topic["title"])
        else:
            print("%s => skipping (up to date)" % dis_topic["title"])

        updated.append(dis_topic["id"])
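
# Topic IDs handled above are remembered so the import pass below skips
# announcements that already have a local entry.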

# Import new posts
news = load_json("/c/news")
for topic in news["topic_list"]["topics"]:
    if topic["id"] in updated:
        print("%s => skipping (already exists)" % topic["title"])
        continue

    try:
        dis_topic = load_json("/t/%s" % topic["id"])
        dis_post = load_json("/posts/%s" %
                             dis_topic["post_stream"]["posts"][0]["id"])
    except Exception:
        print("Failed to load: %s" % topic["id"])
        continue

    # Only import topics tagged as release announcements.
    if "release" not in dis_topic["tags"]:
        continue

    # Map the topic to a project directory through its tags; the for/else
    # skips topics that don't carry any known project tag.
    for project in PROJECTS:
        if project not in dis_topic["tags"]:
            continue

        path = "content/%s/news/%s.yaml" % (project, dis_topic["slug"])
        break
    else:
        print("%s => skipping (unknown project: %s)" % (dis_topic["title"],
                                                        dis_topic["tags"]))
        continue

    yaml = ruamel.yaml.YAML()
    entry = yaml.load("""
title: foo
date: bar
origin: baz
content: |-
    blah""")

    entry["content"] = dis_post["raw"]

    # Same image URL rewriting as in the update pass above.
    soup = BS(dis_post["cooked"], features="html.parser")
    for imgtag in soup.find_all('img'):
        try:
            img_hash = imgtag['data-base62-sha1']
            img_src = imgtag['src']
            entry["content"] = entry["content"].replace(
                "upload://%s.png" % img_hash, img_src)
        except KeyError:
            pass

    entry["title"] = dis_topic["title"]

    date = dateutil.parser.parse(dis_topic["created_at"])
    entry["date"] = date.strftime("%Y/%m/%d %H:%M")

    entry["origin"] = "%s/t/%s/%s" % (API_URL, dis_topic["slug"],
                                      dis_topic["id"])

    with open(path, "w+") as fd:
        yaml.dump(entry, fd)

    print("%s => imported" % dis_topic["title"])