#!/usr/bin/python
#
# Check/update default wiki pages from the Trac project website.
#
# Note: This is a development tool used in Trac packaging/QA, not something
# particularly useful for end-users.
#
# Author: Daniel Lundin <daniel@edgewall.com>

import httplib
import re
import sys
import getopt

# Pages to include in distribution.
# Kept as one name per line inside a single string; .split() turns it
# into the list of page names, in the same order as before.
wiki_pages = """
    CamelCase
    InterMapTxt
    InterTrac
    InterWiki
    RecentChanges
    TitleIndex
    TracAccessibility
    TracAdmin
    TracBackup
    TracBrowser
    TracCgi
    TracChangeset
    TracEnvironment
    TracFastCgi
    TracGuide
    TracImport
    TracIni
    TracInstall
    TracInterfaceCustomization
    TracLinks
    TracLogging
    TracModPython
    TracNotification
    TracPermissions
    TracPlugins
    TracQuery
    TracReports
    TracRevisionLog
    TracRoadmap
    TracRss
    TracSearch
    TracStandalone
    TracSupport
    TracSyntaxColoring
    TracTickets
    TracTicketsCustomFields
    TracTimeline
    TracUnicode
    TracUpgrade
    TracWiki
    WikiDeletePage
    WikiFormatting
    WikiHtml
    WikiMacros
    WikiNewPage
    WikiPageNames
    WikiProcessors
    WikiRestructuredText
    WikiRestructuredTextLinks
""".split()

def get_page_from_file(pname):
    """Read the wiki page `pname` from a local file of the same name.

    Returns the page text, or '' (after printing a notice) when the
    file cannot be read.
    """
    d = ''
    try:
        f = open(pname, 'r')
        try:
            d = f.read()
        finally:
            # Close even if read() fails; the original leaked the handle.
            f.close()
    except (IOError, OSError):
        # Only file-access errors mean "missing page"; the original bare
        # except also swallowed KeyboardInterrupt/SystemExit and real bugs.
        print("Missing page: %s" % pname)
    return d

def get_page_from_web(pname):
    """Download wiki page `pname` from the Trac project site.

    The response body is also written to a local file named `pname`,
    and is returned as the page text.  A notice is printed when the
    page is missing (non-200 status or the wiki's default placeholder).
    """
    host = "projects.edgewall.com"
    rfile = "/trac/wiki/%s?format=txt" % pname
    c = httplib.HTTPConnection(host)
    try:
        c.request("GET", rfile)
        r = c.getresponse()
        d = r.read()
        if r.status != 200 or d == ("describe %s here\n" % pname):
            print("Missing page: %s" % pname)
    finally:
        # Close exactly once, even on errors; the original called
        # c.close() twice on the missing-page path and never closed
        # when request()/getresponse() raised.
        c.close()
    # NOTE(review): the placeholder/error body is still written to disk
    # even for missing pages — preserved from the original; confirm
    # whether skipping the write on missing pages would be preferable.
    f = open(pname, 'w+')
    try:
        f.write(d)
    finally:
        f.close()
    return d

# Matches a CamelCase wiki word (optionally escaped with '!') at the start
# of the text or after a non-letter character.  Compiled once rather than
# on every recursive search as the original did.
_wikilink_re = re.compile(
    r"(?P<wikilink>(^|(?<=[^A-Za-z]))[!]?[A-Z][a-z/]+(?:[A-Z][a-z/]+)+)")


def check_links(data):
    """Print a "Broken link" line for every wiki link in `data` whose
    target page is not itself a key of `data`.

    `data` maps page name -> page text.  Returns None; output goes to
    stdout, one line per broken (source, target) pair.
    """
    def get_refs(text):
        # Unique wiki words in sorted order.  Replaces the original
        # recursive scan, which used a mutable default argument
        # (refs=[]) and recursed once per match — a stack-overflow
        # risk on large pages.
        return sorted(set(m.group() for m in _wikilink_re.finditer(text)))

    for page in data.keys():
        for link in get_refs(data[page]):
            if link not in data:
                print("Broken link: %s -> %s" % (page, link))

if __name__ == '__main__':
    try:
        opts, args = getopt.getopt(sys.argv[1:], "ds")
    except getopt.GetoptError:
        # Unrecognized option: show usage and exit.
        print("%s [-d]" % sys.argv[0])
        print("\t-d -- Download pages from the main project wiki.")
        sys.exit()
    # Default is reading local copies; -d switches to fetching from the web.
    get_page = get_page_from_file
    if any(opt == '-d' for opt, _ in opts):
        get_page = get_page_from_web
    # Load every distributed page, then cross-check their wiki links.
    data = dict((name, get_page(name)) for name in wiki_pages)
    check_links(data)