# =====================================================================
# groupRecentPages.awk: W-TW group recent-pages applet.
#
# Copyright (c) 2007,2008,2009,2010,2011 Carlo Strozzi
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 dated June, 1991.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# =====================================================================

BEGIN {

    fmt1=fmt2=""; i=j=k=0; delete a; delete b; delete c
    delete d; delete e; delete f1; delete f2

    # Work out the format of each page header.
    fmt1 = readfmt("tw-page-header")
    gsub(/%/,"%%",fmt1)      # turn plain '%' into '%%'.
    gsub(/\\/,"\\\\&",fmt1)  # turn '\' into '\\'.
    gsub(/[\n\r]+/,"",fmt1)  # just in case.
    tmp = fmt1

    # Handle custom positioning of output tokens.
    sub(/.*\[:/,_NULL,tmp); sub(/:].*/,_NULL,tmp)
    tmp = _strip(tmp,_O_MIDDLE)
    if (tmp !~ /^[0-5 ]+$/) tmp = "1 2 3 4 5"
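    # For example, a header template containing "[: 3 1 2 0 0 :]" would
    # feed token 3 to the first header slot, tokens 1 and 2 to the next
    # two, and leave the last two slots empty (a "0" spec selects e[0],
    # which is kept empty). The tokens themselves are the e[] values
    # assigned in the main block below, just before the header printf().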

    # pad missing arg specs with "0".
    if ((i=split(tmp,f1," ")) < 5) {
        while (i++ <= 5) tmp = tmp " 0"
        i = split(tmp,f1," ")
    }

    tmp = _NULL

    for (j=1; j<=i; j++) {
        if (j > 5) break  # ignore excess arg specs.
        # NB: the original source used a literal non-printing marker
        # character here and in a few places below; it is assumed to
        # be "\002" in this copy.
        if (!sub(/\002/,"%s",fmt1)) fmt1 = fmt1 "\002"
        tmp = tmp " " f1[j]
    }

    # encode any extra markers (the assumed "\002", see above).
    gsub(/\002/,"\\<tw:s/\\>",fmt1)

    fmt1 = fmt1 "\n"

    split(_strip(tmp),f1," "); f1[0] = 0
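    # f1[1..5] now holds the header token positions; they are used to
    # index the e[] array in the header printf() further down.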

    # Work out the format of each page footer.
    fmt2 = readfmt("tw-page-footer")
    gsub(/%/,"%%",fmt2)      # turn plain '%' into '%%'.
    gsub(/\\/,"\\\\&",fmt2)  # turn '\' into '\\'.
    gsub(/[\n\r]+/,"",fmt2)  # just in case.
    tmp = fmt2

    # Handle custom positioning of output tokens.
    sub(/.*\[:/,_NULL,tmp); sub(/:].*/,_NULL,tmp)
    tmp = _strip(tmp,_O_MIDDLE)
    if (tmp !~ /^[0-2 ]+$/) tmp = "1 2"

    # pad missing arg specs with "0".
    if ((i=split(tmp,f2," ")) < 2) {
        while (i++ <= 2) tmp = tmp " 0"
        i = split(tmp,f2," ")
    }

    tmp = _NULL

    for (j=1; j<=i; j++) {
        if (j > 2) break  # ignore excess arg specs.
        # Same assumed "\002" marker as for the header format above.
        if (!sub(/\002/,"%s",fmt2)) fmt2 = fmt2 "\002"
        tmp = tmp " " f2[j]
    }

    # encode any extra markers (the assumed "\002", see above).
    gsub(/\002/,"\\<tw:s/\\>",fmt2)

    fmt2 = fmt2 "\n"

    split(_strip(tmp),f2," "); f2[0] = 0

    page_dir = _rcget("tw_gstem")
    g_uri = _rcget("tbl_group.g_uri")

    url = ENVIRON["CSA_RPC_URI"] "/" ENVIRON["CSA_LANG"] "/"

    e[0] = _NULL
}
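
# At this point fmt1 and fmt2 are printf() formats holding one "%s" per
# output token, and f1[]/f2[] map each "%s" to the e[] value that should
# fill it (see the header printf() in the main block below; the footer
# format is presumably consumed the same way elsewhere).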

/^\001/ { next }  # skip table header, if any.

{
    # Load each page in turn, setting the proper headers/footers and
    # showing links to nonexistent wiki pages within the same group.
    # Note that while I use 'ctime' for the 'recent-links' view, I
    # prefer to use 'vtime' here as I think it makes more sense for
    # both this view and the 'recent-headlines' one.

    # k_page, p_vtime, p_name, p_modau, p_uri, p_descr, p_etime
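    # i.e. after the split below: a[1]=k_page, a[2]=p_vtime, a[3]=p_name,
    # a[4]=p_modau, a[5]=p_uri, a[6]=p_descr, a[7]=p_etime.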

    split($0,a,"\t")  # split each record into fields.

    # Exclude hidden pages from this view. Note that this is now done
    # directly by 'updatePage' before the input list is fed into this
    # script, so that we always get the configured number of entries on
    # output even after skipping hidden/restricted/redirected pages.
    #
    #if (a[6] ~ /^ *-/) next

    if (a[4] != _NULL) ftr = a[4]
    else ftr = _nlsmap(_NULL,"anonymous")

    # Remove any redirection URL from the visible description.
    sub(/ *\+*\(:redirect .*/,_NULL,a[6])

    if (a[6] == _NULL) a[6] = a[3]  # default description.

    # Make sure we pick up the new content of a page that is being edited.
    if (a[1] == _rcget("cgi.page")) page = ENVIRON["TNS_NEW_CONTENT"]
    else page = page_dir "/" a[1] "+wki"

    # strip ranking.
    sub(/^.*,/,"",a[2])

    # page header.

    tmp = a[2]
    gsub(/ /,"T",tmp)
    gsub(/:/,"_",tmp)
    tmp = tmp "." ENVIRON["CSA_RID"]
    tmp = _xmlencode(tmp "." NR)
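    # e.g. assuming a p_vtime like "2011-02-03 10:20:30" (hypothetical
    # value), the resulting anchor id looks like
    # "2011-02-03T10_20_30.<CSA_RID>.<NR>", XML-encoded.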

    split(_localdate(a[2],1),b," "); sub(/:..$/,"",b[2])

    split(_localdate(a[7],1),c," "); sub(/:..$/,"",c[2])

    # Load each page body in turn.
    value = _NULL
    while (getline tmp1 < page > 0) value = value tmp1; close(page)

    # Skip empty pages, as they are considered logically deleted.
    # This test should now be obsolete, but I'll leave it in place for
    # a while, as it does not cost much anyway, since this program
    # needs to load the page bodies for other reasons; see below.

    if (value !~ /[a-zA-Z0-9]/) next  # skip empty pages.

    # Insert an entry separator; it may turn out to be useful in the
    # future. The separator was a non-printing character in the original
    # source; "\002" is assumed here.
    if (NR > 1) value = "\002" value

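    # Output tokens available to the header format (see the BEGIN block):
    # e[1] = unique anchor id, e[2]/e[3] = local date and time of
    # p_vtime, e[4]/e[5] = local date and time of p_etime; e[0] stays
    # empty.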
    e[1] = tmp
    e[2] = b[1]
    e[3] = b[2]
    e[4] = c[1]
    e[5] = c[2]

    # print header.
    printf(fmt1,e[f1[1]],e[f1[2]],e[f1[3]],e[f1[4]],e[f1[5]])

    # CSA tag markers must be escaped, or _envtoxml() will happily
    # interpret anything the user has typed into the file!

    gsub(/\$\[/,"\\$[",value)

    # Retain only the page abstract for this view, where defined.
    # See also the call to _wikicpi() further down.

    sub(/.*\(::ab:\)/,"",value)
    sub(/\(:ab::\).*/,"",value)
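    # i.e. keep only the text that follows a "(::ab:)" marker and
    # precedes a "(:ab::)" marker, when such markers are present.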

    #if (sub(/\(:i:\).*/," ... " \
    #    "(" \
    #    _xmlencode(_nlsmap(_NULL,"continue")) ")",value))
    #    value = "" value

    sub(/\(:i:\).*/," ... " \
        "(" \
        _xmlencode(_nlsmap(_NULL,"continue")) ")",value)
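    # Anything from the "(:i:)" marker onwards is replaced with an
    # ellipsis and a localized "(continue)" note.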

    # Try and fix any orphan opening/closing tags produced by the
    # above truncation process. This is done only if we are in XHTML
    # mode, because it is tricky and it may not work in every possible
    # situation. I also rely on the fact that a CPI will not be placed in
    # the middle of things like