# --- interior of generate() (header, loop, and closing lines elided from this view) ---
# Regenerates ${dst}: a list of SSI include directives for the newest ${count}
# posted_news/ files whose names match the regex ${filter}.
# Skip the rebuild entirely when ${dst} is already newer than posted_news/.
20 test "${dst}" -nt "posted_news/" && {
21 echo "${dst}: up to date"
# (elided: close of the up-to-date branch / early return)
25 echo "${dst}: regenerating"
# Newest-first basenames of matching news fragments, joined onto one line.
# NOTE(review): -regex/-printf are GNU find extensions — fine on Linux, not POSIX.
26 files=$(find posted_news/ -type f -regex "${filter}" -printf "%f\n" | sort -r | xargs)
# When an entry has both a *.snip.html and a *.body.html variant and the filter
# accepts snips, skip the full body in favor of the snip sibling.
29 if [[ "$filter" == *snip* ]] && [[ "$f" == *".body.html"* ]] && [[ "$files" == *"${f%.body.html}.snip.html"* ]]; then
# Emit one Apache SSI include per selected news fragment.
32 echo "<!--#include virtual=\"/samba/posted_news/${f}\" -->"
# Stop after ${count} entries (count is presumably decremented in elided lines — confirm).
34 if [[ $count == 0 ]]; then break; fi
# --- interior of generate_feed() (header and some lines elided from this view) ---
# Regenerates the Atom feed ${dst} from the newest ${count} posted_news/ files
# matching ${filter}; skipped when ${dst} is already newer than posted_news/.
46 test "${dst}" -nt "posted_news/" && {
47 echo "${dst}: up to date"
# (elided: close of the up-to-date branch)
51 echo "${dst}: regenerating"
# Newest-first basenames of matching news fragments (GNU find extensions).
52 files=$(find posted_news/ -type f -regex "${filter}" -printf "%f\n" | sort -r | xargs)
# Atom (RFC 4287) feed envelope.
54 echo '<?xml version="1.0" encoding="utf-8"?>'
55 echo '<feed xmlns="http://www.w3.org/2005/Atom">'
56 echo '<id>https://www.samba.org/samba/news.atom</id>'
57 echo '<link href="https://pubsubhubbub.appspot.com/" rel="hub" />'
58 echo '<link href="https://www.samba.org/samba/news.atom" rel="self" />'
59 echo '<title>Samba - News</title>'
# BUG FIX: the format used %m (month) where minutes were intended, producing an
# invalid RFC 3339 timestamp in the feed-level <updated> (e.g. the month value
# repeated in the minutes position). %M is minutes — matches the HH:MM:SS shape
# the per-entry sed below produces.
60 echo '<updated>'`date +%Y-%m-%dT%H:%M:%SZ`'</updated>'
# Prefer the snip variant when both snip and body exist for the same entry.
63 if [[ "$filter" == *snip* ]] && [[ "$f" == *".body.html"* ]] && [[ "$files" == *"${f%.body.html}.snip.html"* ]]; then
# Per-entry timestamp derived from the YYYYMMDD-HHMM prefix of the filename.
67 MY_UPDATED=`echo $f | sed 's/^\(....\)\(..\)\(..\)-\(..\)\(..\).*/\1-\2-\3T\4:\5:00Z/'`
68 echo "<id>https://www.samba.org/samba/news.atom#$MY_UPDATED</id>"
69 echo "<link rel='alternate' href='https://www.samba.org/samba/latest_news.html' />"
70 echo '<author><name>Samba Team</name><email>webmaster@samba.org</email></author>'
71 echo "<updated>$MY_UPDATED</updated>"
# (elided: head of the per-entry content pipeline)
# Lift the headline paragraph into the entry <title>.
74 | perl -pe '$/=""; s/<p class=headline>(.*?)<\/p>/<title>$1<\/title>/s' \
# Atom content must be XHTML: normalize <br>/</br> to self-closing form.
77 | sed 's:</\?br>:<br />:ig' \
# Open the xhtml <content> wrapper immediately after the generated title.
78 | sed "/<\/title>/a <content type='xhtml' xml:base='https:\/\/www.samba.org\/samba\/news.atom'><div xmlns='http:\/\/www.w3.org\/1999\/xhtml'>"
79 echo '</div></content></entry>'
# Stop after ${count} entries (count decremented in elided lines).
81 if [[ $count == 0 ]]; then break; fi
# Generate the "latest stable release" HTML snippet.
# $1 - destination snippet path (assigned to ${dst} in an elided line — confirm)
# $2 - base download URL for release tarballs
# (closing brace and some body lines, including the matching popd, are elided
#  from this view)
90 generate_latest_stable_release() {
92 local download_url="$2"
94 pushd history >/dev/null
# Extract "X.Y.Z" from history/samba-X.Y.Z.html release-note filenames and
# version-sort numerically per component; -u dedupes.
# NOTE(review): parses ls output — tolerable here because the filenames are
# machine-generated without whitespace, but a glob loop would be more robust.
95 ALL_VERSIONS=$(ls samba-*.html | cut -d '-' -f2- | cut -d '.' -f1-3 | sort -t. -k 1,1n -k 2,2n -k 3,3n -u)
96 LATEST_VERSION=$(echo "${ALL_VERSIONS}" | tail -1)
99 echo "LATEST_VERSION: ${LATEST_VERSION}"
# Download artifacts for the newest release.
# NOTE(review): the detached signature covers the uncompressed .tar, not the
# .tar.gz — presumably the project's signing convention; confirm before "fixing".
101 local tgz="samba-${LATEST_VERSION}.tar.gz"
102 local asc="samba-${LATEST_VERSION}.tar.asc"
103 local release_notes="history/samba-${LATEST_VERSION}.html"
# Skip the rebuild when ${dst} is already newer than the release notes.
105 test "${dst}" -nt "${release_notes}" && {
106 echo "${dst}: up to date"
# (elided: close of the up-to-date branch)
110 echo "${dst}: regenerating"
# BEGIN/END markers delimit the generated region for downstream tooling.
112 echo "<!-- BEGIN: ${dst} -->"
114 echo "<a href=\"${download_url}/${tgz}\">Samba ${LATEST_VERSION} (gzipped)</a><br>"
115 echo "<a href=\"/samba/${release_notes}\">Release Notes</a> ·"
116 echo "<a href=\"${download_url}/${asc}\">Signature</a>"
118 echo "<!-- END: ${dst} -->"
# Top-level driver: build the news include snippets and the Atom feed.
124 generate "generated_news/latest_10_headlines.html" "10" ".*\.headline\.html"
125 generate "generated_news/latest_10_bodies.html" "10" ".*\.body\.html"
126 generate "generated_news/latest_2_bodies.html" "2" ".*\.\(snip\|body\)\.html"
127 generate_feed "news.atom" "10" ".*\.\(snip\|body\)\.html"
# Validate the published feed via the W3C validator; mail the webmasters on
# failure ("then {" and the closing "fi" are in elided lines).
# FIX: dropped the backticks that wrapped the pipeline — the old form executed
# the (empty) OUTPUT of a command substitution and only worked by accident
# (an empty command inherits the substitution's exit status). Running the
# pipeline directly in the condition is the intended, reliable form.
129 if ! w3m -dump 'https://validator.w3.org/feed/check.cgi?url=https%3A%2F%2Fwww.samba.org%2Fsamba%2Fnews.atom' | grep -q Congratulations
132 echo "NEWS feed error? Maybe a recent web repo checkin contained wrong html"
133 echo "Check https://validator.w3.org/feed/check.cgi?url=https%3A%2F%2Fwww.samba.org%2Fsamba%2Fnews.atom"
# FIX: typo in the user-visible mail subject ("eror" -> "error").
134 } | mail -s "ATOM feed error" janger@samba.org root@samba.org
# Latest-stable-release snippet.
138 download_url="https://download.samba.org/pub/samba/stable"
139 generate_latest_stable_release "generated_news/latest_stable_release.html" "${download_url}"