import json
import os
import re
import sys
import traceback
import urllib
def last_dtc():
    """Return the id of the last inserted DTC quote.

    NOTE(review): the `def` header was missing in this chunk (the bare
    top-level `return` was a SyntaxError); reconstructed from the FETCHS
    entry that references `last_dtc`. Delegates to the local `dtc` module,
    imported lazily from a hard-coded path.
    """
    # presumably the chdir is needed by the dtc module — TODO confirm
    os.chdir('/home/vincent/scripts/today/')
    sys.path.append("/home/vincent/scripts/dtc/")
    import dtc
    return dtc.last_inserted()
def last_xkcd():
    """Return the id (int) of the current xkcd comic.

    Scrapes the xkcd front page. Network errors are NOT caught here:
    they propagate to the caller (fetch_all logs and skips them).
    """
    page = urllib.urlopen("http://xkcd.com")
    html = page.read()
    # The front page embeds "Permanent link to this comic: http://xkcd.com/<id>/"
    current_id = int(re.findall(
        "Permanent link to this comic: http://xkcd.com/(.*?)/", html)[0])
    return current_id
def last_xantah():
    """Return the highest Xantah episode id listed on the download page.

    Raises ValueError if no episode markers are found in the page.
    """
    page = urllib.urlopen("http://www.adoprixtoxis.com/lite/download/xantah_downloads.php")
    html = page.read()
    # Each episode is rendered as: <div class="top">Xantah <id></div>
    ids = re.findall("""<div class="top">Xantah (.*?)</div>""", html)
    return max(int(i) for i in ids)
def get_file():
    """Return the stored mapping of last-seen ids, read from store_published_file.

    NOTE(review): the rest of this function's body is not visible in this
    chunk — presumably it parses the file and returns a dict (the __main__
    block calls .update() on the result); confirm against the full file.
    The open() handle should be closed by the missing remainder.
    """
    f = open(store_published_file)
# Map of site name -> zero-argument function returning the latest
# published id for that site.
FETCHS = {
    "xkcd": last_xkcd,
    "dtc": last_dtc,
    "xantah": last_xantah,
}
+
def fetch_all():
    """Fetch the latest id from every site registered in FETCHS.

    Returns a dict {site_name: latest_id}. A failure on one site is
    logged (traceback on stderr) and skipped, so the remaining sites
    are still fetched — the merge in __main__ keeps the old stored id
    for any site missing from the result.
    """
    news = {}
    for name, fetch in FETCHS.iteritems():
        try:
            news[name] = fetch()
        except Exception:
            # Best-effort: log and keep going with the other sites.
            traceback.print_exc()
    return news
def sync():
if __name__ == "__main__":
if sys.argv[1] == "check":
news = fetch_all()
- update_file(news)
+ olds = get_file()
+ olds.update(news)
+ update_file(olds)
elif sys.argv[1] == "whatsup":
news = get_file()
print json.dumps(news)