
Seems the feed is a few days off.
I normally use https://alarmeringen.nl/, which does seem to have an RSS feed per place, but I can't figure out how to use it:
https://alarmeringen.nl/feeds/city/rhoon.rss
It even offers personalised feeds.
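For a quick test of the feed itself, something like this should print the newest alert (a minimal sketch, assuming requests and beautifulsoup4 are installed; the full script below does the same and then pushes the result to a Domoticz text sensor):

Code: Select all
import requests
from bs4 import BeautifulSoup

FEED_URL = "https://alarmeringen.nl/feeds/city/rhoon.rss"

# Fetch the per-city feed; a browser-like User-Agent avoids being refused by some feeds.
page = requests.get(FEED_URL, headers={'User-Agent': 'Mozilla/5.0'}, timeout=10)
soup = BeautifulSoup(page.content, "lxml")

newest = soup.find("item")  # items are newest-first in this feed
if newest is not None:
    print(newest.title.text)
    print(newest.description.text)
    print(newest.pubdate.text)
else:
    print("No items found - check the feed URL")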
Code: Select all
import csv
import json
import os
import urllib.parse

import requests

try:
    from bs4 import BeautifulSoup
except ImportError:
    print(
        "You don't have BeautifulSoup installed.\nIt is required for this script.\n\n"
        "Please run ' pip install beautifulsoup4 ' and try again\n"
    )
    quit()
try:
    import lxml
except ImportError:
    print(
        "You don't have lxml installed.\nIt is required for this script.\n\n"
        "Please run ' sudo apt-get install python-lxml ' if you are on python 2.x "
        "or ' sudo apt-get install python3-lxml ' if you are on python 3.x and try again\n"
    )
    quit()
try:
    from urllib.parse import urlparse, urlencode
    from urllib.request import urlopen, Request
    from urllib.error import HTTPError
except ImportError:
    # Python 2 fallback
    from urlparse import urlparse
    from urllib import urlencode
    from urllib2 import urlopen, Request, HTTPError

log_name = "p2000_rhoon"
text_idx = '1412'  # change into the idx of your text sensor
domoticz_url = "http://192.168.1.6:8080"
### change this to the url of your Domoticz IP, or to http://127.0.0.1:8080 if you run the script on your Domoticz machine
NEWS_URL = "https://alarmeringen.nl/feeds/city/rhoon.rss"  # the alarmeringen feed to be fetched

if os.path.exists(log_name + '.stor'):  # check if a file with the previous message exists
    with open(log_name + '.stor', 'r') as file:  # if it exists, get the message (+ timestamp)
        prev_msg = str(file.read())  # keep the content in the variable prev_msg
else:
    prev_msg = 'nil'  # if the file does not exist, assign 'nil' to prev_msg
# For tests you can delete the .stor file. Any message fetched from the feed will then be
# considered new and the sensor will update.


def domo_post(idx, value):
    """Push a value to the Domoticz text sensor via the JSON API."""
    enc_value = urllib.parse.quote(value)
    url = (domoticz_url + '/json.htm?type=command&param=udevice&idx=' + idx +
           '&nvalue=0&svalue=' + enc_value)
    try:
        requests.get(url)
        resp_code = 'Sensor update OK'
    except requests.exceptions.RequestException:
        resp_code = 'Sensor update failed!!'
    return resp_code


def news(xml_news_url):
    headers = {'User-Agent': 'Mozilla/5.0'}
    request = Request(xml_news_url, headers=headers)
    xml_page = urlopen(request).read()
    soup_page = BeautifulSoup(xml_page, "lxml", from_encoding="ascii")
    news_list = soup_page.findAll("item")

    msg = news_list[0].description.text  # newest entry from the feed
    print("\nNewest message\n", msg)
    pub_date = news_list[0].pubdate.text  # the timestamp, used to compare messages
    pick_check = msg + pub_date  # fetched message plus its published date/time

    if pick_check != prev_msg:  # stored value differs from the current message with timestamp
        with open(log_name + '.stor', 'w') as file:  # write the new value to storage
            file.write(pick_check)
        domo_post(text_idx, msg)  # and update the text sensor
        print('New message received, updating P2000 text sensor')
    else:  # message and timestamp are the same --> no new message in the RSS feed
        print('\nMessage was same as previous one')  # don't update the text sensor (so no new Domoticz event)

    # Loop through the RSS feed (only for the log file): keep the latest 30 messages.
    i = 0
    feedy = ''
    while i < 30:
        title = news_list[i].title.text  # title (fetched, but not written to the log)
        inhoud = news_list[i].description.text  # description
        tijd = news_list[i].pubdate.text[:-6][5:]  # publication time, trimmed
        combi = tijd + ';' + inhoud
        i = i + 1
        feedy = feedy + combi + '\n'
    with open(log_name + '.log', 'w') as csvfile:  # write the latest 30 messages to the log file
        csvfile.write(feedy)
    return feedy


stream = news(NEWS_URL)
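If you want to check whether the push actually landed, you could read the device back over the JSON API. A rough sketch, assuming the classic type=devices&rid=<idx> call is still available on your Domoticz version:

Code: Select all
import requests

domoticz_url = "http://192.168.1.6:8080"
text_idx = '1412'  # same idx as the text sensor above

# Ask Domoticz for this one device; the last pushed svalue should show up in 'Data'.
resp = requests.get(domoticz_url + '/json.htm?type=devices&rid=' + text_idx, timeout=10)
device = resp.json().get('result', [{}])[0]
print(device.get('Name'), '->', device.get('Data'))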