#!/usr/bin/python -tt
# -*- coding: utf-8 -*-

import logging
import re
import subprocess
import time
import urllib2
from pprint import pprint

from sr0wx_module import SR0WXModule
class GeoMagneticSq9atk(SR0WXModule):
    """Fetch geomagnetic-condition data and render it as a voice message.

    The service page is scraped for ``<use href="#gm_N">`` markers (one per
    3-hour forecast slot); the numeric activity levels are mapped to
    pre-recorded sample names.  The Polish-looking tokens below are sample
    identifiers consumed by the speech synthesizer, not prose — they must
    not be altered.
    """

    def __init__(self, language, service_url):
        """
        :param language: language module used by the sr0wx core to render text
        :param service_url: URL of the page scraped for geomagnetic data
        """
        self.__language = language
        self.__service_url = service_url
        self.__logger = logging.getLogger(__name__)
        # Sample names for "today", "tomorrow", "day after tomorrow".
        self.__days = ['dzis', 'jutro', 'po_jutrze']
        # Geomagnetic activity level (0-8) -> condition sample name.
        self.__conditions = {
            0: ' ',
            1: 'brak_istotnych_zaburzen__geomagnetycznych',
            2: 'lekkie_zaburzenia_geomagnetyczne',
            3: 'umiarkowane_zabuz_enia_geomagnetyczne',
            4: 'mal_a_burza_geomagnetyczna',
            5: 'umiarkowana_burza_geomagnetyczna',
            6: 'silna_burza_geomagnetyczna',
            7: 'sztorm_geomagnetyczny',
            8: 'duz_y_sztorm_geomagnetyczny',
        }
        # Slot start hour (0, 3, ..., 21) -> time-of-day sample name.
        self.__seasons = {
            0: 'kro_tko_po_po_l_nocy', 3: 'nad_ranem', 6: 'rano',
            9: 'przed_pol_udniem', 12: 'wczesnym_popol_udniem',
            15: 'po_pol_udniu', 18: 'wieczorem', 21: 'przed_po_l_noca_',
        }
        # Daily spread of levels (max - min) -> fluctuation sample name.
        self.__fluctuations = {
            0: 'niezauwaz_alne', 1: 'znikome', 2: 'lekkie', 3: 'podwyz_szone',
            4: 'umiarkowane', 5: 'duz_e', 6: 'bardzo_duz_e', 7: 'ekstremalne',
        }

    def downloadDataFromUrl(self, url):
        """Download *url* and return the raw response body.

        A browser-like User-Agent is sent because some services reject
        requests carrying the default urllib2 agent string.
        """
        self.__logger.info("::: Odpytuję adres: " + url)
        opener = urllib2.build_opener()
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 5.1; rv:10.0.1) Gecko/20100101 Firefox/10.0.1',
        }
        opener.addheaders = headers.items()
        response = opener.open(url)
        return response.read()

    def getDataParsedHtmlData(self):
        """Scrape the service page and return the raw level strings.

        Each ``<use href="#gm_N">`` occurrence carries one 3-hourly value.
        """
        self.__logger.info("::: Pobieram informacje...")
        html = self.downloadDataFromUrl(self.__service_url)
        pattern = re.compile(r'<use href="#gm_(\d+)".*?>')
        res = pattern.findall(html)
        # Drop the first match - it is not part of the data container.
        return res[1:]

    def groupValuesByDays(self, data):
        """Group raw 3-hourly values into ``{1: {...}, 2: {...}, 3: {...}}``.

        Inner-dict keys are slot start hours (0, 3, ..., 21).  Slots of
        day 1 that are already in the past are skipped.
        """
        hour = 0
        dayNum = 1
        current_hour = int(time.strftime("%H"))
        output = {1: {}, 2: {}, 3: {}}
        for i, val in enumerate(data):
            # Skip today's slots that have already passed.
            if dayNum > 1 or hour > current_hour - 1:
                # NOTE(review): data[i+1] (not val) is stored on purpose -
                # the feed appears shifted by one slot.  The length guard
                # fixes an IndexError the original hit on a short feed.
                if dayNum < 4 and i < 24 and i + 1 < len(data):
                    output[dayNum][hour] = data[i + 1]
            hour += 3
            if hour > 21:
                hour = 0
                dayNum += 1
        return output

    def getStrongestConditionOfDay(self, data):
        """Return ``{'value': level, 'at': hour}`` for the strongest slot.

        Levels arrive as strings from the regex scrape, so they are compared
        numerically.  (The original compared str against int, which only
        worked by accident under Python 2 and raises TypeError on Python 3.)
        """
        maxValue = {
            'value': 0,
            'at': 0,
        }
        for key, row in data.items():
            level = int(row)
            if level > maxValue['value']:
                maxValue['value'] = level
                maxValue['at'] = key
        return maxValue

    def getDailyFluctuation(self, data):
        """Return the daily spread: highest level minus lowest level."""
        # Convert before max/min - the original compared strings
        # lexicographically, which is only correct for single digits.
        values = [int(v) for v in data.values()]
        return max(values) - min(values)

    def get_data(self):
        """Build and return the geomagnetic part of the announcement.

        :return: dict with ``message`` (space-separated sample names) and
            ``source`` keys, as expected by the sr0wx core.
        """
        values = self.getDataParsedHtmlData()
        daysValues = self.groupValuesByDays(values)
        message = ' _ sytuacja_geomagnetyczna_w_regionie '
        self.__logger.info("::: Przetwarzam dane...\n")
        # Iterate in day order (1, 2, 3) so the message is deterministic
        # regardless of dict iteration order.
        for d in sorted(daysValues):
            day = daysValues[d]
            if len(day) > 0:
                message += " _ " + self.__days[d - 1] + " "
                condition = self.getStrongestConditionOfDay(day)
                message += self.__seasons[condition['at']] + " "
                message += self.__conditions[int(condition['value'])] + " "
                message += self.__fluctuations[self.getDailyFluctuation(day)] + " wahania_dobowe "
        return {
            "message": message + "_",
            "source": "gis_meteo",
        }