# fits_parse.py
import re
import csv
import logging
import math
import glob
# import argparse
import numpy as np
import os
import pandas as pd
import time
import datetime
import drms
import urllib.request  # `import urllib` alone does not expose urllib.request
# import json
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import astropy.units as u
import telegram_handler
# import warnings
import sunpy.wcs  # NOTE: sunpy.wcs only exists in older sunpy releases (< 0.9)
import sunpy.map
import pickle
import telepot
from colorlog import ColoredFormatter
from astropy.coordinates import SkyCoord
# from astropy.io import fits
# from astropy.time import Time
# from datetime import timedelta
# from sunpy.coordinates import frames
# from astropy.coordinates import SkyCoord
from tg_tqdm import tg_tqdm
# from tqdm import tqdm
# warnings.filterwarnings("ignore")

# define constants
EMAIL = 'emal@email.ru'
SAVE_PATH = 'dataset'
tg_bot_token = 'TOKEN'
tm_chat_id = 1234
ik_chat_id = 1234
sun_group_id = -1234
DATE_DELIMIT = '2010-06-28'
TG_LOGGER = False
FILE_DELETE = False
LOGGER_LEVEL = logging.WARNING
# LOGGER_LEVEL = logging.DEBUG
VERBOSE = True
PERIOD = 300
START_DATE = '1996-04-01'
CROP_DATE = '2017-11-01'
SLEEP = 0.1
PROGRESS = 10
# logging.basicConfig(filename='fits_parse.log', level=logging.INFO)
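# Notes on the switches above:
# - DATE_DELIMIT splits the timeline between instruments: dates before it are
#   served by SOHO/MDI magnetograms, dates after it by SDO/HMI (see date_compare below).
# - PERIOD is the initial batch size in days for a JSOC export request; it is
#   reduced automatically when an export request fails.
# - SLEEP throttles Telegram calls and PROGRESS is the progress-bar update step.
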
def set_logger(level=logging.WARNING, name='logger', telegram=False):
    """Return a logger with a default ColoredFormatter."""
    file_formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s: %(funcName)s - %(message)s")
    stream_formatter = ColoredFormatter(
        "%(asctime)s [%(log_color)s%(levelname)-8s%(reset)s: %(funcName)s] %(white)s%(message)s",
        datefmt=None,
        reset=True,
        log_colors={
            'DEBUG': 'cyan',
            'INFO': 'green',
            'WARNING': 'yellow',
            'ERROR': 'red',
            'CRITICAL': 'red',
        }
    )
    logger = logging.getLogger(name)
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(stream_formatter)
    log_handler = logging.FileHandler("fits_parse.log")
    log_handler.setFormatter(file_formatter)
    logger.addHandler(stream_handler)
    logger.addHandler(log_handler)
    if telegram:
        tg_handler = telegram_handler.TelegramHandler(tg_bot_token, sun_group_id)
        tg_formatter = telegram_handler.HtmlFormatter()
        tg_handler.setFormatter(tg_formatter)
        logger.addHandler(tg_handler)
    logger.setLevel(level)
    return logger


logger = set_logger(level=LOGGER_LEVEL, name='sun_logger', telegram=TG_LOGGER)

def check_dataset_directory():
    if not os.path.exists('HMIdataset/fragments'):
        logger.warning('HMIdataset folders do not exist, creating them')
        os.makedirs('HMIdataset/fragments')
    if not os.path.exists('MDIdataset/fragments'):
        logger.warning('MDIdataset folders do not exist, creating them')
        os.makedirs('MDIdataset/fragments')
    return True

def clean_folder(path):
    for file in os.listdir(path):
        file_path = os.path.join(path, file)
        if os.path.isfile(file_path):
            os.remove(file_path)
    return True

def message_of_start(token=tg_bot_token, chat_id=sun_group_id):
    bot = telepot.Bot(token)
    bot.sendMessage(chat_id, 'Start parsing fits on remote server')


def message_of_start_cropping(token=tg_bot_token, chat_id=sun_group_id):
    bot = telepot.Bot(token)
    bot.sendMessage(chat_id, '-' * 30)
    bot.sendMessage(chat_id, 'Start cropping regions')
    bot.sendMessage(chat_id, '-' * 30)

def hook_for_download_fits(t):
    """Wrap a tqdm instance as a urlretrieve reporthook.

    Don't forget to close() or __exit__() the tqdm instance once you're
    done with it (easiest using `with` syntax).

    Example
    -------
    >>> with tqdm(...) as t:
    ...     reporthook = hook_for_download_fits(t)
    ...     urllib.request.urlretrieve(..., reporthook=reporthook)
    """
    last_b = [0]

    def update_to(b=1, bsize=1, tsize=None):
        """
        b : int, optional
            Number of blocks transferred so far [default: 1].
        bsize : int, optional
            Size of each block (in tqdm units) [default: 1].
        tsize : int, optional
            Total size (in tqdm units). If None [default], it remains unchanged.
        """
        if tsize is not None:
            t.total = tsize
        t.update((b - last_b[0]) * bsize)
        last_b[0] = b

    return update_to

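# Concrete usage sketch (assumes `from tqdm import tqdm` is uncommented above;
# `url` and `filepath` are placeholders):
# with tqdm(unit='B', unit_scale=True, desc='magnetogram') as t:
#     urllib.request.urlretrieve(url, filepath, reporthook=hook_for_download_fits(t))
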
def request_mfits_by_date_MDI(moment, email=EMAIL, path_to_save='MDIdataset', verbose=False):
    """
    Request a magnetogram FITS file from the JSOC database.
    moment: pd.datetime object
    return: filepath to the magnetogram
    """
    filename = 'mdi.fd_m_96m_lev182.' + moment.strftime('%Y%m%d_%H%M%S_TAI.data.fits')
    filepath = os.path.join(path_to_save, filename)
    if not os.path.exists(filepath):
        c = drms.Client(email=email, verbose=verbose)
        str_for_query = 'mdi.fd_M_96m_lev182' + moment.strftime('[%Y.%m.%d_%H:%M:%S_TAI]')
        logger.info('Magnetogram: {} will be downloaded ...'.format(str_for_query))
        r = c.export(str_for_query, method='url', protocol='fits')
        logger.debug(r)
        try:
            r.wait()
            logger.info(r.request_url)
        except Exception as e:
            logger.warning('Cannot wait any longer, skipping. Got exception: {}'.format(e))
        try:
            logger.info('Download data and save to path {}'.format(filepath))
            r.download(path_to_save, verbose=verbose)
        except Exception as e:
            logger.error('Got an error while downloading: {}'.format(e))
            logger.warning('Skip this date')
    return filepath

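# Example (sketch): fetch the MDI magnetogram recorded at midnight on 2001-04-30.
# filepath = request_mfits_by_date_MDI(pd.to_datetime('2001-04-30 00:00:00'))
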
def request_batch_mfits_by_date(moment,
                                period_of_days=30, email=EMAIL,
                                path_to_save='dataset',
                                verbose=False,
                                type_mag='MDI',
                                token=tg_bot_token,
                                chat_id=sun_group_id):
    """Request a batch of FITS files for a period of days and return:
    request url
    period of days that was applied
    first date of the batch
    last date of the batch
    number of files in the batch
    """
    c = drms.Client(email=email, verbose=verbose)

    def set_str_for_query(period_of_days=period_of_days):
        if type_mag == 'MDI':
            str_for_query = 'mdi.fd_M_96m_lev182' + moment.strftime('[%Y.%m.%d_%H:%M:%S_TAI/{}d@24h]'.format(period_of_days))
            filename_to_check = 'mdi.fd_m_96m_lev182.' + moment.strftime('%Y%m%d_%H%M%S_TAI.data.fits')
            path_to_save = 'MDIdataset'
        if type_mag == 'HMI':
            str_for_query = 'hmi.m_720s' + moment.strftime('[%Y.%m.%d_%H:%M:%S_TAI/{}d@24h]'.format(period_of_days))
            path_to_save = 'HMIdataset'
            filename_to_check = 'hmi.m_720s.' + moment.strftime('%Y%m%d_%H%M%S_TAI.magnetogram.fits')
        return str_for_query, path_to_save, filename_to_check

    str_for_query, path_to_save, filename_to_check = set_str_for_query()
    logger.debug('{}\n{}\n{}'.format(str_for_query, path_to_save, filename_to_check))
    if os.path.exists(os.path.join(path_to_save, filename_to_check)):
        period_of_days = 10
        logger.info('Files already exist. Skipping download; stepping forward {} days'.format(period_of_days))
        return None, period_of_days, moment, moment + datetime.timedelta(days=period_of_days), period_of_days
    logger.info('Magnetogram: {} will be downloaded ...'.format(str_for_query))
    r = c.export(str_for_query, protocol='fits')
    logger.debug(r)
    logger.debug(r.has_failed())
    threshold = round(math.log(period_of_days) ** 2 / 2)
    while r.has_failed():
        period_of_days -= threshold
        if period_of_days < round(threshold / 2):
            logger.warning('Period of days is too small, resetting this request to 10 days')
            logger.warning('Export request was: {}'.format(str_for_query))
            period_of_days = 10
            return None, period_of_days, moment, moment + datetime.timedelta(days=period_of_days), period_of_days
        time.sleep(1)
        logger.info('Export request has failed. Reducing the number of days in it by {}. Days in request now: {}'.format(int(threshold), period_of_days))
        str_for_query, _, _ = set_str_for_query(period_of_days=period_of_days)
        logger.debug('Request string: {}'.format(str_for_query))
        r = c.export(str_for_query, protocol='fits')
        logger.debug(r)
    logger.debug(len(r.data))
    try:
        r.wait(sleep=10, retries_notfound=10)
    except Exception as e:
        logger.error('Cannot wait any longer, skipping. Got exception: {}'.format(e))
    logger.info('Download data and save to path {}'.format(path_to_save))
    first_date_batch = r.urls[0:]['record'].values[0].replace('[', ' ').split()[1].split('_')[0].replace('.', '-')
    last_date_batch = r.urls[-1:]['record'].values[0].replace('[', ' ').split()[1].split('_')[0].replace('.', '-')
    with tg_tqdm(r.urls.index, token=token, chat_id=chat_id, desc='DOWNLOAD BATCH',
                 postfix='start_date = {}, end_date = {}'.format(first_date_batch, last_date_batch)) as batch_d:
        for ind in batch_d:
            try:
                # file_name = '.'.join(r.urls.filename[ind].split('.')[:3] + r.urls.filename[ind].split('.')[4:])
                urllib.request.urlretrieve(r.urls.url[ind], os.path.join(path_to_save, r.urls.filename[ind]))
            except Exception as e:
                logger.error('Got an error while downloading {}: {}'.format(r.urls.url[ind], repr(e)))
                logger.warning('Skip this file')
    len_batch = len(r.urls)
    return r.request_url, period_of_days, first_date_batch, last_date_batch, len_batch

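# Example (sketch): request up to PERIOD days of MDI magnetograms starting at start_moment.
# url, days, first_date, last_date, n_files = request_batch_mfits_by_date(
#     start_moment, period_of_days=PERIOD, email=EMAIL, type_mag='MDI')
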
def request_mfits_by_date_HMI(moment, email=EMAIL, path_to_save='HMIdataset', verbose=False):
    """
    Request a magnetogram FITS file from the JSOC database.
    moment: pd.datetime object
    return: filepath to the magnetogram
    """
    filename = 'hmi.m_720s.' + moment.strftime('%Y%m%d_%H%M%S_TAI.magnetogram.fits')
    filepath = os.path.join(path_to_save, filename)
    if not os.path.exists(filepath):
        c = drms.Client(email=email, verbose=verbose)
        str_for_query = 'hmi.m_720s' + moment.strftime('[%Y.%m.%d_%H:%M:%S_TAI]{magnetogram}')
        logger.info('Magnetogram: {} will be downloaded ...'.format(str_for_query))
        r = c.export(str_for_query, method='url', protocol='fits')
        logger.debug(r)
        try:
            r.wait()
            logger.info(r.request_url)
        except Exception as e:
            logger.warning('Cannot wait any longer, skipping. Got exception: {}'.format(e))
        try:
            logger.info('Download data and save to path {}'.format(filepath))
            r.download(path_to_save, verbose=verbose)
        except Exception as e:
            logger.error('Got an error while downloading: {}'.format(e))
            logger.warning('Skip this date')
    return filepath

def read_fits_to_map(filepath, plot_show=False, ln=False):
    """
    Read a FITS file into a sunpy map object and optionally plot it,
    in signed logarithmic scale if ln is set.
    return
        mymap: sunpy object
    """
    mymap = sunpy.map.Map(filepath)
    if plot_show:
        plt.figure(figsize=(12, 12))
        if ln:
            data = np.sign(mymap.data) * np.log1p(np.abs(mymap.data))
        else:
            data = mymap.data
        plt.imshow(data, cmap='gray')
    return mymap

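# Example (sketch): load a magnetogram and preview it with signed-log scaling
# (the filename follows the pattern produced by request_mfits_by_date_MDI).
# mag_map = read_fits_to_map('MDIdataset/mdi.fd_m_96m_lev182.20010430_000000_TAI.data.fits',
#                            plot_show=True, ln=True)
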
def region_coord_list(datestr, sunspots_df, limit_deg=45):
    """
    Function for working with the sunspot_1996_2017.pkl dataframe;
    returns a list of tuples (date, NOAA number, location), used in cropping.
    args:
        datestr: date string in the format used in the dataframe, e.g. '2001-04-30'
        sunspots_df: dataframe from the file sunspot_1996_2017.pkl
    return: list of tuples
    """
    date_df = sunspots_df.loc[datestr]
    date_df.index = date_df.index.droplevel()
    rc_list = []
    for index, row in date_df.iterrows():
        try:
            restriction_degree = (abs(float(row.location[1:3])) <= limit_deg) and (abs(float(row.location[4:])) <= limit_deg)
            if restriction_degree:
                rc_list.append((pd.to_datetime(datestr, format='%Y-%m-%d'), index, row.location))
        except ValueError as e:
            if TG_LOGGER:
                time.sleep(SLEEP)
            logger.warning('Error reading location {} in degrees for date {}: {}'.format(row.location, datestr, e))
        except Exception as e:
            if TG_LOGGER:
                time.sleep(SLEEP)
            logger.error('Error reading location {} in degrees for date {}: {}'.format(row.location, datestr, e))
    return rc_list

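# Example (sketch, hypothetical NOAA number): an rc_list entry for a region at
# N17 W33 would look like (Timestamp('2001-04-30 00:00:00'), 9431, 'N17W33').
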
def return_pixel_from_map(mag_map, record, limit_deg=45):
    """
    Convert a lat/lon location string like 'N20E18' to pixel coordinates on the sun map.
    """
    pattern = re.compile(r"[NS]\d{2}[EW]\d{2}")
    assert bool(pattern.match(record)), 'Pattern should be in the same format as N20E18'
    assert (abs(float(record[1:3])) <= limit_deg) and (abs(float(record[4:])) <= limit_deg), 'Consider only regions between -{}, +{} degrees'.format(limit_deg, limit_deg)
    if record[0] == 'N':
        lat = float(record[1:3])
    else:
        lat = -float(record[1:3])
    if record[3] == 'W':
        lon = float(record[4:])
    else:
        lon = -float(record[4:])
    hpc_coord = sunpy.wcs.convert_hg_hpc(lon, lat, b0_deg=mag_map.meta['crlt_obs'])
    coord = SkyCoord(hpc_coord[0] * u.arcsec, hpc_coord[1] * u.arcsec, frame=mag_map.coordinate_frame)
    pixel_pos = mag_map.world_to_pixel(coord)  # pixel Quantities; no extra u.pixel factor needed
    return pixel_pos

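# Example: 'N17W33' parses to lat = +17, lon = +33, and 'S05E12' parses to
# lat = -5, lon = -12 (north and west are positive).
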
def crop_regions(mag_map, rc_list, type_mag, delta=100, plot_rec=False, plot_crop=False, limit_deg=45, ln=False):
    """
    Crop regions of half-height delta (in pixels; 2.5*delta wide, 2*delta tall)
    and save them to disk;
    if plot_rec, plot rectangles of the regions on the solar disk;
    if plot_crop, plot only the cropped regions.
    """
    if ln:
        data = np.sign(mag_map.data) * np.log1p(np.abs(mag_map.data))
    else:
        data = mag_map.data
    if type_mag == 'MDI':
        delta = 100
    if type_mag == 'HMI':
        delta = 200
    if plot_rec:
        fig, ax = plt.subplots(1, figsize=(12, 12))
        ax.matshow(data)
        plt.gray()
        ax.set_title('{} magnetogram at '.format(type_mag) + rc_list[0][0].strftime('%Y-%m-%d %H:%M'))
        for record in rc_list:
            try:
                pxs = return_pixel_from_map(mag_map, record[2], limit_deg)
            except Exception as e:
                logger.error('Error getting pixel coordinates from map: {}. Skip it'.format(e))
                continue
            px, py = pxs[0].value, pxs[1].value
            rect = patches.Rectangle((px - 1.25 * delta, py - delta), 2.5 * delta, 2 * delta, linewidth=3, edgecolor='r', facecolor='none')
            ax.add_patch(rect)
            ax.annotate('{}.AR{}'.format(type_mag, record[1]), xy=(px, py), xytext=(px, py - 50), color='yellow', fontsize='xx-large')
        plt.show()
    submaps = []
    for record in rc_list:
        filename = '{}.{}.AR{}.fits'.format(type_mag, record[0].strftime('%Y-%m-%d_%H%M%S'), record[1])
        filepath = os.path.join('{}dataset/fragments'.format(type_mag), filename)
        try:
            pxs = return_pixel_from_map(mag_map, record[2], limit_deg)
        except Exception as e:
            logger.error('Error getting pixel coordinates from map: {}. Skip it'.format(e))
            continue
        bot_l = u.Quantity([pxs[0] - delta * 1.25 * u.pixel, pxs[1] - delta * u.pixel])
        top_r = u.Quantity([pxs[0] + delta * 1.25 * u.pixel, pxs[1] + delta * u.pixel])
        submap = mag_map.submap(bot_l, top_r)
        if plot_crop:
            submap.peek()
        try:
            submap.save(filepath)
        except Exception as e:
            if TG_LOGGER:
                time.sleep(SLEEP)
            logger.info('Could not save fits {} because: {}. Skip it'.format(filename, e))
        submaps.append(submap)
    return submaps

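# Example (sketch): crop all qualifying active regions from one magnetogram,
# reusing rc_list from region_coord_list above.
# sun_map = read_fits_to_map(filepath)
# submaps = crop_regions(sun_map, rc_list, type_mag='MDI')
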
def date_compare(date):
    return date < datetime.datetime.strptime(DATE_DELIMIT, '%Y-%m-%d')

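# date_compare() is the instrument switch: True means the date falls in the
# SOHO/MDI era (before DATE_DELIMIT), False means the SDO/HMI era.
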
if __name__ == '__main__':
    check_dataset_directory()
    message_of_start()
    try:
        sunspots = pickle.load(urllib.request.urlopen('https://raw.githubusercontent.com/iknyazeva/FitsProcessing/master/sunspot_1996_2017.pkl'))
        logger.info('Loading the sunspot dataframe succeeded!')
    except Exception as e:
        logger.error('Cannot load the sunspot dataframe, halting parsing! Got exception: {}'.format(e))
        raise
    requests_urls = []
    if START_DATE:
        try:
            start_moment = sunspots[(sunspots.index.get_level_values(0) > START_DATE)].index.get_level_values(0)[0]
        except IndexError as e:
            logger.info('Index out of bounds. Possibly the table has ended: {}'.format(e))
            start_moment = START_DATE
        except Exception as e:
            logger.error('Some error when getting start_moment for the first iteration: {}'.format(e))
    else:
        start_moment = sunspots.index.get_level_values(0)[0]
    logger.debug(start_moment)
    count_of_days_left = len(sunspots[(sunspots.index.get_level_values(0) >= start_moment)].groupby(level=0))
    logger.debug(count_of_days_left)
    with tg_tqdm(sunspots[(sunspots.index.get_level_values(0) > start_moment)].groupby(level=0),
                 token=tg_bot_token, chat_id=sun_group_id, desc='MAIN PROGRESS DOWNLOAD') as tgm:
        number_batch = 1
        while count_of_days_left > 0:
            tgm.set_postfix(batch=number_batch)
            # MDI before DATE_DELIMIT, HMI after
            batch_type_mag = 'MDI' if date_compare(start_moment) else 'HMI'
            request_url, period_of_days, first_date_batch, last_date_batch, len_batch = \
                request_batch_mfits_by_date(start_moment, period_of_days=PERIOD,
                                            email=EMAIL, type_mag=batch_type_mag, verbose=VERBOSE)
            logger.debug('Returned period of days {}'.format(period_of_days))
            # requests_urls.append(request_url)
            try:
                start_moment = sunspots[(sunspots.index.get_level_values(0) > last_date_batch)].index.get_level_values(0)[0]
            except IndexError as e:
                logger.info('Index out of bounds. Possibly the table has ended: {}'.format(e))
                break  # no dates left after this batch; keeping the old start_moment would loop forever
            except Exception as e:
                logger.error('Some error when getting start_moment for the next iteration: {}'.format(e))
            count_of_days_left = len(sunspots[(sunspots.index.get_level_values(0) >= start_moment)].groupby(level=0))
            number_batch += 1
            with open('requests_urls.csv', 'a', newline='') as file:
                csv.writer(file).writerow([request_url])  # wrap in a list so the URL is one field, not split per character
            tgm.update(len_batch)
    # with open('requests_urls.csv', 'w') as file:
    #     csv.writer(file, delimiter='\n').writerow(requests_urls)
    message_of_start_cropping()
    if CROP_DATE:
        crop_df = sunspots[(sunspots.index.get_level_values(0) > CROP_DATE)]
    else:
        crop_df = sunspots
    with tg_tqdm(range(1), tg_bot_token, sun_group_id,
                 total=len(crop_df.groupby(level=0)), desc='CROPPING PROGRESS') as tgt:
        def is_progress(acc, total, progress=PROGRESS, tqdm_instance=tgt):
            if acc % progress == 0:
                logger.debug('In if acc = {}'.format(acc))
                time.sleep(SLEEP)
                tqdm_instance.update(progress)
            elif acc >= total:
                logger.debug('In elif acc = {}'.format(acc))
                time.sleep(SLEEP)
                tqdm_instance.update(total % progress)
            return True

        acc = 0
        total = len(crop_df.groupby(level=0))
        logger.debug(total)
        for date, df in crop_df.groupby(level=0):
            rc_list = region_coord_list(str(date), df, limit_deg=45)
            if not rc_list:
                acc += 1
                time.sleep(SLEEP)
                is_progress(acc, total)
                logger.debug('rc_list is empty - {}, acc = {}'.format(rc_list, acc))
                continue
            # the MDI and HMI eras differ only in series name and folder
            if date_compare(date):
                filename = 'mdi.fd_m_96m_lev182.' + date.strftime('%Y%m%d_%H%M%S_TAI') + '*.fits'
                path = 'MDIdataset/'
                type_mag = 'MDI'
            else:
                filename = 'hmi.m_720s.' + date.strftime('%Y%m%d_%H%M%S_TAI') + '*.fits'
                path = 'HMIdataset/'
                type_mag = 'HMI'
            try:
                filepath = glob.glob(path + filename)[0]
                if TG_LOGGER:
                    time.sleep(SLEEP)
                logger.debug('filepath: {}'.format(filepath))
            except IndexError:
                if TG_LOGGER:
                    time.sleep(SLEEP)
                logger.info('File for date {} does not exist'.format(str(date)))
                acc += 1
                is_progress(acc, total)
                continue
            except Exception as e:
                if TG_LOGGER:
                    time.sleep(SLEEP)
                logger.error('Some error with glob: {}'.format(e))
                acc += 1
                is_progress(acc, total)
                continue
            try:
                sun_map = read_fits_to_map(filepath, plot_show=False)
                crop_regions(sun_map, rc_list, plot_rec=False, plot_crop=False, type_mag=type_mag)
            except ValueError as e:
                if TG_LOGGER:
                    time.sleep(SLEEP)
                logger.info('Got exception while reading: {}'.format(e))
                logger.info('Moving on, skipping it.')
                # acc += 1
                # continue
            except Exception as e:
                if TG_LOGGER:
                    time.sleep(SLEEP)
                logger.error('Got exception while reading: {}'.format(e))
                logger.warning('Moving on, skipping it.')
                # acc += 1
                # continue
            # tgt.update()
            acc += 1
            logger.debug('acc = {}'.format(acc))
            is_progress(acc, total)
    if FILE_DELETE:
        clean_folder('MDIdataset')
        clean_folder('HMIdataset')