# fits_parse.py

import re
import csv
import logging
import math
import glob
# import argparse
# import numpy as np
import os
import pandas as pd
import time
import datetime
import drms
import urllib.request
# import json
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import astropy.units as u
import telegram_handler
# import warnings
import sunpy.wcs
import sunpy.map
import pickle
import telepot
from colorlog import ColoredFormatter
from astropy.coordinates import SkyCoord
# from astropy.io import fits
# from astropy.time import Time
# from datetime import timedelta
# from sunpy.coordinates import frames
from tg_tqdm import tg_tqdm
# from tqdm import tqdm

# warnings.filterwarnings("ignore")

# define constants
EMAIL = 'iknyazeva@gmail.com'
# EMAIL = 'metya.tm@gmail.com'
SAVE_PATH = 'dataset'
tg_bot_token = '831964163:AAH7SoaoqWzWIcHaS3yfdmMu-H46hhtUaXw'
tm_chat_id = 1147194
ik_chat_id = 94616973
sun_group_id = -321681009
DATE_DELIMIT = '2010-06-28'   # dates before this are covered by MDI, later ones by HMI
TG_LOGGER = False             # mirror log records to Telegram
FILE_DELETE = False           # clean the full-disk folders after cropping
LOGGER_LEVEL = logging.WARNING
# LOGGER_LEVEL = logging.DEBUG
VERBOSE = True
PERIOD = 300                  # initial batch size for export requests, in days
START_DATE = '2018-01-01'     # first date to download
CROP_DATE = '2017-11-01'      # first date to crop
SLEEP = 0.1
PROGRESS = 10                 # update the cropping progress bar every PROGRESS days
# logging.basicConfig(filename='fits_parse.log', level=logging.INFO)

def set_logger(level=logging.WARNING, name='logger', telegram=False):
    """Return a logger with a colored stream handler, a file handler and,
    optionally, a Telegram handler."""
    file_formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s: %(funcName)s - %(message)s")
    stream_formatter = ColoredFormatter(
        "%(asctime)s [%(log_color)s%(levelname)-8s%(reset)s: %(funcName)s] %(white)s%(message)s",
        datefmt=None,
        reset=True,
        log_colors={
            'DEBUG': 'cyan',
            'INFO': 'green',
            'WARNING': 'yellow',
            'ERROR': 'red',
            'CRITICAL': 'red',
        }
    )
    logger = logging.getLogger(name)
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(stream_formatter)
    log_handler = logging.FileHandler("fits_parse.log")
    log_handler.setFormatter(file_formatter)
    logger.addHandler(stream_handler)
    logger.addHandler(log_handler)
    if telegram:
        tg_handler = telegram_handler.TelegramHandler(tg_bot_token, sun_group_id)
        tg_formatter = telegram_handler.HtmlFormatter()
        tg_handler.setFormatter(tg_formatter)
        logger.addHandler(tg_handler)
    logger.setLevel(level)
    return logger


logger = set_logger(level=LOGGER_LEVEL, name='sun_logger', telegram=TG_LOGGER)
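
# Usage note (illustrative, not executed): every function below logs through
# this shared 'sun_logger', so switching LOGGER_LEVEL to logging.DEBUG is
# enough to trace a whole run, and TG_LOGGER = True mirrors the same records
# to the Telegram group.
#
# >>> logger.debug('visible only when LOGGER_LEVEL is logging.DEBUG')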

def check_dataset_directory():
    """Create the HMI/MDI dataset folders if they do not exist yet."""
    if not os.path.exists('HMIdataset/fragments'):
        logger.warning('HMIdataset folders do not exist, creating them')
        os.makedirs('HMIdataset/fragments')
    if not os.path.exists('MDIdataset/fragments'):
        logger.warning('MDIdataset folders do not exist, creating them')
        os.makedirs('MDIdataset/fragments')
    return True


def clean_folder(path):
    """Delete all regular files in `path`, keeping subfolders."""
    for file in os.listdir(path):
        file_path = os.path.join(path, file)
        if os.path.isfile(file_path):
            os.remove(file_path)
    return True


def message_of_start(token=tg_bot_token, chat_id=sun_group_id):
    bot = telepot.Bot(token)
    bot.sendMessage(chat_id, 'Start parsing fits on remote server')


def message_of_start_cropping(token=tg_bot_token, chat_id=sun_group_id):
    bot = telepot.Bot(token)
    bot.sendMessage(chat_id, '-' * 30)
    bot.sendMessage(chat_id, 'Start cropping regions')
    bot.sendMessage(chat_id, '-' * 30)

def hook_for_download_fits(t):
    """Wrap a tqdm instance into a `reporthook` for urlretrieve.

    Don't forget to close() or __exit__() the tqdm instance once you're
    done with it (easiest using the `with` syntax).

    Example
    -------
    >>> with tqdm(...) as t:
    ...     reporthook = hook_for_download_fits(t)
    ...     urllib.request.urlretrieve(..., reporthook=reporthook)
    """
    last_b = [0]

    def update_to(b=1, bsize=1, tsize=None):
        """
        b : int, optional
            Number of blocks transferred so far [default: 1].
        bsize : int, optional
            Size of each block (in tqdm units) [default: 1].
        tsize : int, optional
            Total size (in tqdm units). If [default: None], it remains unchanged.
        """
        if tsize is not None:
            t.total = tsize
        t.update((b - last_b[0]) * bsize)
        last_b[0] = b

    return update_to

def request_mfits_by_date_MDI(moment, email=EMAIL, path_to_save='MDIdataset', verbose=False):
    """Request a single MDI fits from the JSOC database.

    moment: pd.Timestamp (or datetime) object
    return: filepath to the magnetogram
    """
    filename = 'mdi.fd_m_96m_lev182.' + moment.strftime('%Y%m%d_%H%M%S_TAI.data.fits')
    filepath = os.path.join(path_to_save, filename)
    if not os.path.exists(filepath):
        c = drms.Client(email=email, verbose=verbose)
        str_for_query = 'mdi.fd_M_96m_lev182' + moment.strftime('[%Y.%m.%d_%H:%M:%S_TAI]')
        logger.info('Magnetogram {} will be downloaded ...'.format(str_for_query))
        r = c.export(str_for_query, method='url', protocol='fits')
        logger.debug(r)
        try:
            r.wait()
            logger.info(r.request_url)
        except Exception as e:
            logger.warning('Cannot wait anymore, skipping. Got exception: {}'.format(e))
        try:
            logger.info('Downloading data and saving to {}'.format(filepath))
            r.download(path_to_save, verbose=verbose)
        except Exception as e:
            logger.error('Got error while trying to download: {}'.format(e))
            logger.warning('Skipping this date')
    return filepath
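
# Illustrative call (a sketch, kept as a comment so nothing runs on import;
# the date is an example and assumes JSOC has an MDI record for it):
#
# >>> moment = pd.to_datetime('2003-10-28 00:00:00')
# >>> path = request_mfits_by_date_MDI(moment, email=EMAIL)
# >>> read_fits_to_map(path, plot_show=True)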

def request_batch_mfits_by_date(moment,
                                period_of_days=30, email=EMAIL,
                                path_to_save='dataset',
                                verbose=False,
                                type_mag='MDI',
                                token=tg_bot_token,
                                chat_id=sun_group_id):
    """Request a batch of fits covering a period of days and return:
    the request url,
    the period of days that was actually applied,
    the first date of the batch,
    the last date of the batch,
    the number of files in the batch.
    """
    c = drms.Client(email=email, verbose=verbose)

    def set_str_for_query(period_of_days=period_of_days):
        if type_mag == 'MDI':
            str_for_query = 'mdi.fd_M_96m_lev182' + moment.strftime('[%Y.%m.%d_%H:%M:%S_TAI/{}d@24h]'.format(period_of_days))
            filename_to_check = 'mdi.fd_m_96m_lev182.' + moment.strftime('%Y%m%d_%H%M%S_TAI.data.fits')
            path_to_save = 'MDIdataset'
        if type_mag == 'HMI':
            str_for_query = 'hmi.m_720s' + moment.strftime('[%Y.%m.%d_%H:%M:%S_TAI/{}d@24h]'.format(period_of_days))
            path_to_save = 'HMIdataset'
            filename_to_check = 'hmi.m_720s.' + moment.strftime('%Y%m%d_%H%M%S_TAI.magnetogram.fits')
        return str_for_query, path_to_save, filename_to_check

    str_for_query, path_to_save, filename_to_check = set_str_for_query()
    logger.debug('{}\n{}\n{}'.format(str_for_query, path_to_save, filename_to_check))
    if os.path.exists(os.path.join(path_to_save, filename_to_check)):
        period_of_days = 10
        logger.info('Files already exist. Skipping download, advancing by a batch of {} days'.format(period_of_days))
        return None, period_of_days, moment, moment + datetime.timedelta(days=period_of_days), period_of_days
    logger.info('Magnetogram {} will be downloaded ...'.format(str_for_query))
    r = c.export(str_for_query, protocol='fits')
    logger.debug(r)
    logger.debug(r.has_failed())
    threshold = round(math.log(period_of_days) ** 2 / 2)
    # shrink the requested period until JSOC accepts the export
    while r.has_failed():
        period_of_days -= round(threshold)
        if period_of_days < round(threshold / 2):
            logger.warning('Period of days is too small, resetting this request to 10 days')
            logger.warning('Export request was: {}'.format(str_for_query))
            period_of_days = 10
            return None, period_of_days, moment, moment + datetime.timedelta(days=period_of_days), period_of_days
        time.sleep(1)
        logger.info('Export request has failed. Reducing the number of days in it by {}. Days in request now: {}'.format(int(threshold), period_of_days))
        str_for_query, _, _ = set_str_for_query(period_of_days=period_of_days)
        logger.debug('Request string: {}'.format(str_for_query))
        r = c.export(str_for_query, protocol='fits')
        logger.debug(r)
    logger.debug(len(r.data))
    try:
        r.wait(sleep=10, retries_notfound=10)
    except Exception as e:
        logger.error('Cannot wait anymore, skipping. Got exception: {}'.format(e))
    logger.info('Downloading data and saving to {}'.format(path_to_save))
    # pull the date out of record strings like 'mdi.fd_M_96m_lev182[2001.01.01_00:00:00_TAI]'
    # and normalize it to '2001-01-01'
    first_date_batch = r.urls[0:]['record'].values[0].replace('[', ' ').split()[1].split('_')[0].replace('.', '-')
    last_date_batch = r.urls[-1:]['record'].values[0].replace('[', ' ').split()[1].split('_')[0].replace('.', '-')
    with tg_tqdm(r.urls.index, token=token, chat_id=chat_id, desc='DOWNLOAD BATCH',
                 postfix='start_date = {}, end_date = {}'.format(first_date_batch, last_date_batch)) as batch_d:
        for ind in batch_d:
            try:
                urllib.request.urlretrieve(r.urls.url[ind], os.path.join(path_to_save, r.urls.filename[ind]))
            except Exception as e:
                logger.error('Got error while trying to download {}: {}'.format(r.urls.url[ind], repr(e)))
                logger.warning('Skipping this file')
    len_batch = len(r.urls)
    return r.request_url, period_of_days, first_date_batch, last_date_batch, len_batch
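
# Illustrative call (a sketch; the date and batch size are examples only):
#
# >>> url, days, first, last, n = request_batch_mfits_by_date(
# ...     pd.to_datetime('2001-01-01'), period_of_days=30,
# ...     email=EMAIL, type_mag='MDI')
#
# `days` may come back smaller than requested if JSOC kept rejecting the
# export, and `url` is None when the batch was already on disk.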

def request_mfits_by_date_HMI(moment, email=EMAIL, path_to_save='HMIdataset', verbose=False):
    """Request a single HMI fits from the JSOC database.

    moment: pd.Timestamp (or datetime) object
    return: filepath to the magnetogram
    """
    filename = 'hmi.m_720s.' + moment.strftime('%Y%m%d_%H%M%S_TAI.magnetogram.fits')
    filepath = os.path.join(path_to_save, filename)
    if not os.path.exists(filepath):
        c = drms.Client(email=email, verbose=verbose)
        str_for_query = 'hmi.m_720s' + moment.strftime('[%Y.%m.%d_%H:%M:%S_TAI]{magnetogram}')
        logger.info('Magnetogram {} will be downloaded ...'.format(str_for_query))
        r = c.export(str_for_query, method='url', protocol='fits')
        logger.debug(r)
        try:
            r.wait()
            logger.info(r.request_url)
        except Exception as e:
            logger.warning('Cannot wait anymore, skipping. Got exception: {}'.format(e))
        try:
            logger.info('Downloading data and saving to {}'.format(filepath))
            r.download(path_to_save, verbose=verbose)
        except Exception as e:
            logger.error('Got error while trying to download: {}'.format(e))
            logger.warning('Skipping this date')
    return filepath

def read_fits_to_map(filepath, plot_show=False):
    """Read a fits file into a sunpy map and optionally plot it
    (the commented line plots in logarithmic scale).

    return
        mymap: sunpy.map.Map object
    """
    mymap = sunpy.map.Map(filepath)
    if plot_show:
        plt.figure(figsize=(12, 12))
        # data = np.sign(mymap.data) * np.log1p(np.abs(mymap.data))
        data = mymap.data
        plt.imshow(data, cmap='gray')
    return mymap

def region_coord_list(datestr, sunspots_df, limit_deg=45):
    """Work with the sunspot_1996_2017.pkl dataframe and return a list of
    tuples (date, NOAA number, location); used in cropping.

    args:
        datestr: date string in the format used in the dataframe, e.g. '2001-04-30'
        sunspots_df: dataframe from the file sunspot_1996_2017.pkl
    return: list of tuples
    """
    date_df = sunspots_df.loc[datestr]
    date_df.index = date_df.index.droplevel()
    rc_list = []
    for index, row in date_df.iterrows():
        try:
            # keep only regions within +/- limit_deg in both latitude and longitude
            restriction_degree = (abs(float(row.location[1:3])) <= limit_deg) and (abs(float(row.location[4:])) <= limit_deg)
            if restriction_degree:
                rc_list.append((pd.to_datetime(datestr, format='%Y-%m-%d'), index, row.location))
        except ValueError as e:
            if TG_LOGGER:
                time.sleep(SLEEP)
            logger.warning('Error reading location {} in degrees for date {}: {}'.format(row.location, datestr, e))
        except Exception as e:
            if TG_LOGGER:
                time.sleep(SLEEP)
            logger.error('Error reading location {} in degrees for date {}: {}'.format(row.location, datestr, e))
    return rc_list
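
# What the returned tuples look like (hypothetical rows; the NOAA numbers and
# locations below are made up for illustration):
#
# >>> region_coord_list('2001-04-30', sunspots)
# [(Timestamp('2001-04-30 00:00:00'), 9433, 'N13W05'),
#  (Timestamp('2001-04-30 00:00:00'), 9445, 'S08E22')]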

def return_pixel_from_map(mag_map, record, limit_deg=45):
    """Convert a lon/lat location string like 'N20E18' to pixel coordinates
    on the sun map and return them."""
    pattern = re.compile(r"[NS]\d{2}[EW]\d{2}")
    assert bool(pattern.match(record)), 'Pattern should be in the same format as N20E18'
    assert (abs(float(record[1:3])) <= limit_deg) and (abs(float(record[4:])) <= limit_deg), \
        'Consider only regions between -{0}, +{0} degrees'.format(limit_deg)
    # South and East are the negative directions
    if record[0] == 'N':
        lat = float(record[1:3])
    else:
        lat = -float(record[1:3])
    if record[3] == 'W':
        lon = float(record[4:])
    else:
        lon = -float(record[4:])
    # heliographic -> helioprojective -> pixel
    hpc_coord = sunpy.wcs.convert_hg_hpc(lon, lat, b0_deg=mag_map.meta['crlt_obs'])
    coord = SkyCoord(hpc_coord[0] * u.arcsec, hpc_coord[1] * u.arcsec, frame=mag_map.coordinate_frame)
    pixel_pos = mag_map.world_to_pixel(coord)
    return pixel_pos
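
# The sign convention in one worked example: 'S08E22' parses to lat = -8.0
# (South) and lon = -22.0 (East) before the coordinate conversion:
#
# >>> # pxs = return_pixel_from_map(mag_map, 'S08E22')  # needs a loaded map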

def crop_regions(mag_map, rc_list, type_mag, delta=100, plot_rec=False, plot_crop=False, limit_deg=45):
    """Crop regions of size 2.5*delta by 2*delta pixels and save them to disk.
    If plot_rec, plot the region rectangles on the full disk;
    if plot_crop, plot only the cropped regions.
    """
    # data = np.sign(mag_map.data) * np.log1p(np.abs(mag_map.data))
    data = mag_map.data
    # the crop size depends on the instrument resolution
    if type_mag == 'MDI':
        delta = 100
    if type_mag == 'HMI':
        delta = 200
    if plot_rec:
        fig, ax = plt.subplots(1, figsize=(12, 12))
        ax.matshow(data)
        plt.gray()
        ax.set_title('{} magnetogram at '.format(type_mag) + rc_list[0][0].strftime('%Y-%m-%d %H:%M'))
        for record in rc_list:
            try:
                pxs = return_pixel_from_map(mag_map, record[2], limit_deg)
            except Exception as e:
                logger.error('Error getting pixel coordinates from the map: {}. Skipping it'.format(e))
                continue
            px, py = pxs[0].value, pxs[1].value
            rect = patches.Rectangle((px - 1.25 * delta, py - delta), 2.5 * delta, 2 * delta,
                                     linewidth=3, edgecolor='r', facecolor='none')
            ax.add_patch(rect)
            ax.annotate('{}.AR'.format(type_mag) + str(record[1]), xy=(px, py),
                        xytext=(px, py - 50), color='yellow', fontsize='xx-large')
        plt.show()
    submaps = []
    for record in rc_list:
        filename = '{}.{}.AR{}.fits'.format(type_mag, record[0].strftime('%Y-%m-%d_%H%M%S'), record[1])
        filepath = os.path.join('{}dataset/fragments'.format(type_mag), filename)
        try:
            pxs = return_pixel_from_map(mag_map, record[2], limit_deg)
        except Exception as e:
            logger.error('Error getting pixel coordinates from the map: {}. Skipping it'.format(e))
            continue
        # corners as pixel Quantities: bottom-left and top-right of the crop
        bot_l = u.Quantity([pxs[0] - 1.25 * delta * u.pixel, pxs[1] - delta * u.pixel])
        top_r = u.Quantity([pxs[0] + 1.25 * delta * u.pixel, pxs[1] + delta * u.pixel])
        submap = mag_map.submap(bot_l, top_r)
        if plot_crop:
            submap.peek()
        try:
            submap.save(filepath)
        except Exception as e:
            if TG_LOGGER:
                time.sleep(SLEEP)
            logger.info('Could not save fits {} because: {}. Skipping it'.format(filename, e))
        submaps.append(submap)
    return submaps
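
# A minimal end-to-end sketch for one magnetogram (the filename is
# hypothetical and assumed to exist in MDIdataset/):
#
# >>> sun_map = read_fits_to_map('MDIdataset/mdi.fd_m_96m_lev182.20010430_000000_TAI.data.fits')
# >>> rc_list = region_coord_list('2001-04-30', sunspots)
# >>> submaps = crop_regions(sun_map, rc_list, type_mag='MDI', plot_rec=True)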

def date_compare(date):
    """Return True if `date` falls before DATE_DELIMIT, i.e. in the MDI era."""
    return date < datetime.datetime.strptime(DATE_DELIMIT, '%Y-%m-%d')
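
# For example (dates chosen for illustration):
#
# >>> date_compare(datetime.datetime(2005, 1, 1))  # True  -> use MDI
# >>> date_compare(datetime.datetime(2015, 1, 1))  # False -> use HMI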

if __name__ == '__main__':
    check_dataset_directory()
    message_of_start()
    try:
        sunspots = pickle.load(urllib.request.urlopen('https://raw.githubusercontent.com/iknyazeva/FitsProcessing/master/sunspot_1996_2017.pkl'))
        logger.info('Loading the sunspot dataframe succeeded!')
    except Exception as e:
        logger.error('Cannot load the sunspot dataframe, halting parsing! Got exception: {}'.format(e))
        raise
    if START_DATE:
        try:
            start_moment = sunspots[(sunspots.index.get_level_values(0) > START_DATE)].index.get_level_values(0)[0]
        except IndexError as e:
            logger.info('Index out of bounds. Possibly the table has ended: {}'.format(e))
            start_moment = START_DATE
        except Exception as e:
            logger.error('Some error while getting start_moment for the first iteration: {}'.format(e))
    else:
        start_moment = sunspots.index.get_level_values(0)[0]
    logger.debug(start_moment)
    count_of_days_left = len(sunspots[(sunspots.index.get_level_values(0) >= start_moment)].groupby(level=0))
    logger.debug(count_of_days_left)
    with tg_tqdm(sunspots[(sunspots.index.get_level_values(0) > start_moment)].groupby(level=0),
                 token=tg_bot_token, chat_id=sun_group_id, desc='MAIN PROGRESS DOWNLOAD') as tgm:
        number_batch = 1
        while count_of_days_left > 0:
            tgm.set_postfix(batch=number_batch)
            # MDI covers the dates before DATE_DELIMIT, HMI the dates after it
            if date_compare(start_moment):
                request_url, \
                    period_of_days, \
                    first_date_batch, \
                    last_date_batch, \
                    len_batch = request_batch_mfits_by_date(start_moment, period_of_days=PERIOD,
                                                            email=EMAIL, type_mag='MDI', verbose=VERBOSE)
            else:
                request_url, \
                    period_of_days, \
                    first_date_batch, \
                    last_date_batch, \
                    len_batch = request_batch_mfits_by_date(start_moment, period_of_days=PERIOD,
                                                            email=EMAIL, type_mag='HMI', verbose=VERBOSE)
            logger.debug('Returned period of days: {}'.format(period_of_days))
            try:
                start_moment = sunspots[(sunspots.index.get_level_values(0) > last_date_batch)].index.get_level_values(0)[0]
            except IndexError as e:
                logger.info('Index out of bounds. Possibly the table has ended: {}'.format(e))
                count_of_days_left = 0  # nothing left after this batch, stop the loop
            except Exception as e:
                logger.error('Some error while getting start_moment for the next iteration: {}'.format(e))
            else:
                count_of_days_left = len(sunspots[(sunspots.index.get_level_values(0) >= start_moment)].groupby(level=0))
            number_batch += 1
            with open('requests_urls.csv', 'a', newline='') as file:
                csv.writer(file).writerow([request_url])
            tgm.update(len_batch)

    message_of_start_cropping()
    if CROP_DATE:
        crop_df = sunspots[(sunspots.index.get_level_values(0) > CROP_DATE)]
    else:
        crop_df = sunspots
    with tg_tqdm(range(1), tg_bot_token, sun_group_id,
                 total=len(crop_df.groupby(level=0)), desc='CROPPING PROGRESS') as tgt:

        def is_progress(acc, total, progress=PROGRESS, tqdm_instance=tgt):
            """Advance the Telegram progress bar every `progress` processed days."""
            if acc % progress == 0:
                logger.debug('In if acc = {}'.format(acc))
                time.sleep(SLEEP)
                tqdm_instance.update(progress)
            elif acc >= total:
                logger.debug('In elif acc = {}'.format(acc))
                time.sleep(SLEEP)
                tqdm_instance.update(total % progress)
            return True

        acc = 0
        total = len(crop_df.groupby(level=0))
        logger.debug(total)
        for date, df in crop_df.groupby(level=0):
            rc_list = region_coord_list(str(date), df, limit_deg=45)
            if not rc_list:
                acc += 1
                time.sleep(SLEEP)
                is_progress(acc, total)
                logger.debug('rc_list is empty - {}, acc = {}'.format(rc_list, acc))
                continue
            # pick the instrument that covers this date
            if date_compare(date):
                filename = 'mdi.fd_m_96m_lev182.' + date.strftime('%Y%m%d_%H%M%S_TAI') + '*.fits'
                path = 'MDIdataset/'
                type_mag = 'MDI'
            else:
                filename = 'hmi.m_720s.' + date.strftime('%Y%m%d_%H%M%S_TAI') + '*.fits'
                path = 'HMIdataset/'
                type_mag = 'HMI'
            try:
                filepath = glob.glob(path + filename)[0]
                if TG_LOGGER:
                    time.sleep(SLEEP)
                logger.debug('filepath: {}'.format(filepath))
            except IndexError:
                if TG_LOGGER:
                    time.sleep(SLEEP)
                logger.info('File for date {} does not exist'.format(str(date)))
                acc += 1
                is_progress(acc, total)
                continue
            except Exception as e:
                if TG_LOGGER:
                    time.sleep(SLEEP)
                logger.error('Some error with glob: {}'.format(e))
                acc += 1
                is_progress(acc, total)
                continue
            try:
                sun_map = read_fits_to_map(filepath, plot_show=False)
                crop_regions(sun_map, rc_list, plot_rec=False, plot_crop=False, type_mag=type_mag)
            except ValueError as e:
                if TG_LOGGER:
                    time.sleep(SLEEP)
                logger.info('Got exception while reading: {}'.format(e))
                logger.info('Moving on, skipping it.')
            except Exception as e:
                if TG_LOGGER:
                    time.sleep(SLEEP)
                logger.error('Got exception while reading: {}'.format(e))
                logger.warning('Moving on, skipping it.')
            acc += 1
            logger.debug('acc = {}'.format(acc))
            is_progress(acc, total)
    if FILE_DELETE:
        clean_folder('MDIdataset')
        clean_folder('HMIdataset')