Correct and simplify URLError exception handling
1 """Contains various object definitions needed by the weather utility."""
2
3 weather_copyright = """\
4 # Copyright (c) 2006-2020 Jeremy Stanley <fungi@yuggoth.org>. Permission to
5 # use, copy, modify, and distribute this software is granted under terms
6 # provided in the LICENSE file distributed with this software.
7 #"""
8
9 weather_version = "2.4.1"
10
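# Earth's mean radius, used to convert great-circle angles (expressed in
# radians) into distances in kilometers or miles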
11 radian_to_km = 6372.795484
12 radian_to_mi = 3959.871528
13
14 def pyversion(ref=None):
15     """Determine the Python version and optionally compare to a reference."""
16     import platform
17     ver = platform.python_version()
18     if ref:
19         return [
20             int(x) for x in ver.split(".")[:2]
21         ] >= [
22             int(x) for x in ref.split(".")[:2]
23         ]
24     else: return ver
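# Illustrative usage: pyversion() returns the interpreter's version string
# (e.g. "3.8.10"), while pyversion("3") returns True when the interpreter is
# at least Python 3.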
25
26 class Selections:
27     """An object to contain selection data."""
28     def __init__(self):
29         """Store the config, options and arguments."""
30         self.config = get_config()
31         self.options, self.arguments = get_options(self.config)
32         if self.get_bool("cache") and self.get_bool("cache_search") \
33             and not self.get_bool("longlist"):
34             integrate_search_cache(
35                 self.config,
36                 self.get("cachedir"),
37                 self.get("setpath")
38             )
39         if not self.arguments:
40             if "id" in self.options.__dict__ \
41                 and self.options.__dict__["id"]:
42                 self.arguments.append( self.options.__dict__["id"] )
43                 del( self.options.__dict__["id"] )
44                 import sys
45                 message = "WARNING: the --id option is deprecated and will eventually be removed\n"
46                 sys.stderr.write(message)
47             elif "city" in self.options.__dict__ \
48                 and self.options.__dict__["city"] \
49                 and "st" in self.options.__dict__ \
50                 and self.options.__dict__["st"]:
51                 self.arguments.append(
52                     "^%s city, %s" % (
53                         self.options.__dict__["city"],
54                         self.options.__dict__["st"]
55                     )
56                 )
57                 del( self.options.__dict__["city"] )
58                 del( self.options.__dict__["st"] )
59                 import sys
60                 message = "WARNING: the --city/--st options are deprecated and will eventually be removed\n"
61                 sys.stderr.write(message)
62     def get(self, option, argument=None):
63         """Retrieve data from the config or options."""
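        # Lookup order: a per-argument config section (created via guess() on
        # first use) takes precedence, then the parsed command-line options;
        # if neither supplies a value, a warning is written and None returned.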
64         if argument:
65             if self.config.has_section(argument) and (
66                 self.config.has_option(argument, "city") \
67                     or self.config.has_option(argument, "id") \
68                     or self.config.has_option(argument, "st")
69             ):
70                 self.config.remove_section(argument)
71                 import sys
72                 message = "WARNING: the city/id/st options are now unsupported in aliases\n"
73                 sys.stderr.write(message)
74             if not self.config.has_section(argument):
75                 guessed = guess(
76                     argument,
77                     path=self.get("setpath"),
78                     info=self.get("info"),
79                     cache_search=(
80                         self.get("cache") and self.get("cache_search")
81                     ),
82                     cachedir=self.get("cachedir"),
83                     quiet=self.get_bool("quiet")
84                 )
85                 self.config.add_section(argument)
86                 for item in guessed.items():
87                     self.config.set(argument, *item)
88             if self.config.has_option(argument, option):
89                 return self.config.get(argument, option)
90         if option in self.options.__dict__:
91             return self.options.__dict__[option]
92         import sys
93         message = "WARNING: no URI defined for %s\n" % option
94         sys.stderr.write(message)
95         return None
96     def get_bool(self, option, argument=None):
97         """Get data and coerce to a boolean if necessary."""
98         return bool(self.get(option, argument))
99     def getint(self, option, argument=None):
100         """Get data and coerce to an integer if necessary."""
101         value = self.get(option, argument)
102         if value: return int(value)
103         else: return 0
104
105 def average(coords):
106     """Average a list of coordinates."""
107     x = 0
108     y = 0
109     for coord in coords:
110         x += coord[0]
111         y += coord[1]
112     count = len(coords)
113     return (x/count, y/count)
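# Illustrative: average([(0, 0), (2, 4)]) yields (1.0, 2.0) under Python 3's
# true division; under Python 2 the result is truncated to integers.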
114
115 def filter_units(line, units="imperial"):
116     """Filter or convert units in a line of text between US/UK and metric."""
117     import re
118     # filter lines with both pressures in the form of "X inches (Y hPa)" or
119     # "X in. Hg (Y hPa)"
120     dual_p = re.match(
121         r"(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
122         line
123     )
124     if dual_p:
125         preamble, in_hg, i_fr, i_un, hpa, h_fr, trailer = dual_p.groups()
126         if units == "imperial": line = preamble + in_hg + trailer
127         elif units == "metric": line = preamble + hpa + trailer
128     # filter lines with both temperatures in the form of "X F (Y C)"
129     dual_t = re.match(
130         r"(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
131         line
132     )
133     if dual_t:
134         preamble, fahrenheit, f_fr, celsius, c_fr, trailer = dual_t.groups()
135         if units == "imperial": line = preamble + fahrenheit + trailer
136         elif units == "metric": line = preamble + celsius + trailer
137     # if metric is desired, convert distances in the form of "X mile(s)" to
138     # "Y kilometer(s)"
139     if units == "metric":
140         imperial_d = re.match(
141             r"(.* )(\d+)( mile\(s\))(.*)",
142             line
143         )
144         if imperial_d:
145             preamble, mi, m_u, trailer = imperial_d.groups()
146             line = preamble + str(int(round(int(mi)*1.609344))) \
147                 + " kilometer(s)" + trailer
148     # filter speeds in the form of "X MPH (Y KT)" to just "X MPH"; if metric is
149     # desired, convert to "Z KPH"
150     imperial_s = re.match(
151         r"(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
152         line
153     )
154     if imperial_s:
155         preamble, mph, m_u, kt, trailer = imperial_s.groups()
156         if units == "imperial": line = preamble + mph + m_u + trailer
157         elif units == "metric": 
158             line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
159                 trailer
170     # if imperial is desired, qualify given forecast temperatures like "X F"; if
171     # metric is desired, convert to "Y C"
172     imperial_t = re.match(
173         r"(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
174         line
175     )
176     if imperial_t:
177         preamble, parameter, fahrenheit, sep, trailer = imperial_t.groups()
178         if units == "imperial":
179             line = preamble + parameter + fahrenheit + " F" + sep + trailer
180         elif units == "metric":
181             line = preamble + parameter \
182                 + str(int(round((int(fahrenheit)-32)*5/9))) + " C" + sep \
183                 + trailer
184     # hand off the resulting line
185     return line
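# Illustrative: filter_units("Temperature: 68 F (20 C)", units="metric")
# returns "Temperature: 20 C", while the default imperial filtering keeps
# "Temperature: 68 F".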
186
187 def get_uri(
188     uri,
189     ignore_fail=False,
190     cache_data=False,
191     cacheage=900,
192     cachedir="."
193 ):
194     """Return a string containing the results of a URI GET."""
195     if pyversion("3"):
196         import urllib, urllib.error, urllib.request
197         URLError = urllib.error.URLError
198         urlopen = urllib.request.urlopen
199     else:
200         import urllib2 as urllib
201         URLError = urllib.URLError
202         urlopen = urllib.urlopen
203     import os, time
204     if cache_data:
205         dcachedir = os.path.join( os.path.expanduser(cachedir), "datacache" )
206         if not os.path.exists(dcachedir):
207             try: os.makedirs(dcachedir)
208             except (IOError, OSError): pass
209         dcache_fn = os.path.join(
210             dcachedir,
211             uri.split(":",1)[1].replace("/","_")
212         )
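        # e.g. an illustrative URI such as "https://example.org/data/KJFK.TXT"
        # is cached in the file "__example.org_data_KJFK.TXT" under datacache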
213     now = time.time()
214     if cache_data and os.access(dcache_fn, os.R_OK) \
215         and now-cacheage < os.stat(dcache_fn).st_mtime <= now:
216         dcache_fd = open(dcache_fn)
217         data = dcache_fd.read()
218         dcache_fd.close()
219     else:
220         try:
221             data = urlopen(uri).read().decode("utf-8")
222         except URLError:
223             if ignore_fail: return ""
224             import os, sys
225             sys.stderr.write("%s error: failed to retrieve\n   %s\n\n" % (
226                 os.path.basename( sys.argv[0] ), uri))
227             raise
228         # Some data sources are HTML with the plain text wrapped in pre tags
229         if "<pre>" in data:
230             data = data[data.find("<pre>")+5:data.find("</pre>")]
231         if cache_data:
232             try:
233                 import codecs
234                 dcache_fd = codecs.open(dcache_fn, "w", "utf-8")
235                 dcache_fd.write(data)
236                 dcache_fd.close()
237             except (IOError, OSError): pass
238     return data
239
240 def get_metar(
241     uri=None,
242     verbose=False,
243     quiet=False,
244     headers=None,
245     imperial=False,
246     metric=False,
247     cache_data=False,
248     cacheage=900,
249     cachedir="."
250 ):
251     """Return a summarized METAR for the specified station."""
252     if not uri:
253         import os, sys
254         message = "%s error: METAR URI required for conditions\n" % \
255             os.path.basename( sys.argv[0] )
256         sys.stderr.write(message)
257         sys.exit(1)
258     metar = get_uri(
259         uri,
260         cache_data=cache_data,
261         cacheage=cacheage,
262         cachedir=cachedir
263     )
264     if pyversion("3") and type(metar) is bytes: metar = metar.decode("utf-8")
265     if verbose: return metar
266     else:
267         import re
268         lines = metar.split("\n")
269         if not headers:
270             headers = \
271                 "relative_humidity," \
272                 + "precipitation_last_hour," \
273                 + "sky conditions," \
274                 + "temperature," \
275                 + "heat index," \
276                 + "windchill," \
277                 + "weather," \
278                 + "wind"
279         headerlist = headers.lower().replace("_"," ").split(",")
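        # e.g. the header "relative_humidity" becomes "relative humidity" and
        # matches a decoded line such as "Relative Humidity: 68%" (illustrative
        # value) via the case-insensitive prefix test below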
280         output = []
281         if not quiet:
282             title = "Current conditions at %s"
283             place = lines[0].split(", ")
284             if len(place) > 1:
285                 place = "%s, %s" % ( place[0].title(), place[1] )
286             else: place = "<UNKNOWN>"
287             output.append(title%place)
288             output.append("Last updated " + lines[1])
289         header_match = False
290         for header in headerlist:
291             for line in lines:
292                 if line.lower().startswith(header + ":"):
293                     if re.match(r".*:\d+$", line): line = line[:line.rfind(":")]
294                     if imperial: line = filter_units(line, units="imperial")
295                     elif metric: line = filter_units(line, units="metric")
296                     if quiet: output.append(line)
297                     else: output.append("   " + line)
298                     header_match = True
299         if not header_match:
300             output.append(
301                 "(no conditions matched your header list, try with --verbose)"
302             )
303         return "\n".join(output)
304
305 def get_alert(
306     uri=None,
307     verbose=False,
308     quiet=False,
309     cache_data=False,
310     cacheage=900,
311     cachedir="."
312 ):
313     """Return alert notice for the specified URI."""
314     if not uri:
315         return ""
316     alert = get_uri(
317         uri,
318         ignore_fail=True,
319         cache_data=cache_data,
320         cacheage=cacheage,
321         cachedir=cachedir
322     ).strip()
323     if pyversion("3") and type(alert) is bytes: alert = alert.decode("utf-8")
324     if alert:
325         if verbose: return alert
326         else:
327             if alert.find("\nNATIONAL WEATHER SERVICE") == -1:
328                 muted = False
329             else:
330                 muted = True
331             lines = alert.split("\n")
332             import time
333             valid_time = time.strftime("%Y%m%d%H%M")
334             output = []
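            # NWS products end a segment with "$$" and separate sections with
            # "&&"; text after "$$" stays muted until another "NATIONAL WEATHER
            # SERVICE" header appears (an interpretation of the handling
            # below), and expired notices are dropped entirely.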
335             for line in lines:
336                 if line.startswith("Expires:") \
337                     and "Expires:" + valid_time > line:
338                     return ""
339                 if muted and line.startswith("NATIONAL WEATHER SERVICE"):
340                     muted = False
341                     line = ""
342                 elif line == "&&":
343                     line = ""
344                 elif line == "$$":
345                     muted = True
346                 if line and not muted:
347                     if quiet: output.append(line)
348                     else: output.append("   " + line)
349             return "\n".join(output)
350
351 def get_options(config):
352     """Parse the options passed on the command line."""
353
354     # for optparse's builtin -h/--help option
355     usage = \
356         "usage: %prog [options] [alias1|search1 [alias2|search2 [...]]]"
357
358     # for optparse's builtin --version option
359     verstring = "%prog " + weather_version
360
361     # create the parser
362     import optparse
363     option_parser = optparse.OptionParser(usage=usage, version=verstring)
364     # separate options object from list of arguments and return both
365
366     # the -a/--alert option
367     if config.has_option("default", "alert"):
368         default_alert = bool(config.get("default", "alert"))
369     else: default_alert = False
370     option_parser.add_option("-a", "--alert",
371         dest="alert",
372         action="store_true",
373         default=default_alert,
374         help="include local alert notices")
375
376     # the --atypes option
377     if config.has_option("default", "atypes"):
378         default_atypes = config.get("default", "atypes")
379     else:
380         default_atypes = \
381             "coastal_flood_statement," \
382             + "flash_flood_statement," \
383             + "flash_flood_warning," \
384             + "flash_flood_watch," \
385             + "flood_statement," \
386             + "flood_warning," \
387             + "severe_thunderstorm_warning," \
388             + "severe_weather_statement," \
389             + "special_weather_statement," \
390             + "urgent_weather_message"
391     option_parser.add_option("--atypes",
392         dest="atypes",
393         default=default_atypes,
394         help="list of alert notification types to display")
395
396     # the --build-sets option
397     option_parser.add_option("--build-sets",
398         dest="build_sets",
399         action="store_true",
400         default=False,
401         help="(re)build location correlation sets")
402
403     # the --cacheage option
404     if config.has_option("default", "cacheage"):
405         default_cacheage = config.getint("default", "cacheage")
406     else: default_cacheage = 900
407     option_parser.add_option("--cacheage",
408         dest="cacheage",
409         default=default_cacheage,
410         help="duration in seconds to refresh cached data")
411
412     # the --cachedir option
413     if config.has_option("default", "cachedir"):
414         default_cachedir = config.get("default", "cachedir")
415     else: default_cachedir = "~/.weather"
416     option_parser.add_option("--cachedir",
417         dest="cachedir",
418         default=default_cachedir,
419         help="directory for storing cached searches and data")
420
421     # the -f/--forecast option
422     if config.has_option("default", "forecast"):
423         default_forecast = bool(config.get("default", "forecast"))
424     else: default_forecast = False
425     option_parser.add_option("-f", "--forecast",
426         dest="forecast",
427         action="store_true",
428         default=default_forecast,
429         help="include a local forecast")
430
431     # the --headers option
432     if config.has_option("default", "headers"):
433         default_headers = config.get("default", "headers")
434     else:
435         default_headers = \
436             "temperature," \
437             + "relative_humidity," \
438             + "wind," \
439             + "heat_index," \
440             + "windchill," \
441             + "weather," \
442             + "sky_conditions," \
443             + "precipitation_last_hour"
444     option_parser.add_option("--headers",
445         dest="headers",
446         default=default_headers,
447         help="list of conditions headers to display")
448
449     # the --imperial option
450     if config.has_option("default", "imperial"):
451         default_imperial = bool(config.get("default", "imperial"))
452     else: default_imperial = False
453     option_parser.add_option("--imperial",
454         dest="imperial",
455         action="store_true",
456         default=default_imperial,
457         help="filter/convert conditions for US/UK units")
458
459     # the --info option
460     option_parser.add_option("--info",
461         dest="info",
462         action="store_true",
463         default=False,
464         help="output detailed information for your search")
465
466     # the -l/--list option
467     option_parser.add_option("-l", "--list",
468         dest="list",
469         action="store_true",
470         default=False,
471         help="list all configured aliases and cached searches")
472
473     # the --longlist option
474     option_parser.add_option("--longlist",
475         dest="longlist",
476         action="store_true",
477         default=False,
478         help="display details of all configured aliases")
479
480     # the -m/--metric option
481     if config.has_option("default", "metric"):
482         default_metric = bool(config.get("default", "metric"))
483     else: default_metric = False
484     option_parser.add_option("-m", "--metric",
485         dest="metric",
486         action="store_true",
487         default=default_metric,
488         help="filter/convert conditions for metric units")
489
490     # the -n/--no-conditions option
491     if config.has_option("default", "conditions"):
492         default_conditions = bool(config.get("default", "conditions"))
493     else: default_conditions = True
494     option_parser.add_option("-n", "--no-conditions",
495         dest="conditions",
496         action="store_false",
497         default=default_conditions,
498         help="disable output of current conditions")
499
500     # the --no-cache option
501     if config.has_option("default", "cache"):
502         default_cache = bool(config.get("default", "cache"))
503     else: default_cache = True
504     option_parser.add_option("--no-cache",
505         dest="cache",
506         action="store_false",
507         default=default_cache,
508         help="disable all caching (searches and data)")
509
510     # the --no-cache-data option
511     if config.has_option("default", "cache_data"):
512         default_cache_data = bool(config.get("default", "cache_data"))
513     else: default_cache_data = True
514     option_parser.add_option("--no-cache-data",
515         dest="cache_data",
516         action="store_false",
517         default=default_cache_data,
518         help="disable retrieved data caching")
519
520     # the --no-cache-search option
521     if config.has_option("default", "cache_search"):
522         default_cache_search = bool(config.get("default", "cache_search"))
523     else: default_cache_search = True
524     option_parser.add_option("--no-cache-search",
525         dest="cache_search",
526         action="store_false",
527         default=default_cache_search,
528         help="disable search result caching")
529
530     # the -q/--quiet option
531     if config.has_option("default", "quiet"):
532         default_quiet = bool(config.get("default", "quiet"))
533     else: default_quiet = False
534     option_parser.add_option("-q", "--quiet",
535         dest="quiet",
536         action="store_true",
537         default=default_quiet,
538         help="skip preambles and don't indent")
539
540     # the --setpath option
541     if config.has_option("default", "setpath"):
542         default_setpath = config.get("default", "setpath")
543     else: default_setpath = ".:~/.weather"
544     option_parser.add_option("--setpath",
545         dest="setpath",
546         default=default_setpath,
547         help="directory search path for correlation sets")
548
549     # the -v/--verbose option
550     if config.has_option("default", "verbose"):
551         default_verbose = bool(config.get("default", "verbose"))
552     else: default_verbose = False
553     option_parser.add_option("-v", "--verbose",
554         dest="verbose",
555         action="store_true",
556         default=default_verbose,
557         help="show full decoded feeds")
558
559     # deprecated options
560     if config.has_option("default", "city"):
561         default_city = config.get("default", "city")
562     else: default_city = ""
563     option_parser.add_option("-c", "--city",
564         dest="city",
565         default=default_city,
566         help=optparse.SUPPRESS_HELP)
567     if config.has_option("default", "id"):
568         default_id = config.get("default", "id")
569     else: default_id = ""
570     option_parser.add_option("-i", "--id",
571         dest="id",
572         default=default_id,
573         help=optparse.SUPPRESS_HELP)
574     if config.has_option("default", "st"):
575         default_st = config.get("default", "st")
576     else: default_st = ""
577     option_parser.add_option("-s", "--st",
578         dest="st",
579         default=default_st,
580         help=optparse.SUPPRESS_HELP)
581
582     options, arguments = option_parser.parse_args()
583     return options, arguments
584
585 def get_config():
586     """Parse the aliases and configuration."""
587     if pyversion("3"): import configparser
588     else: import ConfigParser as configparser
589     config = configparser.ConfigParser()
590     import os
591     rcfiles = [
592         "/etc/weatherrc",
593         "/etc/weather/weatherrc",
594         os.path.expanduser("~/.weather/weatherrc"),
595         os.path.expanduser("~/.weatherrc"),
596         "weatherrc"
597         ]
598     for rcfile in rcfiles:
599         if os.access(rcfile, os.R_OK): config.read(rcfile)
600     for section in config.sections():
601         if section != section.lower():
602             if config.has_section(section.lower()):
603                 config.remove_section(section.lower())
604             config.add_section(section.lower())
605             for option,value in config.items(section):
606                 config.set(section.lower(), option, value)
607     return config
608
609 def integrate_search_cache(config, cachedir, setpath):
610     """Add cached search results into the configuration."""
611     if pyversion("3"): import configparser
612     else: import ConfigParser as configparser
613     import os, time
614     scache_fn = os.path.join( os.path.expanduser(cachedir), "searches" )
615     if not os.access(scache_fn, os.R_OK): return config
616     scache_fd = open(scache_fn)
617     created = float( scache_fd.readline().split(":")[1].strip().split()[0] )
618     scache_fd.close()
619     now = time.time()
620     datafiles = data_index(setpath)
621     if datafiles:
622         data_freshness = sorted(
623             [ x[1] for x in datafiles.values() ],
624             reverse=True
625         )[0]
626     else: data_freshness = now
627     if created < data_freshness <= now:
628         try:
629             os.remove(scache_fn)
630             print( "[clearing outdated %s]" % scache_fn )
631         except (IOError, OSError):
632             pass
633         return config
634     scache = configparser.ConfigParser()
635     scache.read(scache_fn)
636     for section in scache.sections():
637         if not config.has_section(section):
638             config.add_section(section)
639             for option,value in scache.items(section):
640                 config.set(section, option, value)
641     return config
642
643 def list_aliases(config, detail=False):
644     """Return a formatted list of aliases defined in the config."""
645     if detail:
646         output = "\n# configured alias details..."
647         for section in sorted(config.sections()):
648             output += "\n\n[%s]" % section
649             for item in sorted(config.items(section)):
650                 output += "\n%s = %s" % item
651         output += "\n"
652     else:
653         output = "configured aliases and cached searches..."
654         for section in sorted(config.sections()):
655             if config.has_option(section, "description"):
656                 description = config.get(section, "description")
657             else: description = "(no description provided)"
658             output += "\n   %s: %s" % (section, description)
659     return output
660
661 def data_index(path):
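    """Locate data files along the given search path, returning a dict that
    maps each data set name to a (filename, mtime) tuple."""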
662     import os
663     datafiles = {}
664     for filename in ("airports", "places", "stations", "zctas", "zones"):
665         for dirname in path.split(":"):
666             for extension in ("", ".gz", ".txt"):
667                 candidate = os.path.expanduser(
668                     os.path.join( dirname, "".join( (filename, extension) ) )
669                 )
670                 if os.path.exists(candidate):
671                     datafiles[filename] = (
672                         candidate,
673                         os.stat(candidate).st_mtime
674                     )
675                     break
676             if filename in datafiles:
677                 break
678     return datafiles
679
680 def guess(
681     expression,
682     path=".",
683     max_results=20,
684     info=False,
685     cache_search=False,
686     cacheage=900,
687     cachedir=".",
688     quiet=False
689 ):
690     """Find URIs using airport, gecos, placename, station, ZCTA/ZIP, zone."""
691     import codecs, datetime, time, os, re, sys
692     if pyversion("3"): import configparser
693     else: import ConfigParser as configparser
694     datafiles = data_index(path)
695     if re.match("[A-Za-z]{3}$", expression): searchtype = "airport"
696     elif re.match("[A-Za-z0-9]{4}$", expression): searchtype = "station"
697     elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", expression): searchtype = "zone"
698     elif re.match("[0-9]{5}$", expression): searchtype = "ZCTA"
699     elif re.match(
700         r"[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?, *[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?$",
701         expression
702     ):
703         searchtype = "coordinates"
704     elif re.match(r"(FIPS|fips)\d+$", expression): searchtype = "FIPS"
705     else:
706         searchtype = "name"
707         cache_search = False
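    # illustrative examples: "sfo" is treated as an airport code, "ksfo" as a
    # station, "caz043" as a zone, "94103" as a ZCTA/ZIP code, "fips06075" as
    # a FIPS code, and anything else as a name search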
708     if cache_search: action = "caching"
709     else: action = "using"
710     if info:
711         scores = [
712             (0.005, "bad"),
713             (0.025, "poor"),
714             (0.160, "suspect"),
715             (0.500, "mediocre"),
716             (0.840, "good"),
717             (0.975, "great"),
718             (0.995, "excellent"),
719             (1.000, "ideal"),
720         ]
721     if not quiet: print("Searching via %s..."%searchtype)
722     stations = configparser.ConfigParser()
723     dataname = "stations"
724     if dataname in datafiles:
725         datafile = datafiles[dataname][0]
726         if datafile.endswith(".gz"):
727             import gzip
728             if pyversion("3"):
729                 stations.read_string(
730                     gzip.open(datafile).read().decode("utf-8") )
731             else: stations.readfp( gzip.open(datafile) )
732         else:
733             stations.read(datafile)
734     else:
735         message = "%s error: can't find \"%s\" data file\n" % (
736             os.path.basename( sys.argv[0] ),
737             dataname
738         )
739         sys.stderr.write(message)
740         exit(1)
741     zones = configparser.ConfigParser()
742     dataname = "zones"
743     if dataname in datafiles:
744         datafile = datafiles[dataname][0]
745         if datafile.endswith(".gz"):
746             import gzip
747             if pyversion("3"):
748                 zones.read_string( gzip.open(datafile).read().decode("utf-8") )
749             else: zones.readfp( gzip.open(datafile) )
750         else:
751             zones.read(datafile)
752     else:
753         message = "%s error: can't find \"%s\" data file\n" % (
754             os.path.basename( sys.argv[0] ),
755             dataname
756         )
757         sys.stderr.write(message)
758         exit(1)
759     search = None
760     station = ("", 0)
761     zone = ("", 0)
762     dataset = None
763     possibilities = []
764     uris = {}
765     if searchtype == "airport":
766         expression = expression.lower()
767         airports = configparser.ConfigParser()
768         dataname = "airports"
769         if dataname in datafiles:
770             datafile = datafiles[dataname][0]
771             if datafile.endswith(".gz"):
772                 import gzip
773                 if pyversion("3"):
774                     airports.read_string(
775                         gzip.open(datafile).read().decode("utf-8") )
776                 else: airports.readfp( gzip.open(datafile) )
777             else:
778                 airports.read(datafile)
779         else:
780             message = "%s error: can't find \"%s\" data file\n" % (
781                 os.path.basename( sys.argv[0] ),
782                 dataname
783             )
784             sys.stderr.write(message)
785             exit(1)
786         if airports.has_section(expression) \
787             and airports.has_option(expression, "station"):
788             search = (expression, "IATA/FAA airport code %s" % expression)
789             station = ( airports.get(expression, "station"), 0 )
790             if stations.has_option(station[0], "zone"):
791                 zone = eval( stations.get(station[0], "zone") )
792                 dataset = stations
793             if not ( info or quiet ) \
794                 and stations.has_option( station[0], "description" ):
795                 print(
796                     "[%s result %s]" % (
797                         action,
798                         stations.get(station[0], "description")
799                     )
800                 )
801         else:
802             message = "No IATA/FAA airport code \"%s\" in the %s file.\n" % (
803                 expression,
804                 datafiles["airports"][0]
805             )
806             sys.stderr.write(message)
807             exit(1)
808     elif searchtype == "station":
809         expression = expression.lower()
810         if stations.has_section(expression):
811             station = (expression, 0)
812             if not search:
813                 search = (expression, "ICAO station code %s" % expression)
814             if stations.has_option(expression, "zone"):
815                 zone = eval( stations.get(expression, "zone") )
816                 dataset = stations
817             if not ( info or quiet ) \
818                 and stations.has_option(expression, "description"):
819                 print(
820                     "[%s result %s]" % (
821                         action,
822                         stations.get(expression, "description")
823                     )
824                 )
825         else:
826             message = "No ICAO weather station \"%s\" in the %s file.\n" % (
827                 expression,
828                 datafiles["stations"][0]
829             )
830             sys.stderr.write(message)
831             exit(1)
832     elif searchtype == "zone":
833         expression = expression.lower()
834         if zones.has_section(expression) \
835             and zones.has_option(expression, "station"):
836             zone = (expression, 0)
837             station = eval( zones.get(expression, "station") )
838             dataset = zones
839             search = (expression, "NWS/NOAA weather zone %s" % expression)
840             if not ( info or quiet ) \
841                 and zones.has_option(expression, "description"):
842                 print(
843                     "[%s result %s]" % (
844                         action,
845                         zones.get(expression, "description")
846                     )
847                 )
848         else:
849             message = "No usable NWS weather zone \"%s\" in the %s file.\n" % (
850                 expression,
851                 datafiles["zones"][0]
852             )
853             sys.stderr.write(message)
854             exit(1)
855     elif searchtype == "ZCTA":
856         zctas = configparser.ConfigParser()
857         dataname = "zctas"
858         if dataname in datafiles:
859             datafile = datafiles[dataname][0]
860             if datafile.endswith(".gz"):
861                 import gzip
862                 if pyversion("3"):
863                     zctas.read_string(
864                         gzip.open(datafile).read().decode("utf-8") )
865                 else: zctas.readfp( gzip.open(datafile) )
866             else:
867                 zctas.read(datafile)
868         else:
869             message = "%s error: can't find \"%s\" data file\n" % (
870                 os.path.basename( sys.argv[0] ),
871                 dataname
872             )
873             sys.stderr.write(message)
874             exit(1)
875         dataset = zctas
876         if zctas.has_section(expression) \
877             and zctas.has_option(expression, "station"):
878             station = eval( zctas.get(expression, "station") )
879             search = (expression, "Census ZCTA (ZIP code) %s" % expression)
880             if zctas.has_option(expression, "zone"):
881                 zone = eval( zctas.get(expression, "zone") )
882         else:
883             message = "No census ZCTA (ZIP code) \"%s\" in the %s file.\n" % (
884                 expression,
885                 datafiles["zctas"][0]
886             )
887             sys.stderr.write(message)
888             exit(1)
889     elif searchtype == "coordinates":
890         search = (expression, "Geographic coordinates %s" % expression)
891         stationtable = {}
892         for station in stations.sections():
893             if stations.has_option(station, "location"):
894                 stationtable[station] = {
895                     "location": eval( stations.get(station, "location") )
896                 }
897         station = closest( gecos(expression), stationtable, "location", 0.1 )
898         if not station[0]:
899             message = "No ICAO weather station found near %s.\n" % expression
900             sys.stderr.write(message)
901             exit(1)
902         zonetable = {}
903         for zone in zones.sections():
904             if zones.has_option(zone, "centroid"):
905                 zonetable[zone] = {
906                     "centroid": eval( zones.get(zone, "centroid") )
907                 }
908         zone = closest( gecos(expression), zonetable, "centroid", 0.1 )
909         if not zone[0]:
910             message = "No NWS weather zone near %s; forecasts unavailable.\n" \
911                 % expression
912             sys.stderr.write(message)
913     elif searchtype in ("FIPS", "name"):
914         places = configparser.ConfigParser()
915         dataname = "places"
916         if dataname in datafiles:
917             datafile = datafiles[dataname][0]
918             if datafile.endswith(".gz"):
919                 import gzip
920                 if pyversion("3"):
921                     places.read_string(
922                         gzip.open(datafile).read().decode("utf-8") )
923                 else: places.readfp( gzip.open(datafile) )
924             else:
925                 places.read(datafile)
926         else:
927             message = "%s error: can't find \"%s\" data file\n" % (
928                 os.path.basename( sys.argv[0] ),
929                 dataname
930             )
931             sys.stderr.write(message)
932             exit(1)
933         dataset = places
934         place = expression.lower()
935         if places.has_section(place) and places.has_option(place, "station"):
936             station = eval( places.get(place, "station") )
937             search = (expression, "Census Place %s" % expression)
938             if places.has_option(place, "description"):
939                 search = (
940                     search[0],
941                     search[1] + ", %s" % places.get(place, "description")
942                 )
943             if places.has_option(place, "zone"):
944                 zone = eval( places.get(place, "zone") )
945             if not ( info or quiet ) \
946                 and places.has_option(place, "description"):
947                 print(
948                     "[%s result %s]" % (
949                         action,
950                         places.get(place, "description")
951                     )
952                 )
953         else:
954             for place in places.sections():
955                 if places.has_option(place, "description") \
956                     and places.has_option(place, "station") \
957                     and re.search(
958                         expression,
959                         places.get(place, "description"),
960                         re.I
961                     ):
962                         possibilities.append(place)
963             for place in stations.sections():
964                 if stations.has_option(place, "description") \
965                     and re.search(
966                         expression,
967                         stations.get(place, "description"),
968                         re.I
969                     ):
970                         possibilities.append(place)
971             for place in zones.sections():
972                 if zones.has_option(place, "description") \
973                     and zones.has_option(place, "station") \
974                     and re.search(
975                         expression,
976                         zones.get(place, "description"),
977                         re.I
978                     ):
979                         possibilities.append(place)
980             if len(possibilities) == 1:
981                 place = possibilities[0]
982                 if places.has_section(place):
983                     station = eval( places.get(place, "station") )
984                     description = places.get(place, "description")
985                     if places.has_option(place, "zone"):
986                         zone = eval( places.get(place, "zone" ) )
987                     search = ( expression, "%s: %s" % (place, description) )
988                 elif stations.has_section(place):
989                     station = (place, 0.0)
990                     description = stations.get(place, "description")
991                     if stations.has_option(place, "zone"):
992                         zone = eval( stations.get(place, "zone" ) )
993                     search = ( expression, "ICAO station code %s" % place )
994                 elif zones.has_section(place):
995                     station = eval( zones.get(place, "station") )
996                     description = zones.get(place, "description")
997                     zone = (place, 0.0)
998                     search = ( expression, "NWS/NOAA weather zone %s" % place )
999                 if not ( info or quiet ):
1000                     print( "[%s result %s]" % (action, description) )
1001             if not possibilities and not station[0]:
1002                 message = "No FIPS code/census area match in the %s file.\n" % (
1003                     datafiles["places"][0]
1004                 )
1005                 sys.stderr.write(message)
1006                 exit(1)
1007     if station[0]:
1008         uris["metar"] = stations.get( station[0], "metar" )
1009         if zone[0]:
1010             for key,value in zones.items( zone[0] ):
1011                 if key not in ("centroid", "description", "station"):
1012                     uris[key] = value
1013     elif possibilities:
1014         count = len(possibilities)
1015         if count <= max_results:
1016             print( "Your search is ambiguous, returning %s matches:" % count )
1017             for place in sorted(possibilities):
1018                 if places.has_section(place):
1019                     print(
1020                         "   [%s] %s" % (
1021                             place,
1022                             places.get(place, "description")
1023                         )
1024                     )
1025                 elif stations.has_section(place):
1026                     print(
1027                         "   [%s] %s" % (
1028                             place,
1029                             stations.get(place, "description")
1030                         )
1031                     )
1032                 elif zones.has_section(place):
1033                     print(
1034                         "   [%s] %s" % (
1035                             place,
1036                             zones.get(place, "description")
1037                         )
1038                     )
1039         else:
1040             print(
1041                 "Your search is too ambiguous, returning %s matches." % count
1042             )
1043         exit(0)
1044     if info:
1045         stationlist = []
1046         zonelist = []
1047         if dataset:
1048             for section in dataset.sections():
1049                 if dataset.has_option(section, "station"):
1050                     stationlist.append(
1051                         eval( dataset.get(section, "station") )[1]
1052                     )
1053                 if dataset.has_option(section, "zone"):
1054                     zonelist.append( eval( dataset.get(section, "zone") )[1] )
1055         stationlist.sort()
1056         zonelist.sort()
1057         scount = len(stationlist)
1058         zcount = len(zonelist)
1059         sranks = []
1060         zranks = []
1061         for score in scores:
1062             if stationlist:
1063                 sranks.append( stationlist[ int( (1-score[0]) * scount ) ] )
1064             if zonelist:
1065                 zranks.append( zonelist[ int( (1-score[0]) * zcount ) ] )
1066         description = search[1]
1067         uris["description"] = description
1068         print(
1069             "%s\n%s" % ( description, "-" * len(description) )
1070         )
1071         print(
1072             "%s: %s" % (
1073                 station[0],
1074                 stations.get( station[0], "description" )
1075             )
1076         )
1077         km = radian_to_km*station[1]
1078         mi = radian_to_mi*station[1]
1079         if sranks and not description.startswith("ICAO station code "):
1080             for index in range(0, len(scores)):
1081                 if station[1] >= sranks[index]:
1082                     score = scores[index][1]
1083                     break
1084             print(
1085                 "   (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1086             )
1087         elif searchtype == "coordinates":
1088             print( "   (%.3gkm, %.3gmi)" % (km, mi) )
1089         if zone[0]:
1090             print(
1091                 "%s: %s" % ( zone[0], zones.get( zone[0], "description" ) )
1092             )
1093         km = radian_to_km*zone[1]
1094         mi = radian_to_mi*zone[1]
1095         if zranks and not description.startswith("NWS/NOAA weather zone "):
1096             for index in range(0, len(scores)):
1097                 if zone[1] >= zranks[index]:
1098                     score = scores[index][1]
1099                     break
1100             print(
1101                 "   (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1102             )
1103         elif searchtype == "coordinates" and zone[0]:
1104             print( "   (%.3gkm, %.3gmi)" % (km, mi) )
1105     if cache_search:
1106         now = time.time()
1107         nowstamp = "%s (%s)" % (
1108             now,
1109             datetime.datetime.isoformat(
1110                 datetime.datetime.fromtimestamp(now),
1111                 " "
1112             )
1113         )
1114         search_cache = ["\n"]
1115         search_cache.append( "[%s]\n" % search[0] ) 
1116         search_cache.append( "cached = %s\n" % nowstamp )
1117         for uriname in sorted(uris.keys()):
1118             search_cache.append( "%s = %s\n" % ( uriname, uris[uriname] ) )
1119         real_cachedir = os.path.expanduser(cachedir)
1120         if not os.path.exists(real_cachedir):
1121             try: os.makedirs(real_cachedir)
1122             except (IOError, OSError): pass
1123         scache_fn = os.path.join(real_cachedir, "searches")
1124         if not os.path.exists(scache_fn):
1125             then = sorted(
1126                     [ x[1] for x in datafiles.values() ],
1127                     reverse=True
1128                 )[0]
1129             thenstamp = "%s (%s)" % (
1130                 then,
1131                 datetime.datetime.isoformat(
1132                     datetime.datetime.fromtimestamp(then),
1133                     " "
1134                 )
1135             )
1136             search_cache.insert(
1137                 0,
1138                 "# based on data files from: %s\n" % thenstamp
1139             )
1140         try:
1141             scache_existing = configparser.ConfigParser()
1142             scache_existing.read(scache_fn)
1143             if not scache_existing.has_section(search[0]):
1144                 scache_fd = codecs.open(scache_fn, "a", "utf-8")
1145                 scache_fd.writelines(search_cache)
1146                 scache_fd.close()
1147         except (IOError, OSError): pass
1148     if not info:
1149         return(uris)
1150
1151 def closest(position, nodes, fieldname, angle=None):
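    """Return (name, angle) for the node whose coordinates (in radians) lie
    nearest the given position, considering only nodes within the optional
    angular cutoff."""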
1152     import math
1153     if not angle: angle = 2*math.pi
1154     match = None
1155     for name in nodes:
1156         if fieldname in nodes[name]:
1157             node = nodes[name][fieldname]
1158             if node and abs( position[0]-node[0] ) < angle:
1159                 if abs( position[1]-node[1] ) < angle \
1160                     or abs( abs( position[1]-node[1] ) - 2*math.pi ) < angle:
1161                     if position == node:
1162                         angle = 0
1163                         match = name
1164                     else:
1165                         candidate = math.acos(
1166                             math.sin( position[0] ) * math.sin( node[0] ) \
1167                                 + math.cos( position[0] ) \
1168                                 * math.cos( node[0] ) \
1169                                 * math.cos( position[1] - node[1] )
1170                             )
1171                         if candidate < angle:
1172                             angle = candidate
1173                             match = name
1174     if match: match = str(match)
1175     return (match, angle)
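# The returned angle is a great-circle separation in radians (spherical law of
# cosines); multiply it by radian_to_km or radian_to_mi for a distance, as
# guess() does when printing proximity details.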
1176
1177 def gecos(formatted):
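    """Convert a "latitude, longitude" string in decimal degrees or
    degrees-minutes-seconds (with optional N/S/E/W suffixes) into a
    (latitude, longitude) tuple of radians."""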
1178     import math, re
1179     coordinates = formatted.split(",")
1180     for coordinate in range(0, 2):
1181         degrees, foo, minutes, bar, seconds, hemisphere = re.match(
1182             r"([\+-]?\d+\.?\d*)(-(\d+))?(-(\d+))?([ensw]?)$",
1183             coordinates[coordinate].strip().lower()
1184         ).groups()
1185         value = float(degrees)
1186         if minutes: value += float(minutes)/60
1187         if seconds: value += float(seconds)/3600
1188         if hemisphere and hemisphere in "sw": value *= -1
1189         coordinates[coordinate] = math.radians(value)
1190     return tuple(coordinates)
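# Illustrative: gecos("40-26-46N, 79-58-56W") yields approximately
# (0.706, -1.396), the same coordinates expressed in radians.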
1191
1192 def correlate():
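    """Rebuild the correlation data sets (airports, places, stations, zctas,
    zones) from the source files expected in the current directory."""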
1193     import codecs, csv, datetime, hashlib, os, re, sys, tarfile, time, zipfile
1194     if pyversion("3"): import configparser
1195     else: import ConfigParser as configparser
1196     for filename in os.listdir("."):
1197         if re.match("[0-9]{4}_Gaz_counties_national.zip$", filename):
1198             gcounties_an = filename
1199             gcounties_fn = filename[:-4] + ".txt"
1200         elif re.match("[0-9]{4}_Gaz_cousubs_national.zip$", filename):
1201             gcousubs_an = filename
1202             gcousubs_fn = filename[:-4] + ".txt"
1203         elif re.match("[0-9]{4}_Gaz_place_national.zip$", filename):
1204             gplace_an = filename
1205             gplace_fn = filename[:-4] + ".txt"
1206         elif re.match("[0-9]{4}_Gaz_zcta_national.zip$", filename):
1207             gzcta_an = filename
1208             gzcta_fn = filename[:-4] + ".txt"
1209         elif re.match("bp[0-9]{2}[a-z]{2}[0-9]{2}.dbx$", filename):
1210             cpfzcf_fn = filename
1211     nsdcccc_fn = "nsd_cccc.txt"
1212     ourairports_fn = "airports.csv"
1213     overrides_fn = "overrides.conf"
1214     overrideslog_fn = "overrides.log"
1215     slist_fn = "slist"
1216     zlist_fn = "zlist"
1217     qalog_fn = "qa.log"
1218     airports_fn = "airports"
1219     places_fn = "places"
1220     stations_fn = "stations"
1221     zctas_fn = "zctas"
1222     zones_fn = "zones"
1223     header = """\
1224 %s
1225 # generated by %s on %s from these public domain sources:
1226 #
1227 # https://www.census.gov/geographies/reference-files/time-series/geo/gazetteer-files.html
1228 # %s %s %s
1229 # %s %s %s
1230 # %s %s %s
1231 # %s %s %s
1232 #
1233 # https://www.weather.gov/gis/ZoneCounty/
1234 # %s %s %s
1235 #
1236 # https://tgftp.nws.noaa.gov/data/
1237 # %s %s %s
1238 #
1239 # https://ourairports.com/data/
1240 # %s %s %s
1241 #
1242 # ...and these manually-generated or hand-compiled adjustments:
1243 # %s %s %s
1244 # %s %s %s
1245 # %s %s %s\
1246 """ % (
1247         weather_copyright,
1248         os.path.basename( sys.argv[0] ),
1249         datetime.date.isoformat(
1250             datetime.datetime.utcfromtimestamp( int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) )
1251         ),
1252         hashlib.md5( open(gcounties_an, "rb").read() ).hexdigest(),
1253         datetime.date.isoformat(
1254             datetime.datetime.utcfromtimestamp( os.path.getmtime(gcounties_an) )
1255         ),
1256         gcounties_an,
1257         hashlib.md5( open(gcousubs_an, "rb").read() ).hexdigest(),
1258         datetime.date.isoformat(
1259             datetime.datetime.utcfromtimestamp( os.path.getmtime(gcousubs_an) )
1260         ),
1261         gcousubs_an,
1262         hashlib.md5( open(gplace_an, "rb").read() ).hexdigest(),
1263         datetime.date.isoformat(
1264             datetime.datetime.utcfromtimestamp( os.path.getmtime(gplace_an) )
1265         ),
1266         gplace_an,
1267         hashlib.md5( open(gzcta_an, "rb").read() ).hexdigest(),
1268         datetime.date.isoformat(
1269             datetime.datetime.utcfromtimestamp( os.path.getmtime(gzcta_an) )
1270         ),
1271         gzcta_an,
1272         hashlib.md5( open(cpfzcf_fn, "rb").read() ).hexdigest(),
1273         datetime.date.isoformat(
1274             datetime.datetime.utcfromtimestamp( os.path.getmtime(cpfzcf_fn) )
1275         ),
1276         cpfzcf_fn,
1277         hashlib.md5( open(nsdcccc_fn, "rb").read() ).hexdigest(),
1278         datetime.date.isoformat(
1279             datetime.datetime.utcfromtimestamp( os.path.getmtime(nsdcccc_fn) )
1280         ),
1281         nsdcccc_fn,
1282         hashlib.md5( open(ourairports_fn, "rb").read() ).hexdigest(),
1283         datetime.date.isoformat(
1284             datetime.datetime.utcfromtimestamp( os.path.getmtime(ourairports_fn) )
1285         ),
1286         ourairports_fn,
1287         hashlib.md5( open(overrides_fn, "rb").read() ).hexdigest(),
1288         datetime.date.isoformat(
1289             datetime.datetime.utcfromtimestamp( os.path.getmtime(overrides_fn) )
1290         ),
1291         overrides_fn,
1292         hashlib.md5( open(slist_fn, "rb").read() ).hexdigest(),
1293         datetime.date.isoformat(
1294             datetime.datetime.utcfromtimestamp( os.path.getmtime(slist_fn) )
1295         ),
1296         slist_fn,
1297         hashlib.md5( open(zlist_fn, "rb").read() ).hexdigest(),
1298         datetime.date.isoformat(
1299             datetime.datetime.utcfromtimestamp( os.path.getmtime(zlist_fn) )
1300         ),
1301         zlist_fn
1302     )
1303     airports = {}
1304     places = {}
1305     stations = {}
1306     zctas = {}
1307     zones = {}
1308     message = "Reading %s:%s..." % (gcounties_an, gcounties_fn)
1309     sys.stdout.write(message)
1310     sys.stdout.flush()
1311     count = 0
1312     gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "r")
1313     columns = gcounties.readline().decode("utf-8").strip().split("\t")
1314     for line in gcounties:
1315         fields = line.decode("utf-8").strip().split("\t")
1316         f_geoid = fields[ columns.index("GEOID") ].strip()
1317         f_name = fields[ columns.index("NAME") ].strip()
1318         f_usps = fields[ columns.index("USPS") ].strip()
1319         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1320         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1321         if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1322             fips = "fips%s" % f_geoid
1323             if fips not in places: places[fips] = {}
1324             places[fips]["centroid"] = gecos(
1325                 "%s,%s" % (f_intptlat, f_intptlong)
1326             )
1327             places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1328             count += 1
1329     gcounties.close()
1330     print("done (%s lines)." % count)
1331     message = "Reading %s:%s..." % (gcousubs_an, gcousubs_fn)
1332     sys.stdout.write(message)
1333     sys.stdout.flush()
1334     count = 0
1335     gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "r")
1336     columns = gcousubs.readline().decode("utf-8").strip().split("\t")
1337     for line in gcousubs:
1338         fields = line.decode("utf-8").strip().split("\t")
1339         f_geoid = fields[ columns.index("GEOID") ].strip()
1340         f_name = fields[ columns.index("NAME") ].strip()
1341         f_usps = fields[ columns.index("USPS") ].strip()
1342         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1343         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1344         if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1345             fips = "fips%s" % f_geoid
1346             if fips not in places: places[fips] = {}
1347             places[fips]["centroid"] = gecos(
1348                 "%s,%s" % (f_intptlat, f_intptlong)
1349             )
1350             places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1351             count += 1
1352     gcousubs.close()
1353     print("done (%s lines)." % count)
1354     message = "Reading %s:%s..." % (gplace_an, gplace_fn)
1355     sys.stdout.write(message)
1356     sys.stdout.flush()
1357     count = 0
1358     gplace = zipfile.ZipFile(gplace_an).open(gplace_fn, "r")
1359     columns = gplace.readline().decode("utf-8").strip().split("\t")
1360     for line in gplace:
1361         fields = line.decode("utf-8").strip().split("\t")
1362         f_geoid = fields[ columns.index("GEOID") ].strip()
1363         f_name = fields[ columns.index("NAME") ].strip()
1364         f_usps = fields[ columns.index("USPS") ].strip()
1365         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1366         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1367         if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1368             fips = "fips%s" % f_geoid
1369             if fips not in places: places[fips] = {}
1370             places[fips]["centroid"] = gecos(
1371                 "%s,%s" % (f_intptlat, f_intptlong)
1372             )
1373             places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1374             count += 1
1375     gplace.close()
1376     print("done (%s lines)." % count)
1377     message = "Reading %s..." % slist_fn
1378     sys.stdout.write(message)
1379     sys.stdout.flush()
1380     count = 0
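         # The station list has one ICAO identifier per line ("#" starts a
         # comment); each station gets a decoded-METAR URL on tgftp.nws.noaa.gov.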
1381     slist = codecs.open(slist_fn, "r", "utf-8")
1382     for line in slist:
1383         icao = line.split("#")[0].strip()
1384         if icao:
1385             stations[icao] = {
1386                 "metar": "https://tgftp.nws.noaa.gov/data/observations/"\
1387                     + "metar/decoded/%s.TXT" % icao.upper()
1388             }
1389             count += 1
1390     slist.close()
1391     print("done (%s lines)." % count)
1392     message = "Reading %s..." % nsdcccc_fn
1393     sys.stdout.write(message)
1394     sys.stdout.flush()
1395     count = 0
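         # nsd_cccc is assumed to be the semicolon-delimited station directory:
         # name, state and country form the description, and two candidate
         # field pairs are tried for the latitude/longitude.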
1396     nsdcccc = codecs.open(nsdcccc_fn, "r", "utf-8")
1397     for line in nsdcccc:
1398         line = str(line)
1399         fields = line.split(";")
1400         icao = fields[0].strip().lower()
1401         if icao in stations:
1402             description = []
1403             name = " ".join( fields[3].strip().title().split() )
1404             if name: description.append(name)
1405             st = fields[4].strip()
1406             if st: description.append(st)
1407             country = " ".join( fields[5].strip().title().split() )
1408             if country: description.append(country)
1409             if description:
1410                 stations[icao]["description"] = ", ".join(description)
1411             lat, lon = fields[7:9]
1412             if lat and lon:
1413                 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1414             elif "location" not in stations[icao]:
1415                 lat, lon = fields[5:7]
1416                 if lat and lon:
1417                     stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1418         count += 1
1419     nsdcccc.close()
1420     print("done (%s lines)." % count)
1421     message = "Reading %s..." % ourairports_fn
1422     sys.stdout.write(message)
1423     sys.stdout.flush()
1424     count = 0
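         # Column positions below assume the OurAirports airports.csv layout
         # (gps_code, iata_code, name, municipality, iso_region, iso_country,
         # latitude, longitude); only stations already indexed above are used.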
1425     ourairports = open(ourairports_fn, "r", encoding="utf-8")
1426     for row in csv.reader(ourairports):
1427         icao = row[12].lower()
1428         if icao in stations:
1429             iata = row[13].lower()
1430             if len(iata) == 3: airports[iata] = { "station": icao }
1431             if "description" not in stations[icao]:
1432                 description = []
1433                 name = row[3]
1434                 if name: description.append(name)
1435                 municipality = row[10]
1436                 if municipality: description.append(municipality)
1437                 region = row[9]
1438                 country = row[8]
1439                 if region:
1440                     if "-" in region:
1441                         c,r = region.split("-", 1)
1442                         if c == country: region = r
1443                     description.append(region)
1444                 if country:
1445                     description.append(country)
1446                 if description:
1447                     stations[icao]["description"] = ", ".join(description)
1448             if "location" not in stations[icao]:
1449                 lat = row[4]
1450                 if lat:
1451                     lon = row[5]
1452                     if lon:
1453                         stations[icao]["location"] = gecos(
1454                             "%s,%s" % (lat, lon)
1455                         )
1456         count += 1
1457     ourairports.close()
1458     print("done (%s lines)." % count)
1459     message = "Reading %s..." % zlist_fn
1460     sys.stdout.write(message)
1461     sys.stdout.flush()
1462     count = 0
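         # The zone list has one forecast zone ID per line ("#" comments
         # allowed); details are filled in from the correlation file below.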
1463     zlist = codecs.open(zlist_fn, "r", "utf-8")
1464     for line in zlist:
1465         line = line.split("#")[0].strip()
1466         if line:
1467             zones[line] = {}
1468             count += 1
1469     zlist.close()
1470     print("done (%s lines)." % count)
1471     message = "Reading %s..." % cpfzcf_fn
1472     sys.stdout.write(message)
1473     sys.stdout.flush()
1474     count = 0
1475     cpfz = {}
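         # cpfzcf is assumed to be the pipe-delimited NWS zone/county
         # correlation file; each matching zone gets per-product URLs under
         # tgftp.nws.noaa.gov plus a description and a centroid, falling back
         # to the county centroid when the zone's own coordinates are empty.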
1476     cpfzcf = codecs.open(cpfzcf_fn, "r", "utf-8")
1477     for line in cpfzcf:
1478         fields = line.strip().split("|")
1479         if len(fields) == 11 \
1480             and fields[0] and fields[1] and fields[9] and fields[10]:
1481             zone = "z".join( fields[:2] ).lower()
1482             if zone in zones:
1483                 state = fields[0]
1484                 if state:
1485                     zones[zone]["coastal_flood_statement"] = (
1486                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1487                         "flood/coastal/%s/%s.txt" % (state.lower(), zone))
1488                     zones[zone]["flash_flood_statement"] = (
1489                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1490                         "flash_flood/statement/%s/%s.txt"
1491                         % (state.lower(), zone))
1492                     zones[zone]["flash_flood_warning"] = (
1493                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1494                         "flash_flood/warning/%s/%s.txt"
1495                         % (state.lower(), zone))
1496                     zones[zone]["flash_flood_watch"] = (
1497                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1498                         "flash_flood/watch/%s/%s.txt" % (state.lower(), zone))
1499                     zones[zone]["flood_statement"] = (
1500                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1501                         "flood/statement/%s/%s.txt" % (state.lower(), zone))
1502                     zones[zone]["flood_warning"] = (
1503                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1504                         "flood/warning/%s/%s.txt" % (state.lower(), zone))
1505                     zones[zone]["severe_thunderstorm_warning"] = (
1506                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1507                         "thunderstorm/%s/%s.txt" % (state.lower(), zone))
1508                     zones[zone]["severe_weather_statement"] = (
1509                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1510                         "severe_weather_stmt/%s/%s.txt"
1511                         % (state.lower(), zone))
1512                     zones[zone]["short_term_forecast"] = (
1513                         "https://tgftp.nws.noaa.gov/data/forecasts/nowcast/"
1514                         "%s/%s.txt" % (state.lower(), zone))
1515                     zones[zone]["special_weather_statement"] = (
1516                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1517                         "special_weather_stmt/%s/%s.txt"
1518                         % (state.lower(), zone))
1519                     zones[zone]["state_forecast"] = (
1520                         "https://tgftp.nws.noaa.gov/data/forecasts/state/"
1521                         "%s/%s.txt" % (state.lower(), zone))
1522                     zones[zone]["urgent_weather_message"] = (
1523                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1524                         "non_precip/%s/%s.txt" % (state.lower(), zone))
1525                     zones[zone]["zone_forecast"] = (
1526                         "https://tgftp.nws.noaa.gov/data/forecasts/zone/"
1527                         "%s/%s.txt" % (state.lower(), zone))
1528                 description = fields[3].strip()
1529                 fips = "fips%s"%fields[6]
1530                 county = fields[5]
1531                 if county:
1532                     if description.endswith(county):
1533                         description += " County"
1534                     else:
1535                         description += ", %s County" % county
1536                 description += ", %s, US" % state
1537                 zones[zone]["description"] = description
1538                 zones[zone]["centroid"] = gecos( ",".join( fields[9:11] ) )
1539                 if fips in places and not zones[zone]["centroid"]:
1540                     zones[zone]["centroid"] = places[fips]["centroid"]
1541         count += 1
1542     cpfzcf.close()
1543     print("done (%s lines)." % count)
1544     message = "Reading %s:%s..." % (gzcta_an, gzcta_fn)
1545     sys.stdout.write(message)
1546     sys.stdout.flush()
1547     count = 0
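         # The ZCTA gazetteer shares the tab-separated layout above; only a
         # centroid is recorded for each ZIP Code Tabulation Area.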
1548     gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "r")
1549     columns = gzcta.readline().decode("utf-8").strip().split("\t")
1550     for line in gzcta:
1551         fields = line.decode("utf-8").strip().split("\t")
1552         f_geoid = fields[ columns.index("GEOID") ].strip()
1553         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1554         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1555         if f_geoid and f_intptlat and f_intptlong:
1556             if f_geoid not in zctas: zctas[f_geoid] = {}
1557             zctas[f_geoid]["centroid"] = gecos(
1558                 "%s,%s" % (f_intptlat, f_intptlong)
1559             )
1560             count += 1
1561     gzcta.close()
1562     print("done (%s lines)." % count)
1563     message = "Reading %s..." % overrides_fn
1564     sys.stdout.write(message)
1565     sys.stdout.flush()
1566     count = 0
1567     added = 0
1568     removed = 0
1569     changed = 0
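         # Override sections are matched by name: three letters = airport,
         # four characters = station, five digits = ZCTA, ssZnnn = zone and
         # fipsNNN... = place; a leading "-" deletes the record instead.
         # "centroid" and "location" values are passed to eval(), so they must
         # be Python tuples.  A hypothetical stanza:
         #
         #     [xyz]
         #     station = kxyz
         #
         #     [-kabc]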
1570     overrides = configparser.ConfigParser()
1571     overrides.read_file( codecs.open(overrides_fn, "r", "utf8") )
1572     overrideslog = []
1573     for section in overrides.sections():
1574         addopt = 0
1575         chgopt = 0
1576         if section.startswith("-"):
1577             section = section[1:]
1578             delete = True
1579         else: delete = False
1580         if re.match("[A-Za-z]{3}$", section):
1581             if delete:
1582                 if section in airports:
1583                     del( airports[section] )
1584                     logact = "removed airport %s" % section
1585                     removed += 1
1586                 else:
1587                     logact = "tried to remove nonexistent airport %s" % section
1588             else:
1589                 if section in airports:
1590                     logact = "changed airport %s" % section
1591                     changed += 1
1592                 else:
1593                     airports[section] = {}
1594                     logact = "added airport %s" % section
1595                     added += 1
1596                 for key,value in overrides.items(section):
1597                     if key in airports[section]: chgopt += 1
1598                     else: addopt += 1
1599                     if key in ("centroid", "location"):
1600                         airports[section][key] = eval(value)
1601                     else:
1602                         airports[section][key] = value
1603                 if addopt and chgopt:
1604                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1605                 elif addopt: logact += " (+%s options)" % addopt
1606                 elif chgopt: logact += " (!%s options)" % chgopt
1607         elif re.match("[A-Za-z0-9]{4}$", section):
1608             if delete:
1609                 if section in stations:
1610                     del( stations[section] )
1611                     logact = "removed station %s" % section
1612                     removed += 1
1613                 else:
1614                     logact = "tried to remove nonexistent station %s" % section
1615             else:
1616                 if section in stations:
1617                     logact = "changed station %s" % section
1618                     changed += 1
1619                 else:
1620                     stations[section] = {}
1621                     logact = "added station %s" % section
1622                     added += 1
1623                 for key,value in overrides.items(section):
1624                     if key in stations[section]: chgopt += 1
1625                     else: addopt += 1
1626                     if key in ("centroid", "location"):
1627                         stations[section][key] = eval(value)
1628                     else:
1629                         stations[section][key] = value
1630                 if addopt and chgopt:
1631                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1632                 elif addopt: logact += " (+%s options)" % addopt
1633                 elif chgopt: logact += " (!%s options)" % chgopt
1634         elif re.match("[0-9]{5}$", section):
1635             if delete:
1636                 if section in zctas:
1637                     del( zctas[section] )
1638                     logact = "removed zcta %s" % section
1639                     removed += 1
1640                 else:
1641                     logact = "tried to remove nonexistent zcta %s" % section
1642             else:
1643                 if section in zctas:
1644                     logact = "changed zcta %s" % section
1645                     changed += 1
1646                 else:
1647                     zctas[section] = {}
1648                     logact = "added zcta %s" % section
1649                     added += 1
1650                 for key,value in overrides.items(section):
1651                     if key in zctas[section]: chgopt += 1
1652                     else: addopt += 1
1653                     if key in ("centroid", "location"):
1654                         zctas[section][key] = eval(value)
1655                     else:
1656                         zctas[section][key] = value
1657                 if addopt and chgopt:
1658                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1659                 elif addopt: logact += " (+%s options)" % addopt
1660                 elif chgopt: logact += " (!%s options)" % chgopt
1661         elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", section):
1662             if delete:
1663                 if section in zones:
1664                     del( zones[section] )
1665                     logact = "removed zone %s" % section
1666                     removed += 1
1667                 else:
1668                     logact = "tried to remove nonexistent zone %s" % section
1669             else:
1670                 if section in zones:
1671                     logact = "changed zone %s" % section
1672                     changed += 1
1673                 else:
1674                     zones[section] = {}
1675                     logact = "added zone %s" % section
1676                     added += 1
1677                 for key,value in overrides.items(section):
1678                     if key in zones[section]: chgopt += 1
1679                     else: addopt += 1
1680                     if key in ("centroid", "location"):
1681                         zones[section][key] = eval(value)
1682                     else:
1683                         zones[section][key] = value
1684                 if addopt and chgopt:
1685                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1686                 elif addopt: logact += " (+%s options)" % addopt
1687                 elif chgopt: logact += " (!%s options)" % chgopt
1688         elif re.match("fips[0-9]+$", section):
1689             if delete:
1690                 if section in places:
1691                     del( places[section] )
1692                     logact = "removed place %s" % section
1693                     removed += 1
1694                 else:
1695                     logact = "tried to remove nonexistent place %s" % section
1696             else:
1697                 if section in places:
1698                     logact = "changed place %s" % section
1699                     changed += 1
1700                 else:
1701                     places[section] = {}
1702                     logact = "added place %s" % section
1703                     added += 1
1704                 for key,value in overrides.items(section):
1705                     if key in places[section]: chgopt += 1
1706                     else: addopt += 1
1707                     if key in ("centroid", "location"):
1708                         places[section][key] = eval(value)
1709                     else:
1710                         places[section][key] = value
1711                 if addopt and chgopt:
1712                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1713                 elif addopt: logact += " (+%s options)" % addopt
1714                 elif chgopt: logact += " (!%s options)" % chgopt
1715         count += 1
1716         overrideslog.append("%s\n" % logact)
1717     overrideslog.sort()
1718     if os.path.exists(overrideslog_fn):
1719         os.rename(overrideslog_fn, "%s_old"%overrideslog_fn)
1720     overrideslog_fd = codecs.open(overrideslog_fn, "w", "utf8")
1721     import time
1722     overrideslog_fd.write(
1723         '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
1724         '# use, copy, modify, and distribute this software is granted under terms\n'
1725         '# provided in the LICENSE file distributed with this software.\n\n'
1726         % time.gmtime().tm_year)
1727     overrideslog_fd.writelines(overrideslog)
1728     overrideslog_fd.close()
1729     print("done (%s overridden sections: +%s/-%s/!%s)." % (
1730         count,
1731         added,
1732         removed,
1733         changed
1734     ) )
1735     estimate = 2*len(places) + len(stations) + 2*len(zctas) + len(zones)
1736     print(
1737         "Correlating places, stations, ZCTAs and zones (upper bound is %s):" % \
1738             estimate
1739     )
1740     count = 0
1741     milestones = list( range(51) )
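         # Correlation pass: attach the nearest station and/or zone (as found
         # by closest() with a 0.1 search radius) to every place, station,
         # ZCTA and zone; milestones drives the dot/percentage progress meter.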
1742     message = "   "
1743     sys.stdout.write(message)
1744     sys.stdout.flush()
1745     for fips in places:
1746         centroid = places[fips]["centroid"]
1747         if centroid:
1748             station = closest(centroid, stations, "location", 0.1)
1749         if centroid and station[0]:
1750             places[fips]["station"] = station
1751             count += 1
1752             if not count%100:
1753                 level = int(50*count/estimate)
1754                 if level in milestones:
1755                     for remaining in milestones[:milestones.index(level)+1]:
1756                         if remaining%5:
1757                             message = "."
1758                             sys.stdout.write(message)
1759                             sys.stdout.flush()
1760                         else:
1761                             message = "%s%%" % (remaining*2,)
1762                             sys.stdout.write(message)
1763                             sys.stdout.flush()
1764                         milestones.remove(remaining)
1765         if centroid:
1766             zone = closest(centroid, zones, "centroid", 0.1)
1767         if centroid and zone[0]:
1768             places[fips]["zone"] = zone
1769             count += 1
1770             if not count%100:
1771                 level = int(50*count/estimate)
1772                 if level in milestones:
1773                     for remaining in milestones[:milestones.index(level)+1]:
1774                         if remaining%5:
1775                             message = "."
1776                             sys.stdout.write(message)
1777                             sys.stdout.flush()
1778                         else:
1779                             message = "%s%%" % (remaining*2,)
1780                             sys.stdout.write(message)
1781                             sys.stdout.flush()
1782                         milestones.remove(remaining)
1783     for station in stations:
1784         if "location" in stations[station]:
1785             location = stations[station]["location"]
1786             if location:
1787                 zone = closest(location, zones, "centroid", 0.1)
1788             if location and zone[0]:
1789                 stations[station]["zone"] = zone
1790                 count += 1
1791                 if not count%100:
1792                     level = int(50*count/estimate)
1793                     if level in milestones:
1794                         for remaining in milestones[:milestones.index(level)+1]:
1795                             if remaining%5:
1796                                 message = "."
1797                                 sys.stdout.write(message)
1798                                 sys.stdout.flush()
1799                             else:
1800                                 message = "%s%%" % (remaining*2,)
1801                                 sys.stdout.write(message)
1802                                 sys.stdout.flush()
1803                             milestones.remove(remaining)
1804     for zcta in zctas.keys():
1805         centroid = zctas[zcta]["centroid"]
1806         if centroid:
1807             station = closest(centroid, stations, "location", 0.1)
1808         if centroid and station[0]:
1809             zctas[zcta]["station"] = station
1810             count += 1
1811             if not count%100:
1812                 level = int(50*count/estimate)
1813                 if level in milestones:
1814                     for remaining in milestones[ : milestones.index(level)+1 ]:
1815                         if remaining%5:
1816                             message = "."
1817                             sys.stdout.write(message)
1818                             sys.stdout.flush()
1819                         else:
1820                             message = "%s%%" % (remaining*2,)
1821                             sys.stdout.write(message)
1822                             sys.stdout.flush()
1823                         milestones.remove(remaining)
1824         if centroid:
1825             zone = closest(centroid, zones, "centroid", 0.1)
1826         if centroid and zone[0]:
1827             zctas[zcta]["zone"] = zone
1828             count += 1
1829             if not count%100:
1830                 level = int(50*count/estimate)
1831                 if level in milestones:
1832                     for remaining in milestones[:milestones.index(level)+1]:
1833                         if remaining%5:
1834                             message = "."
1835                             sys.stdout.write(message)
1836                             sys.stdout.flush()
1837                         else:
1838                             message = "%s%%" % (remaining*2,)
1839                             sys.stdout.write(message)
1840                             sys.stdout.flush()
1841                         milestones.remove(remaining)
1842     for zone in zones.keys():
1843         if "centroid" in zones[zone]:
1844             centroid = zones[zone]["centroid"]
1845             if centroid:
1846                 station = closest(centroid, stations, "location", 0.1)
1847             if centroid and station[0]:
1848                 zones[zone]["station"] = station
1849                 count += 1
1850                 if not count%100:
1851                     level = int(50*count/estimate)
1852                     if level in milestones:
1853                         for remaining in milestones[:milestones.index(level)+1]:
1854                             if remaining%5:
1855                                 message = "."
1856                                 sys.stdout.write(message)
1857                                 sys.stdout.flush()
1858                             else:
1859                                 message = "%s%%" % (remaining*2,)
1860                                 sys.stdout.write(message)
1861                                 sys.stdout.flush()
1862                             milestones.remove(remaining)
1863     for remaining in milestones:
1864         if remaining%5:
1865             message = "."
1866             sys.stdout.write(message)
1867             sys.stdout.flush()
1868         else:
1869             message = "%s%%" % (remaining*2,)
1870             sys.stdout.write(message)
1871             sys.stdout.flush()
1872     print("\n   done (%s correlations)." % count)
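         # Write each table as an INI-style file beneath the generated header;
         # floats are fixed to seven decimal places and tuples are re-rendered
         # element by element so coordinate pairs stay parseable.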
1873     message = "Writing %s..." % airports_fn
1874     sys.stdout.write(message)
1875     sys.stdout.flush()
1876     count = 0
1877     if os.path.exists(airports_fn):
1878         os.rename(airports_fn, "%s_old"%airports_fn)
1879     airports_fd = codecs.open(airports_fn, "w", "utf8")
1880     airports_fd.write(header)
1881     for airport in sorted( airports.keys() ):
1882         airports_fd.write("\n\n[%s]" % airport)
1883         for key, value in sorted( airports[airport].items() ):
1884             if type(value) is float: value = "%.7f"%value
1885             elif type(value) is tuple:
1886                 elements = []
1887                 for element in value:
1888                     if type(element) is float: elements.append("%.7f"%element)
1889                     else: elements.append( repr(element) )
1890                 value = "(%s)"%", ".join(elements)
1891             airports_fd.write( "\n%s = %s" % (key, value) )
1892         count += 1
1893     airports_fd.write("\n")
1894     airports_fd.close()
1895     print("done (%s sections)." % count)
1896     message = "Writing %s..." % places_fn
1897     sys.stdout.write(message)
1898     sys.stdout.flush()
1899     count = 0
1900     if os.path.exists(places_fn):
1901         os.rename(places_fn, "%s_old"%places_fn)
1902     places_fd = codecs.open(places_fn, "w", "utf8")
1903     places_fd.write(header)
1904     for fips in sorted( places.keys() ):
1905         places_fd.write("\n\n[%s]" % fips)
1906         for key, value in sorted( places[fips].items() ):
1907             if type(value) is float: value = "%.7f"%value
1908             elif type(value) is tuple:
1909                 elements = []
1910                 for element in value:
1911                     if type(element) is float: elements.append("%.7f"%element)
1912                     else: elements.append( repr(element) )
1913                 value = "(%s)"%", ".join(elements)
1914             places_fd.write( "\n%s = %s" % (key, value) )
1915         count += 1
1916     places_fd.write("\n")
1917     places_fd.close()
1918     print("done (%s sections)." % count)
1919     message = "Writing %s..." % stations_fn
1920     sys.stdout.write(message)
1921     sys.stdout.flush()
1922     count = 0
1923     if os.path.exists(stations_fn):
1924         os.rename(stations_fn, "%s_old"%stations_fn)
1925     stations_fd = codecs.open(stations_fn, "w", "utf-8")
1926     stations_fd.write(header)
1927     for station in sorted( stations.keys() ):
1928         stations_fd.write("\n\n[%s]" % station)
1929         for key, value in sorted( stations[station].items() ):
1930             if type(value) is float: value = "%.7f"%value
1931             elif type(value) is tuple:
1932                 elements = []
1933                 for element in value:
1934                     if type(element) is float: elements.append("%.7f"%element)
1935                     else: elements.append( repr(element) )
1936                 value = "(%s)"%", ".join(elements)
1937             if type(value) is bytes:
1938                 value = value.decode("utf-8")
1939             stations_fd.write( "\n%s = %s" % (key, value) )
1940         count += 1
1941     stations_fd.write("\n")
1942     stations_fd.close()
1943     print("done (%s sections)." % count)
1944     message = "Writing %s..." % zctas_fn
1945     sys.stdout.write(message)
1946     sys.stdout.flush()
1947     count = 0
1948     if os.path.exists(zctas_fn):
1949         os.rename(zctas_fn, "%s_old"%zctas_fn)
1950     zctas_fd = codecs.open(zctas_fn, "w", "utf8")
1951     zctas_fd.write(header)
1952     for zcta in sorted( zctas.keys() ):
1953         zctas_fd.write("\n\n[%s]" % zcta)
1954         for key, value in sorted( zctas[zcta].items() ):
1955             if type(value) is float: value = "%.7f"%value
1956             elif type(value) is tuple:
1957                 elements = []
1958                 for element in value:
1959                     if type(element) is float: elements.append("%.7f"%element)
1960                     else: elements.append( repr(element) )
1961                 value = "(%s)"%", ".join(elements)
1962             zctas_fd.write( "\n%s = %s" % (key, value) )
1963         count += 1
1964     zctas_fd.write("\n")
1965     zctas_fd.close()
1966     print("done (%s sections)." % count)
1967     message = "Writing %s..." % zones_fn
1968     sys.stdout.write(message)
1969     sys.stdout.flush()
1970     count = 0
1971     if os.path.exists(zones_fn):
1972         os.rename(zones_fn, "%s_old"%zones_fn)
1973     zones_fd = codecs.open(zones_fn, "w", "utf8")
1974     zones_fd.write(header)
1975     for zone in sorted( zones.keys() ):
1976         zones_fd.write("\n\n[%s]" % zone)
1977         for key, value in sorted( zones[zone].items() ):
1978             if type(value) is float: value = "%.7f"%value
1979             elif type(value) is tuple:
1980                 elements = []
1981                 for element in value:
1982                     if type(element) is float: elements.append("%.7f"%element)
1983                     else: elements.append( repr(element) )
1984                 value = "(%s)"%", ".join(elements)
1985             zones_fd.write( "\n%s = %s" % (key, value) )
1986         count += 1
1987     zones_fd.write("\n")
1988     zones_fd.close()
1989     print("done (%s sections)." % count)
1990     message = "Starting QA check..."
1991     sys.stdout.write(message)
1992     sys.stdout.flush()
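         # QA pass: re-read the freshly written files with configparser and
         # log records missing centroids, descriptions, locations, METAR or
         # forecast URLs, plus zone centroids within one km of another zone.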
1993     airports = configparser.ConfigParser()
1994     airports.read(airports_fn)
1995     places = configparser.ConfigParser()
1996     places.read(places_fn)
1997     stations = configparser.ConfigParser()
1998     stations.read(stations_fn)
1999     zctas = configparser.ConfigParser()
2000     zctas.read(zctas_fn)
2001     zones = configparser.ConfigParser()
2002     zones.read(zones_fn)
2003     qalog = []
2004     places_nocentroid = 0
2005     places_nodescription = 0
2006     for place in sorted( places.sections() ):
2007         if not places.has_option(place, "centroid"):
2008             qalog.append("%s: no centroid\n" % place)
2009             places_nocentroid += 1
2010         if not places.has_option(place, "description"):
2011             qalog.append("%s: no description\n" % place)
2012             places_nodescription += 1
2013     stations_nodescription = 0
2014     stations_nolocation = 0
2015     stations_nometar = 0
2016     for station in sorted( stations.sections() ):
2017         if not stations.has_option(station, "description"):
2018             qalog.append("%s: no description\n" % station)
2019             stations_nodescription += 1
2020         if not stations.has_option(station, "location"):
2021             qalog.append("%s: no location\n" % station)
2022             stations_nolocation += 1
2023         if not stations.has_option(station, "metar"):
2024             qalog.append("%s: no metar\n" % station)
2025             stations_nometar += 1
2026     airports_badstation = 0
2027     airports_nostation = 0
2028     for airport in sorted( airports.sections() ):
2029         if not airports.has_option(airport, "station"):
2030             qalog.append("%s: no station\n" % airport)
2031             airports_nostation += 1
2032         else:
2033             station = airports.get(airport, "station")
2034             if station not in stations.sections():
2035                 qalog.append( "%s: bad station %s\n" % (airport, station) )
2036                 airports_badstation += 1
2037     zctas_nocentroid = 0
2038     for zcta in sorted( zctas.sections() ):
2039         if not zctas.has_option(zcta, "centroid"):
2040             qalog.append("%s: no centroid\n" % zcta)
2041             zctas_nocentroid += 1
2042     zones_nocentroid = 0
2043     zones_nodescription = 0
2044     zones_noforecast = 0
2045     zones_overlapping = 0
2046     zonetable = {}
2047     for zone in zones.sections():
2048         if zones.has_option(zone, "centroid"):
2049             zonetable[zone] = {
2050                 "centroid": eval( zones.get(zone, "centroid") )
2051             }
2052     for zone in sorted( zones.sections() ):
2053         if zones.has_option(zone, "centroid"):
2054             zonetable_local = zonetable.copy()
2055             del( zonetable_local[zone] )
2056             centroid = eval( zones.get(zone, "centroid") )
2057             if centroid:
2058                 nearest = closest(centroid, zonetable_local, "centroid", 0.1)
2059             if centroid and nearest[1]*radian_to_km < 1:
2060                 qalog.append( "%s: within one km of %s\n" % (
2061                     zone,
2062                     nearest[0]
2063                 ) )
2064                 zones_overlapping += 1
2065         else:
2066             qalog.append("%s: no centroid\n" % zone)
2067             zones_nocentroid += 1
2068         if not zones.has_option(zone, "description"):
2069             qalog.append("%s: no description\n" % zone)
2070             zones_nodescription += 1
2071         if not zones.has_option(zone, "zone_forecast"):
2072             qalog.append("%s: no forecast\n" % zone)
2073             zones_noforecast += 1
2074     if os.path.exists(qalog_fn):
2075         os.rename(qalog_fn, "%s_old"%qalog_fn)
2076     qalog_fd = codecs.open(qalog_fn, "w", "utf8")
2077     import time
2078     qalog_fd.write(
2079         '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
2080         '# use, copy, modify, and distribute this software is granted under terms\n'
2081         '# provided in the LICENSE file distributed with this software.\n\n'
2082         % time.gmtime().tm_year)
2083     qalog_fd.writelines(qalog)
2084     qalog_fd.close()
2085     if qalog:
2086         print("issues found (see %s for details):"%qalog_fn)
2087         if airports_badstation:
2088             print("   %s airports with invalid station"%airports_badstation)
2089         if airports_nostation:
2090             print("   %s airports with no station"%airports_nostation)
2091         if places_nocentroid:
2092             print("   %s places with no centroid"%places_nocentroid)
2093         if places_nodescription:
2094             print("   %s places with no description"%places_nodescription)
2095         if stations_nodescription:
2096             print("   %s stations with no description"%stations_nodescription)
2097         if stations_nolocation:
2098             print("   %s stations with no location"%stations_nolocation)
2099         if stations_nometar:
2100             print("   %s stations with no METAR"%stations_nometar)
2101         if zctas_nocentroid:
2102             print("   %s ZCTAs with no centroid"%zctas_nocentroid)
2103         if zones_nocentroid:
2104             print("   %s zones with no centroid"%zones_nocentroid)
2105         if zones_nodescription:
2106             print("   %s zones with no description"%zones_nodescription)
2107         if zones_noforecast:
2108             print("   %s zones with no forecast"%zones_noforecast)
2109         if zones_overlapping:
2110             print("   %s zones within one km of another"%zones_overlapping)
2111     else: print("no issues found.")
2112     print("Indexing complete!")