Add weather zone hkz000 for Hong Kong Observatory
[weather.git] / weather.py
1 """Contains various object definitions needed by the weather utility."""
2
3 weather_copyright = """\
4 # Copyright (c) 2006-2020 Jeremy Stanley <fungi@yuggoth.org>. Permission to
5 # use, copy, modify, and distribute this software is granted under terms
6 # provided in the LICENSE file distributed with this software.
7 #"""
8
9 weather_version = "2.4"
10
11 radian_to_km = 6372.795484
12 radian_to_mi = 3959.871528
13
14 def pyversion(ref=None):
15     """Determine the Python version and optionally compare to a reference."""
16     import platform
17     ver = platform.python_version()
18     if ref:
19         return [
20             int(x) for x in ver.split(".")[:2]
21         ] >= [
22             int(x) for x in ref.split(".")[:2]
23         ]
24     else: return ver
25
26 class Selections:
27     """An object to contain selection data."""
28     def __init__(self):
29         """Store the config, options and arguments."""
30         self.config = get_config()
31         self.options, self.arguments = get_options(self.config)
32         if self.get_bool("cache") and self.get_bool("cache_search") \
33             and not self.get_bool("longlist"):
34             integrate_search_cache(
35                 self.config,
36                 self.get("cachedir"),
37                 self.get("setpath")
38             )
39         if not self.arguments:
40             if "id" in self.options.__dict__ \
41                 and self.options.__dict__["id"]:
42                 self.arguments.append( self.options.__dict__["id"] )
43                 del( self.options.__dict__["id"] )
44                 import sys
45                 message = "WARNING: the --id option is deprecated and will eventually be removed\n"
46                 sys.stderr.write(message)
47             elif "city" in self.options.__dict__ \
48                 and self.options.__dict__["city"] \
49                 and "st" in self.options.__dict__ \
50                 and self.options.__dict__["st"]:
51                 self.arguments.append(
52                     "^%s city, %s" % (
53                         self.options.__dict__["city"],
54                         self.options.__dict__["st"]
55                     )
56                 )
57                 del( self.options.__dict__["city"] )
58                 del( self.options.__dict__["st"] )
59                 import sys
60                 message = "WARNING: the --city/--st options are deprecated and will eventually be removed\n"
61                 sys.stderr.write(message)
62     def get(self, option, argument=None):
63         """Retrieve data from the config or options."""
64         if argument:
65             if self.config.has_section(argument) and (
66                 self.config.has_option(argument, "city") \
67                     or self.config.has_option(argument, "id") \
68                     or self.config.has_option(argument, "st")
69             ):
70                 self.config.remove_section(argument)
71                 import sys
72                 message = "WARNING: the city/id/st options are now unsupported in aliases\n"
73                 sys.stderr.write(message)
74             if not self.config.has_section(argument):
75                 guessed = guess(
76                     argument,
77                     path=self.get("setpath"),
78                     info=self.get("info"),
79                     cache_search=(
80                         self.get("cache") and self.get("cache_search")
81                     ),
82                     cachedir=self.get("cachedir"),
83                     quiet=self.get_bool("quiet")
84                 )
85                 self.config.add_section(argument)
86                 for item in guessed.items():
87                     self.config.set(argument, *item)
88             if self.config.has_option(argument, option):
89                 return self.config.get(argument, option)
90         if option in self.options.__dict__:
91             return self.options.__dict__[option]
92         else:
93             import os, sys
94             message = "%s error: no URI defined for %s\n" % (
95                 os.path.basename( sys.argv[0] ),
96                 option
97             )
98             sys.stderr.write(message)
99             exit(1)
100     def get_bool(self, option, argument=None):
101         """Get data and coerce to a boolean if necessary."""
102         return bool(self.get(option, argument))
103     def getint(self, option, argument=None):
104         """Get data and coerce to an integer if necessary."""
105         value = self.get(option, argument)
106         if value: return int(value)
107         else: return 0
108
109 def average(coords):
110     """Average a list of coordinates."""
111     x = 0
112     y = 0
113     for coord in coords:
114         x += coord[0]
115         y += coord[1]
116     count = len(coords)
117     return (x/count, y/count)
118
119 def filter_units(line, units="imperial"):
120     """Filter or convert units in a line of text between US/UK and metric."""
121     import re
122     # filter lines with both pressures in the form of "X inches (Y hPa)" or
123     # "X in. Hg (Y hPa)"
124     dual_p = re.match(
125         r"(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
126         line
127     )
128     if dual_p:
129         preamble, in_hg, i_fr, i_un, hpa, h_fr, trailer = dual_p.groups()
130         if units == "imperial": line = preamble + in_hg + trailer
131         elif units == "metric": line = preamble + hpa + trailer
132     # filter lines with both temperatures in the form of "X F (Y C)"
133     dual_t = re.match(
134         r"(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
135         line
136     )
137     if dual_t:
138         preamble, fahrenheit, f_fr, celsius, c_fr, trailer = dual_t.groups()
139         if units == "imperial": line = preamble + fahrenheit + trailer
140         elif units == "metric": line = preamble + celsius + trailer
141     # if metric is desired, convert distances in the form of "X mile(s)" to
142     # "Y kilometer(s)"
143     if units == "metric":
144         imperial_d = re.match(
145             r"(.* )(\d+)( mile\(s\))(.*)",
146             line
147         )
148         if imperial_d:
149             preamble, mi, m_u, trailer = imperial_d.groups()
150             line = preamble + str(int(round(int(mi)*1.609344))) \
151                 + " kilometer(s)" + trailer
152     # filter speeds in the form of "X MPH (Y KT)" to just "X MPH"; if metric is
153     # desired, convert to "Z KPH"
154     imperial_s = re.match(
155         r"(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
156         line
157     )
158     if imperial_s:
159         preamble, mph, m_u, kt, trailer = imperial_s.groups()
160         if units == "imperial": line = preamble + mph + m_u + trailer
161         elif units == "metric": 
162             line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
163                 trailer
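    # the same speed match is applied a second time because a wind line can
    # carry two speeds (e.g. sustained wind plus gusts); the greedy leading
    # group means each pass rewrites the right-most remaining "MPH (KT)" pair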
164     imperial_s = re.match(
165         r"(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
166         line
167     )
168     if imperial_s:
169         preamble, mph, m_u, kt, trailer = imperial_s.groups()
170         if units == "imperial": line = preamble + mph + m_u + trailer
171         elif units == "metric": 
172             line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
173                 trailer
174     # if imperial is desired, qualify given forecast temperatures like "X F"; if
175     # metric is desired, convert to "Y C"
176     imperial_t = re.match(
177         r"(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
178         line
179     )
180     if imperial_t:
181         preamble, parameter, fahrenheit, sep, trailer = imperial_t.groups()
182         if units == "imperial":
183             line = preamble + parameter + fahrenheit + " F" + sep + trailer
184         elif units == "metric":
185             line = preamble + parameter \
186                 + str(int(round((int(fahrenheit)-32)*5/9))) + " C" + sep \
187                 + trailer
188     # hand off the resulting line
189     return line
190
191 def get_uri(
192     uri,
193     ignore_fail=False,
194     cache_data=False,
195     cacheage=900,
196     cachedir="."
197 ):
198     """Return a string containing the results of a URI GET."""
199     if pyversion("3"):
200         import urllib, urllib.error, urllib.request
201         URLError = urllib.error.URLError
202         urlopen = urllib.request.urlopen
203     else:
204         import urllib2 as urllib
205         URLError = urllib.URLError
206         urlopen = urllib.urlopen
207     import os, time
208     if cache_data:
209         dcachedir = os.path.join( os.path.expanduser(cachedir), "datacache" )
210         if not os.path.exists(dcachedir):
211             try: os.makedirs(dcachedir)
212             except (IOError, OSError): pass
213         dcache_fn = os.path.join(
214             dcachedir,
215             uri.split(":")[1].replace("/","_")
216         )
217     now = time.time()
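    # use the cached copy only if its mtime falls within the freshness
    # window: no older than cacheage seconds and not in the future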
218     if cache_data and os.access(dcache_fn, os.R_OK) \
219         and now-cacheage < os.stat(dcache_fn).st_mtime <= now:
220         dcache_fd = open(dcache_fn)
221         data = dcache_fd.read()
222         dcache_fd.close()
223     else:
224         try:
225             if pyversion("3"): data = urlopen(uri).read().decode("utf-8")
226             else: data = urlopen(uri).read()
227         except URLError:
228             if ignore_fail: return ""
229             else:
230                 import os, sys, traceback
231                 message = "%s error: failed to retrieve\n   %s\n   %s" % (
232                         os.path.basename( sys.argv[0] ),
233                         uri,
234                         traceback.format_exception_only(
235                             sys.exc_info()[0],
236                             sys.exc_info()[1]
237                         )[0]
238                     )
239                 sys.stderr.write(message)
240                 sys.exit(1)
241         # Some data sources are HTML with the plain text wrapped in pre tags
242         if "<pre>" in data:
243             data = data[data.find("<pre>")+5:data.find("</pre>")]
244         if cache_data:
245             try:
246                 import codecs
247                 dcache_fd = codecs.open(dcache_fn, "w", "utf-8")
248                 dcache_fd.write(data)
249                 dcache_fd.close()
250             except (IOError, OSError): pass
251     return data
252
253 def get_metar(
254     uri=None,
255     verbose=False,
256     quiet=False,
257     headers=None,
258     imperial=False,
259     metric=False,
260     cache_data=False,
261     cacheage=900,
262     cachedir="."
263 ):
264     """Return a summarized METAR for the specified station."""
265     if not uri:
266         import os, sys
267         message = "%s error: METAR URI required for conditions\n" % \
268             os.path.basename( sys.argv[0] )
269         sys.stderr.write(message)
270         sys.exit(1)
271     metar = get_uri(
272         uri,
273         cache_data=cache_data,
274         cacheage=cacheage,
275         cachedir=cachedir
276     )
277     if pyversion("3") and type(metar) is bytes: metar = metar.decode("utf-8")
278     if verbose: return metar
279     else:
280         import re
281         lines = metar.split("\n")
282         if not headers:
283             headers = \
284                 "relative_humidity," \
285                 + "precipitation_last_hour," \
286                 + "sky conditions," \
287                 + "temperature," \
288                 + "heat index," \
289                 + "windchill," \
290                 + "weather," \
291                 + "wind"
292         headerlist = headers.lower().replace("_"," ").split(",")
293         output = []
294         if not quiet:
295             title = "Current conditions at %s"
296             place = lines[0].split(", ")
297             if len(place) > 1:
298                 place = "%s, %s" % ( place[0].title(), place[1] )
299             else: place = "<UNKNOWN>"
300             output.append(title%place)
301             output.append("Last updated " + lines[1])
302         header_match = False
303         for header in headerlist:
304             for line in lines:
305                 if line.lower().startswith(header + ":"):
306                     if re.match(r".*:\d+$", line): line = line[:line.rfind(":")]
307                     if imperial: line = filter_units(line, units="imperial")
308                     elif metric: line = filter_units(line, units="metric")
309                     if quiet: output.append(line)
310                     else: output.append("   " + line)
311                     header_match = True
312         if not header_match:
313             output.append(
314                 "(no conditions matched your header list, try with --verbose)"
315             )
316         return "\n".join(output)
317
318 def get_alert(
319     uri=None,
320     verbose=False,
321     quiet=False,
322     cache_data=False,
323     cacheage=900,
324     cachedir="."
325 ):
326     """Return alert notice for the specified URI."""
327     if not uri:
328         import os, sys
329         message = "%s error: Alert URI required for alerts\n" % \
330             os.path.basename( sys.argv[0] )
331         sys.stderr.write(message)
332         sys.exit(1)
333     alert = get_uri(
334         uri,
335         ignore_fail=True,
336         cache_data=cache_data,
337         cacheage=cacheage,
338         cachedir=cachedir
339     ).strip()
340     if pyversion("3") and type(alert) is bytes: alert = alert.decode("utf-8")
341     if alert:
342         if verbose: return alert
343         else:
344             if alert.find("\nNATIONAL WEATHER SERVICE") == -1:
345                 muted = False
346             else:
347                 muted = True
348             lines = alert.split("\n")
349             import time
350             valid_time = time.strftime("%Y%m%d%H%M")
351             output = []
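            # NWS products end each segment with "$$" and separate sub-blocks
            # with "&&"; output is muted after "$$" until another NATIONAL
            # WEATHER SERVICE header line re-enables it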
352             for line in lines:
353                 if line.startswith("Expires:") \
354                     and "Expires:" + valid_time > line:
355                     return ""
356                 if muted and line.startswith("NATIONAL WEATHER SERVICE"):
357                     muted = False
358                     line = ""
359                 elif line == "&&":
360                     line = ""
361                 elif line == "$$":
362                     muted = True
363                 if line and not muted:
364                     if quiet: output.append(line)
365                     else: output.append("   " + line)
366             return "\n".join(output)
367
368 def get_options(config):
369     """Parse the options passed on the command line."""
370
371     # for optparse's builtin -h/--help option
372     usage = \
373         "usage: %prog [options] [alias1|search1 [alias2|search2 [...]]]"
374
375     # for optparse's builtin --version option
376     verstring = "%prog " + weather_version
377
378     # create the parser
379     import optparse
380     option_parser = optparse.OptionParser(usage=usage, version=verstring)
381     # separate options object from list of arguments and return both
382
383     # the -a/--alert option
384     if config.has_option("default", "alert"):
385         default_alert = bool(config.get("default", "alert"))
386     else: default_alert = False
387     option_parser.add_option("-a", "--alert",
388         dest="alert",
389         action="store_true",
390         default=default_alert,
391         help="include local alert notices")
392
393     # the --atypes option
394     if config.has_option("default", "atypes"):
395         default_atypes = config.get("default", "atypes")
396     else:
397         default_atypes = \
398             "coastal_flood_statement," \
399             + "flash_flood_statement," \
400             + "flash_flood_warning," \
401             + "flash_flood_watch," \
402             + "flood_statement," \
403             + "flood_warning," \
404             + "marine_weather_statement," \
405             + "river_statement," \
406             + "severe_thunderstorm_warning," \
407             + "severe_weather_statement," \
408             + "short_term_forecast," \
409             + "special_marine_warning," \
410             + "special_weather_statement," \
411             + "tornado_warning," \
412             + "urgent_weather_message"
413     option_parser.add_option("--atypes",
414         dest="atypes",
415         default=default_atypes,
416         help="list of alert notification types to display")
417
418     # the --build-sets option
419     option_parser.add_option("--build-sets",
420         dest="build_sets",
421         action="store_true",
422         default=False,
423         help="(re)build location correlation sets")
424
425     # the --cacheage option
426     if config.has_option("default", "cacheage"):
427         default_cacheage = config.getint("default", "cacheage")
428     else: default_cacheage = 900
429     option_parser.add_option("--cacheage",
430         dest="cacheage",
431         default=default_cacheage,
432         help="duration in seconds to refresh cached data")
433
434     # the --cachedir option
435     if config.has_option("default", "cachedir"):
436         default_cachedir = config.get("default", "cachedir")
437     else: default_cachedir = "~/.weather"
438     option_parser.add_option("--cachedir",
439         dest="cachedir",
440         default=default_cachedir,
441         help="directory for storing cached searches and data")
442
443     # the -f/--forecast option
444     if config.has_option("default", "forecast"):
445         default_forecast = bool(config.get("default", "forecast"))
446     else: default_forecast = False
447     option_parser.add_option("-f", "--forecast",
448         dest="forecast",
449         action="store_true",
450         default=default_forecast,
451         help="include a local forecast")
452
453     # the --headers option
454     if config.has_option("default", "headers"):
455         default_headers = config.get("default", "headers")
456     else:
457         default_headers = \
458             "temperature," \
459             + "relative_humidity," \
460             + "wind," \
461             + "heat_index," \
462             + "windchill," \
463             + "weather," \
464             + "sky_conditions," \
465             + "precipitation_last_hour"
466     option_parser.add_option("--headers",
467         dest="headers",
468         default=default_headers,
469         help="list of conditions headers to display")
470
471     # the --imperial option
472     if config.has_option("default", "imperial"):
473         default_imperial = bool(config.get("default", "imperial"))
474     else: default_imperial = False
475     option_parser.add_option("--imperial",
476         dest="imperial",
477         action="store_true",
478         default=default_imperial,
479         help="filter/convert conditions for US/UK units")
480
481     # the --info option
482     option_parser.add_option("--info",
483         dest="info",
484         action="store_true",
485         default=False,
486         help="output detailed information for your search")
487
488     # the -l/--list option
489     option_parser.add_option("-l", "--list",
490         dest="list",
491         action="store_true",
492         default=False,
493         help="list all configured aliases and cached searches")
494
495     # the --longlist option
496     option_parser.add_option("--longlist",
497         dest="longlist",
498         action="store_true",
499         default=False,
500         help="display details of all configured aliases")
501
502     # the -m/--metric option
503     if config.has_option("default", "metric"):
504         default_metric = bool(config.get("default", "metric"))
505     else: default_metric = False
506     option_parser.add_option("-m", "--metric",
507         dest="metric",
508         action="store_true",
509         default=default_metric,
510         help="filter/convert conditions for metric units")
511
512     # the -n/--no-conditions option
513     if config.has_option("default", "conditions"):
514         default_conditions = bool(config.get("default", "conditions"))
515     else: default_conditions = True
516     option_parser.add_option("-n", "--no-conditions",
517         dest="conditions",
518         action="store_false",
519         default=default_conditions,
520         help="disable output of current conditions")
521
522     # the --no-cache option
523     if config.has_option("default", "cache"):
524         default_cache = bool(config.get("default", "cache"))
525     else: default_cache = True
526     option_parser.add_option("--no-cache",
527         dest="cache",
528         action="store_false",
529         default=default_cache,
530         help="disable all caching (searches and data)")
531
532     # the --no-cache-data option
533     if config.has_option("default", "cache_data"):
534         default_cache_data = bool(config.get("default", "cache_data"))
535     else: default_cache_data = True
536     option_parser.add_option("--no-cache-data",
537         dest="cache_data",
538         action="store_false",
539         default=default_cache_data,
540         help="disable retrieved data caching")
541
542     # the --no-cache-search option
543     if config.has_option("default", "cache_search"):
544         default_cache_search = bool(config.get("default", "cache_search"))
545     else: default_cache_search = True
546     option_parser.add_option("--no-cache-search",
547         dest="cache_search",
548         action="store_false",
549         default=default_cache_search,
550         help="disable search result caching")
551
552     # the -q/--quiet option
553     if config.has_option("default", "quiet"):
554         default_quiet = bool(config.get("default", "quiet"))
555     else: default_quiet = False
556     option_parser.add_option("-q", "--quiet",
557         dest="quiet",
558         action="store_true",
559         default=default_quiet,
560         help="skip preambles and don't indent")
561
562     # the --setpath option
563     if config.has_option("default", "setpath"):
564         default_setpath = config.get("default", "setpath")
565     else: default_setpath = ".:~/.weather"
566     option_parser.add_option("--setpath",
567         dest="setpath",
568         default=default_setpath,
569         help="directory search path for correlation sets")
570
571     # the -v/--verbose option
572     if config.has_option("default", "verbose"):
573         default_verbose = bool(config.get("default", "verbose"))
574     else: default_verbose = False
575     option_parser.add_option("-v", "--verbose",
576         dest="verbose",
577         action="store_true",
578         default=default_verbose,
579         help="show full decoded feeds")
580
581     # deprecated options
582     if config.has_option("default", "city"):
583         default_city = config.get("default", "city")
584     else: default_city = ""
585     option_parser.add_option("-c", "--city",
586         dest="city",
587         default=default_city,
588         help=optparse.SUPPRESS_HELP)
589     if config.has_option("default", "id"):
590         default_id = config.get("default", "id")
591     else: default_id = ""
592     option_parser.add_option("-i", "--id",
593         dest="id",
594         default=default_id,
595         help=optparse.SUPPRESS_HELP)
596     if config.has_option("default", "st"):
597         default_st = config.get("default", "st")
598     else: default_st = ""
599     option_parser.add_option("-s", "--st",
600         dest="st",
601         default=default_st,
602         help=optparse.SUPPRESS_HELP)
603
604     options, arguments = option_parser.parse_args()
605     return options, arguments
606
607 def get_config():
608     """Parse the aliases and configuration."""
609     if pyversion("3"): import configparser
610     else: import ConfigParser as configparser
611     config = configparser.ConfigParser()
612     import os
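    # candidate rc files are read in order of increasing precedence, so
    # values from later files override those from earlier ones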
613     rcfiles = [
614         "/etc/weatherrc",
615         "/etc/weather/weatherrc",
616         os.path.expanduser("~/.weather/weatherrc"),
617         os.path.expanduser("~/.weatherrc"),
618         "weatherrc"
619         ]
620     for rcfile in rcfiles:
621         if os.access(rcfile, os.R_OK): config.read(rcfile)
622     for section in config.sections():
623         if section != section.lower():
624             if config.has_section(section.lower()):
625                 config.remove_section(section.lower())
626             config.add_section(section.lower())
627             for option,value in config.items(section):
628                 config.set(section.lower(), option, value)
629     return config
630
631 def integrate_search_cache(config, cachedir, setpath):
632     """Add cached search results into the configuration."""
633     if pyversion("3"): import configparser
634     else: import ConfigParser as configparser
635     import os, time
636     scache_fn = os.path.join( os.path.expanduser(cachedir), "searches" )
637     if not os.access(scache_fn, os.R_OK): return config
638     scache_fd = open(scache_fn)
639     created = float( scache_fd.readline().split(":")[1].strip().split()[0] )
640     scache_fd.close()
641     now = time.time()
642     datafiles = data_index(setpath)
643     if datafiles:
644         data_freshness = sorted(
645             [ x[1] for x in datafiles.values() ],
646             reverse=True
647         )[0]
648     else: data_freshness = now
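    # if any correlation data file is newer than the search cache, the cache
    # is discarded since its entries may refer to stale data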
649     if created < data_freshness <= now:
650         try:
651             os.remove(scache_fn)
652             print( "[clearing outdated %s]" % scache_fn )
653         except (IOError, OSError):
654             pass
655         return config
656     scache = configparser.ConfigParser()
657     scache.read(scache_fn)
658     for section in scache.sections():
659         if not config.has_section(section):
660             config.add_section(section)
661             for option,value in scache.items(section):
662                 config.set(section, option, value)
663     return config
664
665 def list_aliases(config, detail=False):
666     """Return a formatted list of aliases defined in the config."""
667     if detail:
668         output = "\n# configured alias details..."
669         for section in sorted(config.sections()):
670             output += "\n\n[%s]" % section
671             for item in sorted(config.items(section)):
672                 output += "\n%s = %s" % item
673         output += "\n"
674     else:
675         output = "configured aliases and cached searches..."
676         for section in sorted(config.sections()):
677             if config.has_option(section, "description"):
678                 description = config.get(section, "description")
679             else: description = "(no description provided)"
680             output += "\n   %s: %s" % (section, description)
681     return output
682
683 def data_index(path):
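    """Return a dict mapping data set names to (filename, mtime) tuples."""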
684     import os
685     datafiles = {}
686     for filename in ("airports", "places", "stations", "zctas", "zones"):
687         for dirname in path.split(":"):
688             for extension in ("", ".gz", ".txt"):
689                 candidate = os.path.expanduser(
690                     os.path.join( dirname, "".join( (filename, extension) ) )
691                 )
692                 if os.path.exists(candidate):
693                     datafiles[filename] = (
694                         candidate,
695                         os.stat(candidate).st_mtime
696                     )
697                     break
698             if filename in datafiles:
699                 break
700     return datafiles
701
702 def guess(
703     expression,
704     path=".",
705     max_results=20,
706     info=False,
707     cache_search=False,
708     cacheage=900,
709     cachedir=".",
710     quiet=False
711 ):
712     """Find URIs using airport, gecos, placename, station, ZCTA/ZIP, zone."""
713     import codecs, datetime, time, os, re, sys
714     if pyversion("3"): import configparser
715     else: import ConfigParser as configparser
716     datafiles = data_index(path)
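    # classify the expression by shape: three letters = airport code, four
    # alphanumerics = ICAO station, AAZnnn = NWS zone, five digits = ZCTA/ZIP,
    # a lat,lon pair = coordinates, FIPSnnn = FIPS code, anything else = name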
717     if re.match("[A-Za-z]{3}$", expression): searchtype = "airport"
718     elif re.match("[A-Za-z0-9]{4}$", expression): searchtype = "station"
719     elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", expression): searchtype = "zone"
720     elif re.match("[0-9]{5}$", expression): searchtype = "ZCTA"
721     elif re.match(
722         r"[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?, *[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?$",
723         expression
724     ):
725         searchtype = "coordinates"
726     elif re.match(r"(FIPS|fips)\d+$", expression): searchtype = "FIPS"
727     else:
728         searchtype = "name"
729         cache_search = False
730     if cache_search: action = "caching"
731     else: action = "using"
732     if info:
733         scores = [
734             (0.005, "bad"),
735             (0.025, "poor"),
736             (0.160, "suspect"),
737             (0.500, "mediocre"),
738             (0.840, "good"),
739             (0.975, "great"),
740             (0.995, "excellent"),
741             (1.000, "ideal"),
742         ]
743     if not quiet: print("Searching via %s..."%searchtype)
744     stations = configparser.ConfigParser()
745     dataname = "stations"
746     if dataname in datafiles:
747         datafile = datafiles[dataname][0]
748         if datafile.endswith(".gz"):
749             import gzip
750             if pyversion("3"):
751                 stations.read_string(
752                     gzip.open(datafile).read().decode("utf-8") )
753             else: stations.readfp( gzip.open(datafile) )
754         else:
755             stations.read(datafile)
756     else:
757         message = "%s error: can't find \"%s\" data file\n" % (
758             os.path.basename( sys.argv[0] ),
759             dataname
760         )
761         sys.stderr.write(message)
762         exit(1)
763     zones = configparser.ConfigParser()
764     dataname = "zones"
765     if dataname in datafiles:
766         datafile = datafiles[dataname][0]
767         if datafile.endswith(".gz"):
768             import gzip
769             if pyversion("3"):
770                 zones.read_string( gzip.open(datafile).read().decode("utf-8") )
771             else: zones.readfp( gzip.open(datafile) )
772         else:
773             zones.read(datafile)
774     else:
775         message = "%s error: can't find \"%s\" data file\n" % (
776             os.path.basename( sys.argv[0] ),
777             dataname
778         )
779         sys.stderr.write(message)
780         exit(1)
781     search = None
782     station = ("", 0)
783     zone = ("", 0)
784     dataset = None
785     possibilities = []
786     uris = {}
787     if searchtype == "airport":
788         expression = expression.lower()
789         airports = configparser.ConfigParser()
790         dataname = "airports"
791         if dataname in datafiles:
792             datafile = datafiles[dataname][0]
793             if datafile.endswith(".gz"):
794                 import gzip
795                 if pyversion("3"):
796                     airports.read_string(
797                         gzip.open(datafile).read().decode("utf-8") )
798                 else: airports.readfp( gzip.open(datafile) )
799             else:
800                 airports.read(datafile)
801         else:
802             message = "%s error: can't find \"%s\" data file\n" % (
803                 os.path.basename( sys.argv[0] ),
804                 dataname
805             )
806             sys.stderr.write(message)
807             exit(1)
808         if airports.has_section(expression) \
809             and airports.has_option(expression, "station"):
810             search = (expression, "IATA/FAA airport code %s" % expression)
811             station = ( airports.get(expression, "station"), 0 )
812             if stations.has_option(station[0], "zone"):
813                 zone = eval( stations.get(station[0], "zone") )
814                 dataset = stations
815             if not ( info or quiet ) \
816                 and stations.has_option( station[0], "description" ):
817                 print(
818                     "[%s result %s]" % (
819                         action,
820                         stations.get(station[0], "description")
821                     )
822                 )
823         else:
824             message = "No IATA/FAA airport code \"%s\" in the %s file.\n" % (
825                 expression,
826                 datafiles["airports"][0]
827             )
828             sys.stderr.write(message)
829             exit(1)
830     elif searchtype == "station":
831         expression = expression.lower()
832         if stations.has_section(expression):
833             station = (expression, 0)
834             if not search:
835                 search = (expression, "ICAO station code %s" % expression)
836             if stations.has_option(expression, "zone"):
837                 zone = eval( stations.get(expression, "zone") )
838                 dataset = stations
839             if not ( info or quiet ) \
840                 and stations.has_option(expression, "description"):
841                 print(
842                     "[%s result %s]" % (
843                         action,
844                         stations.get(expression, "description")
845                     )
846                 )
847         else:
848             message = "No ICAO weather station \"%s\" in the %s file.\n" % (
849                 expression,
850                 datafiles["stations"][0]
851             )
852             sys.stderr.write(message)
853             exit(1)
854     elif searchtype == "zone":
855         expression = expression.lower()
856         if zones.has_section(expression) \
857             and zones.has_option(expression, "station"):
858             zone = (expression, 0)
859             station = eval( zones.get(expression, "station") )
860             dataset = zones
861             search = (expression, "NWS/NOAA weather zone %s" % expression)
862             if not ( info or quiet ) \
863                 and zones.has_option(expression, "description"):
864                 print(
865                     "[%s result %s]" % (
866                         action,
867                         zones.get(expression, "description")
868                     )
869                 )
870         else:
871             message = "No usable NWS weather zone \"%s\" in the %s file.\n" % (
872                 expression,
873                 datafiles["zones"][0]
874             )
875             sys.stderr.write(message)
876             exit(1)
877     elif searchtype == "ZCTA":
878         zctas = configparser.ConfigParser()
879         dataname = "zctas"
880         if dataname in datafiles:
881             datafile = datafiles[dataname][0]
882             if datafile.endswith(".gz"):
883                 import gzip
884                 if pyversion("3"):
885                     zctas.read_string(
886                         gzip.open(datafile).read().decode("utf-8") )
887                 else: zctas.readfp( gzip.open(datafile) )
888             else:
889                 zctas.read(datafile)
890         else:
891             message = "%s error: can't find \"%s\" data file\n" % (
892                 os.path.basename( sys.argv[0] ),
893                 dataname
894             )
895             sys.stderr.write(message)
896             exit(1)
897         dataset = zctas
898         if zctas.has_section(expression) \
899             and zctas.has_option(expression, "station"):
900             station = eval( zctas.get(expression, "station") )
901             search = (expression, "Census ZCTA (ZIP code) %s" % expression)
902             if zctas.has_option(expression, "zone"):
903                 zone = eval( zctas.get(expression, "zone") )
904         else:
905             message = "No census ZCTA (ZIP code) \"%s\" in the %s file.\n" % (
906                 expression,
907                 datafiles["zctas"][0]
908             )
909             sys.stderr.write(message)
910             exit(1)
911     elif searchtype == "coordinates":
912         search = (expression, "Geographic coordinates %s" % expression)
913         stationtable = {}
914         for station in stations.sections():
915             if stations.has_option(station, "location"):
916                 stationtable[station] = {
917                     "location": eval( stations.get(station, "location") )
918                 }
919         station = closest( gecos(expression), stationtable, "location", 0.1 )
920         if not station[0]:
921             message = "No ICAO weather station found near %s.\n" % expression
922             sys.stderr.write(message)
923             exit(1)
924         zonetable = {}
925         for zone in zones.sections():
926             if zones.has_option(zone, "centroid"):
927                 zonetable[zone] = {
928                     "centroid": eval( zones.get(zone, "centroid") )
929                 }
930         zone = closest( gecos(expression), zonetable, "centroid", 0.1 )
931         if not zone[0]:
932             message = "No NWS weather zone near %s; forecasts unavailable.\n" \
933                 % expression
934             sys.stderr.write(message)
935     elif searchtype in ("FIPS", "name"):
936         places = configparser.ConfigParser()
937         dataname = "places"
938         if dataname in datafiles:
939             datafile = datafiles[dataname][0]
940             if datafile.endswith(".gz"):
941                 import gzip
942                 if pyversion("3"):
943                     places.read_string(
944                         gzip.open(datafile).read().decode("utf-8") )
945                 else: places.readfp( gzip.open(datafile) )
946             else:
947                 places.read(datafile)
948         else:
949             message = "%s error: can't find \"%s\" data file\n" % (
950                 os.path.basename( sys.argv[0] ),
951                 dataname
952             )
953             sys.stderr.write(message)
954             exit(1)
955         dataset = places
956         place = expression.lower()
957         if places.has_section(place) and places.has_option(place, "station"):
958             station = eval( places.get(place, "station") )
959             search = (expression, "Census Place %s" % expression)
960             if places.has_option(place, "description"):
961                 search = (
962                     search[0],
963                     search[1] + ", %s" % places.get(place, "description")
964                 )
965             if places.has_option(place, "zone"):
966                 zone = eval( places.get(place, "zone") )
967             if not ( info or quiet ) \
968                 and places.has_option(place, "description"):
969                 print(
970                     "[%s result %s]" % (
971                         action,
972                         places.get(place, "description")
973                     )
974                 )
975         else:
976             for place in places.sections():
977                 if places.has_option(place, "description") \
978                     and places.has_option(place, "station") \
979                     and re.search(
980                         expression,
981                         places.get(place, "description"),
982                         re.I
983                     ):
984                         possibilities.append(place)
985             for place in stations.sections():
986                 if stations.has_option(place, "description") \
987                     and re.search(
988                         expression,
989                         stations.get(place, "description"),
990                         re.I
991                     ):
992                         possibilities.append(place)
993             for place in zones.sections():
994                 if zones.has_option(place, "description") \
995                     and zones.has_option(place, "station") \
996                     and re.search(
997                         expression,
998                         zones.get(place, "description"),
999                         re.I
1000                     ):
1001                         possibilities.append(place)
1002             if len(possibilities) == 1:
1003                 place = possibilities[0]
1004                 if places.has_section(place):
1005                     station = eval( places.get(place, "station") )
1006                     description = places.get(place, "description")
1007                     if places.has_option(place, "zone"):
1008                         zone = eval( places.get(place, "zone" ) )
1009                     search = ( expression, "%s: %s" % (place, description) )
1010                 elif stations.has_section(place):
1011                     station = (place, 0.0)
1012                     description = stations.get(place, "description")
1013                     if stations.has_option(place, "zone"):
1014                         zone = eval( stations.get(place, "zone" ) )
1015                     search = ( expression, "ICAO station code %s" % place )
1016                 elif zones.has_section(place):
1017                     station = eval( zones.get(place, "station") )
1018                     description = zones.get(place, "description")
1019                     zone = (place, 0.0)
1020                     search = ( expression, "NWS/NOAA weather zone %s" % place )
1021                 if not ( info or quiet ):
1022                     print( "[%s result %s]" % (action, description) )
1023             if not possibilities and not station[0]:
1024                 message = "No FIPS code/census area match in the %s file.\n" % (
1025                     datafiles["places"][0]
1026                 )
1027                 sys.stderr.write(message)
1028                 exit(1)
1029     if station[0]:
1030         uris["metar"] = stations.get( station[0], "metar" )
1031         if zone[0]:
1032             for key,value in zones.items( zone[0] ):
1033                 if key not in ("centroid", "description", "station"):
1034                     uris[key] = value
1035     elif possibilities:
1036         count = len(possibilities)
1037         if count <= max_results:
1038             print( "Your search is ambiguous, returning %s matches:" % count )
1039             for place in sorted(possibilities):
1040                 if places.has_section(place):
1041                     print(
1042                         "   [%s] %s" % (
1043                             place,
1044                             places.get(place, "description")
1045                         )
1046                     )
1047                 elif stations.has_section(place):
1048                     print(
1049                         "   [%s] %s" % (
1050                             place,
1051                             stations.get(place, "description")
1052                         )
1053                     )
1054                 elif zones.has_section(place):
1055                     print(
1056                         "   [%s] %s" % (
1057                             place,
1058                             zones.get(place, "description")
1059                         )
1060                     )
1061         else:
1062             print(
1063                 "Your search is too ambiguous, returning %s matches." % count
1064             )
1065         exit(0)
1066     if info:
1067         stationlist = []
1068         zonelist = []
1069         if dataset:
1070             for section in dataset.sections():
1071                 if dataset.has_option(section, "station"):
1072                     stationlist.append(
1073                         eval( dataset.get(section, "station") )[1]
1074                     )
1075                 if dataset.has_option(section, "zone"):
1076                     zonelist.append( eval( dataset.get(section, "zone") )[1] )
1077         stationlist.sort()
1078         zonelist.sort()
1079         scount = len(stationlist)
1080         zcount = len(zonelist)
1081         sranks = []
1082         zranks = []
1083         for score in scores:
1084             if stationlist:
1085                 sranks.append( stationlist[ int( (1-score[0]) * scount ) ] )
1086             if zonelist:
1087                 zranks.append( zonelist[ int( (1-score[0]) * zcount ) ] )
1088         description = search[1]
1089         uris["description"] = description
1090         print(
1091             "%s\n%s" % ( description, "-" * len(description) )
1092         )
1093         print(
1094             "%s: %s" % (
1095                 station[0],
1096                 stations.get( station[0], "description" )
1097             )
1098         )
1099         km = radian_to_km*station[1]
1100         mi = radian_to_mi*station[1]
1101         if sranks and not description.startswith("ICAO station code "):
1102             for index in range(0, len(scores)):
1103                 if station[1] >= sranks[index]:
1104                     score = scores[index][1]
1105                     break
1106             print(
1107                 "   (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1108             )
1109         elif searchtype == "coordinates":
1110             print( "   (%.3gkm, %.3gmi)" % (km, mi) )
1111         if zone[0]:
1112             print(
1113                 "%s: %s" % ( zone[0], zones.get( zone[0], "description" ) )
1114             )
1115         km = radian_to_km*zone[1]
1116         mi = radian_to_mi*zone[1]
1117         if zranks and not description.startswith("NWS/NOAA weather zone "):
1118             for index in range(0, len(scores)):
1119                 if zone[1] >= zranks[index]:
1120                     score = scores[index][1]
1121                     break
1122             print(
1123                 "   (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1124             )
1125         elif searchtype == "coordinates" and zone[0]:
1126             print( "   (%.3gkm, %.3gmi)" % (km, mi) )
1127     if cache_search:
1128         now = time.time()
1129         nowstamp = "%s (%s)" % (
1130             now,
1131             datetime.datetime.isoformat(
1132                 datetime.datetime.fromtimestamp(now),
1133                 " "
1134             )
1135         )
1136         search_cache = ["\n"]
1137         search_cache.append( "[%s]\n" % search[0] ) 
1138         search_cache.append( "description = cached %s\n" % nowstamp )
1139         for uriname in sorted(uris.keys()):
1140             search_cache.append( "%s = %s\n" % ( uriname, uris[uriname] ) )
1141         real_cachedir = os.path.expanduser(cachedir)
1142         if not os.path.exists(real_cachedir):
1143             try: os.makedirs(real_cachedir)
1144             except (IOError, OSError): pass
1145         scache_fn = os.path.join(real_cachedir, "searches")
1146         if not os.path.exists(scache_fn):
1147             then = sorted(
1148                     [ x[1] for x in datafiles.values() ],
1149                     reverse=True
1150                 )[0]
1151             thenstamp = "%s (%s)" % (
1152                 then,
1153                 datetime.datetime.isoformat(
1154                     datetime.datetime.fromtimestamp(then),
1155                     " "
1156                 )
1157             )
1158             search_cache.insert(
1159                 0,
1160                 "# based on data files from: %s\n" % thenstamp
1161             )
1162         try:
1163             scache_existing = configparser.ConfigParser()
1164             scache_existing.read(scache_fn)
1165             if not scache_existing.has_section(search[0]):
1166                 scache_fd = codecs.open(scache_fn, "a", "utf-8")
1167                 scache_fd.writelines(search_cache)
1168                 scache_fd.close()
1169         except (IOError, OSError): pass
1170     if not info:
1171         return(uris)
1172
1173 def closest(position, nodes, fieldname, angle=None):
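    """Find the node nearest to position; return (name, angular distance)."""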
1174     import math
1175     if not angle: angle = 2*math.pi
1176     match = None
1177     for name in nodes:
1178         if fieldname in nodes[name]:
1179             node = nodes[name][fieldname]
1180             if node and abs( position[0]-node[0] ) < angle:
1181                 if abs( position[1]-node[1] ) < angle \
1182                     or abs( abs( position[1]-node[1] ) - 2*math.pi ) < angle:
1183                     if position == node:
1184                         angle = 0
1185                         match = name
1186                     else:
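                        # great-circle angular separation via the spherical
                        # law of cosines (all values are in radians)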
1187                         candidate = math.acos(
1188                             math.sin( position[0] ) * math.sin( node[0] ) \
1189                                 + math.cos( position[0] ) \
1190                                 * math.cos( node[0] ) \
1191                                 * math.cos( position[1] - node[1] )
1192                             )
1193                         if candidate < angle:
1194                             angle = candidate
1195                             match = name
1196     if match: match = str(match)
1197     return (match, angle)
1198
1199 def gecos(formatted):
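    """Convert a 'lat,lon' string (decimal degrees or DMS) to a radian tuple."""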
1200     import math, re
1201     coordinates = formatted.split(",")
1202     for coordinate in range(0, 2):
1203         degrees, foo, minutes, bar, seconds, hemisphere = re.match(
1204             r"([\+-]?\d+\.?\d*)(-(\d+))?(-(\d+))?([ensw]?)$",
1205             coordinates[coordinate].strip().lower()
1206         ).groups()
1207         value = float(degrees)
1208         if minutes: value += float(minutes)/60
1209         if seconds: value += float(seconds)/3600
1210         if hemisphere and hemisphere in "sw": value *= -1
1211         coordinates[coordinate] = math.radians(value)
1212     return tuple(coordinates)
1213
1214 def correlate():
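    """Build the location correlation sets from the source data files."""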
1215     import codecs, csv, datetime, hashlib, os, re, sys, tarfile, time, zipfile
1216     if pyversion("3"): import configparser
1217     else: import ConfigParser as configparser
1218     for filename in os.listdir("."):
1219         if re.match("[0-9]{4}_Gaz_counties_national.zip$", filename):
1220             gcounties_an = filename
1221             gcounties_fn = filename[:-4] + ".txt"
1222         elif re.match("[0-9]{4}_Gaz_cousubs_national.zip$", filename):
1223             gcousubs_an = filename
1224             gcousubs_fn = filename[:-4] + ".txt"
1225         elif re.match("[0-9]{4}_Gaz_place_national.zip$", filename):
1226             gplace_an = filename
1227             gplace_fn = filename[:-4] + ".txt"
1228         elif re.match("[0-9]{4}_Gaz_zcta_national.zip$", filename):
1229             gzcta_an = filename
1230             gzcta_fn = filename[:-4] + ".txt"
1231         elif re.match("bp[0-9]{2}[a-z]{2}[0-9]{2}.dbx$", filename):
1232             cpfzcf_fn = filename
1233     nsdcccc_fn = "nsd_cccc.txt"
1234     ourairports_fn = "airports.csv"
1235     overrides_fn = "overrides.conf"
1236     overrideslog_fn = "overrides.log"
1237     slist_fn = "slist"
1238     zlist_fn = "zlist"
1239     qalog_fn = "qa.log"
1240     airports_fn = "airports"
1241     places_fn = "places"
1242     stations_fn = "stations"
1243     zctas_fn = "zctas"
1244     zones_fn = "zones"
1245     header = """\
1246 %s
1247 # generated by %s on %s from these public domain sources:
1248 #
1249 # https://www.census.gov/geographies/reference-files/time-series/geo/gazetteer-files.html
1250 # %s %s %s
1251 # %s %s %s
1252 # %s %s %s
1253 # %s %s %s
1254 #
1255 # https://www.weather.gov/gis/ZoneCounty/
1256 # %s %s %s
1257 #
1258 # https://tgftp.nws.noaa.gov/data/
1259 # %s %s %s
1260 #
1261 # https://ourairports.com/data/
1262 # %s %s %s
1263 #
1264 # ...and these manually-generated or hand-compiled adjustments:
1265 # %s %s %s
1266 # %s %s %s
1267 # %s %s %s\
1268 """ % (
1269         weather_copyright,
1270         os.path.basename( sys.argv[0] ),
1271         datetime.date.isoformat(
1272             datetime.datetime.fromtimestamp( time.time() )
1273         ),
1274         hashlib.md5( open(gcounties_an, "rb").read() ).hexdigest(),
1275         datetime.date.isoformat(
1276             datetime.datetime.fromtimestamp( os.path.getmtime(gcounties_an) )
1277         ),
1278         gcounties_an,
1279         hashlib.md5( open(gcousubs_an, "rb").read() ).hexdigest(),
1280         datetime.date.isoformat(
1281             datetime.datetime.fromtimestamp( os.path.getmtime(gcousubs_an) )
1282         ),
1283         gcousubs_an,
1284         hashlib.md5( open(gplace_an, "rb").read() ).hexdigest(),
1285         datetime.date.isoformat(
1286             datetime.datetime.fromtimestamp( os.path.getmtime(gplace_an) )
1287         ),
1288         gplace_an,
1289         hashlib.md5( open(gzcta_an, "rb").read() ).hexdigest(),
1290         datetime.date.isoformat(
1291             datetime.datetime.fromtimestamp( os.path.getmtime(gzcta_an) )
1292         ),
1293         gzcta_an,
1294         hashlib.md5( open(cpfzcf_fn, "rb").read() ).hexdigest(),
1295         datetime.date.isoformat(
1296             datetime.datetime.fromtimestamp( os.path.getmtime(cpfzcf_fn) )
1297         ),
1298         cpfzcf_fn,
1299         hashlib.md5( open(nsdcccc_fn, "rb").read() ).hexdigest(),
1300         datetime.date.isoformat(
1301             datetime.datetime.fromtimestamp( os.path.getmtime(nsdcccc_fn) )
1302         ),
1303         nsdcccc_fn,
1304         hashlib.md5( open(ourairports_fn, "rb").read() ).hexdigest(),
1305         datetime.date.isoformat(
1306             datetime.datetime.fromtimestamp( os.path.getmtime(ourairports_fn) )
1307         ),
1308         ourairports_fn,
1309         hashlib.md5( open(overrides_fn, "rb").read() ).hexdigest(),
1310         datetime.date.isoformat(
1311             datetime.datetime.fromtimestamp( os.path.getmtime(overrides_fn) )
1312         ),
1313         overrides_fn,
1314         hashlib.md5( open(slist_fn, "rb").read() ).hexdigest(),
1315         datetime.date.isoformat(
1316             datetime.datetime.fromtimestamp( os.path.getmtime(slist_fn) )
1317         ),
1318         slist_fn,
1319         hashlib.md5( open(zlist_fn, "rb").read() ).hexdigest(),
1320         datetime.date.isoformat(
1321             datetime.datetime.fromtimestamp( os.path.getmtime(zlist_fn) )
1322         ),
1323         zlist_fn
1324     )
1325     airports = {}
1326     places = {}
1327     stations = {}
1328     zctas = {}
1329     zones = {}
1330     message = "Reading %s:%s..." % (gcounties_an, gcounties_fn)
1331     sys.stdout.write(message)
1332     sys.stdout.flush()
1333     count = 0
1334     gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "r")
1335     columns = gcounties.readline().decode("utf-8").strip().split("\t")
1336     for line in gcounties:
1337         fields = line.decode("utf-8").strip().split("\t")
1338         f_geoid = fields[ columns.index("GEOID") ].strip()
1339         f_name = fields[ columns.index("NAME") ].strip()
1340         f_usps = fields[ columns.index("USPS") ].strip()
1341         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1342         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1343         if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1344             fips = "fips%s" % f_geoid
1345             if fips not in places: places[fips] = {}
1346             places[fips]["centroid"] = gecos(
1347                 "%s,%s" % (f_intptlat, f_intptlong)
1348             )
1349             places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1350             count += 1
1351     gcounties.close()
1352     print("done (%s lines)." % count)
1353     message = "Reading %s:%s..." % (gcousubs_an, gcousubs_fn)
1354     sys.stdout.write(message)
1355     sys.stdout.flush()
1356     count = 0
1357     gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "r")
1358     columns = gcousubs.readline().decode("utf-8").strip().split("\t")
1359     for line in gcousubs:
1360         fields = line.decode("utf-8").strip().split("\t")
1361         f_geoid = fields[ columns.index("GEOID") ].strip()
1362         f_name = fields[ columns.index("NAME") ].strip()
1363         f_usps = fields[ columns.index("USPS") ].strip()
1364         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1365         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1366         if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1367             fips = "fips%s" % f_geoid
1368             if fips not in places: places[fips] = {}
1369             places[fips]["centroid"] = gecos(
1370                 "%s,%s" % (f_intptlat, f_intptlong)
1371             )
1372             places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1373             count += 1
1374     gcousubs.close()
1375     print("done (%s lines)." % count)
1376     message = "Reading %s:%s..." % (gplace_an, gplace_fn)
1377     sys.stdout.write(message)
1378     sys.stdout.flush()
1379     count = 0
1380     gplace = zipfile.ZipFile(gplace_an).open(gplace_fn, "rU")
1381     columns = gplace.readline().decode("utf-8").strip().split("\t")
1382     for line in gplace:
1383         fields = line.decode("utf-8").strip().split("\t")
1384         f_geoid = fields[ columns.index("GEOID") ].strip()
1385         f_name = fields[ columns.index("NAME") ].strip()
1386         f_usps = fields[ columns.index("USPS") ].strip()
1387         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1388         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1389         if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1390             fips = "fips%s" % f_geoid
1391             if fips not in places: places[fips] = {}
1392             places[fips]["centroid"] = gecos(
1393                 "%s,%s" % (f_intptlat, f_intptlong)
1394             )
1395             places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1396             count += 1
1397     gplace.close()
1398     print("done (%s lines)." % count)
1399     message = "Reading %s..." % slist_fn
1400     sys.stdout.write(message)
1401     sys.stdout.flush()
1402     count = 0
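         # The station list is one ICAO identifier per line ('#' starts a
         # comment); each station gets a decoded-METAR URL on the NWS site.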
1403     slist = codecs.open(slist_fn, "rU", "utf-8")
1404     for line in slist:
1405         icao = line.split("#")[0].strip()
1406         if icao:
1407             stations[icao] = {
1408                 "metar": "https://tgftp.nws.noaa.gov/data/observations/"\
1409                     + "metar/decoded/%s.TXT" % icao.upper()
1410             }
1411             count += 1
1412     slist.close()
1413     print("done (%s lines)." % count)
1414     message = "Reading %s..." % nsdcccc_fn
1415     sys.stdout.write(message)
1416     sys.stdout.flush()
1417     count = 0
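         # The NSD catalogue is semicolon-delimited.  Only stations already
         # present in the station list are annotated: field 0 is the ICAO id,
         # 3 the name, 4 the state, 5 the country, and 7-8 the coordinates
         # (with fields 5-6 apparently serving as a fallback position in some
         # records).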
1418     nsdcccc = codecs.open(nsdcccc_fn, "rU", "utf-8")
1419     for line in nsdcccc:
1420         line = str(line)
1421         fields = line.split(";")
1422         icao = fields[0].strip().lower()
1423         if icao in stations:
1424             description = []
1425             name = " ".join( fields[3].strip().title().split() )
1426             if name: description.append(name)
1427             st = fields[4].strip()
1428             if st: description.append(st)
1429             country = " ".join( fields[5].strip().title().split() )
1430             if country: description.append(country)
1431             if description:
1432                 stations[icao]["description"] = ", ".join(description)
1433             lat, lon = fields[7:9]
1434             if lat and lon:
1435                 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1436             elif "location" not in stations[icao]:
1437                 lat, lon = fields[5:7]
1438                 if lat and lon:
1439                     stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1440         count += 1
1441     nsdcccc.close()
1442     print("done (%s lines)." % count)
1443     message = "Reading %s..." % ourairports_fn
1444     sys.stdout.write(message)
1445     sys.stdout.flush()
1446     count = 0
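         # OurAirports distributes a CSV; the columns referenced by index
         # below (12 ICAO-style ident, 13 IATA code, 3 name, 10 municipality,
         # 9 region, 8 country, 4-5 coordinates) fill in whatever the NSD
         # data left blank.  The file is opened as text so that csv.reader
         # yields str values directly under Python 3.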
1447     ourairports = codecs.open(ourairports_fn, "rU", "utf-8")
1448     for row in csv.reader(ourairports):
1449         icao = row[12].lower()
1450         if icao in stations:
1451             iata = row[13].lower()
1452             if len(iata) == 3: airports[iata] = { "station": icao }
1453             if "description" not in stations[icao]:
1454                 description = []
1455                 name = row[3]
1456                 if name: description.append(name)
1457                 municipality = row[10]
1458                 if municipality: description.append(municipality)
1459                 region = row[9]
1460                 country = row[8]
1461                 if region:
1462                     if "-" in region:
1463                         c,r = region.split("-", 1)
1464                         if c == country: region = r
1465                     description.append(region)
1466                 if country:
1467                     description.append(country)
1468                 if description:
1469                     stations[icao]["description"] = ", ".join(description)
1470             if "location" not in stations[icao]:
1471                 lat = row[4]
1472                 if lat:
1473                     lon = row[5]
1474                     if lon:
1475                         stations[icao]["location"] = gecos(
1476                             "%s,%s" % (lat, lon)
1477                         )
1478         count += 1
1479     ourairports.close()
1480     print("done (%s lines)." % count)
1481     message = "Reading %s..." % zlist_fn
1482     sys.stdout.write(message)
1483     sys.stdout.flush()
1484     count = 0
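         # The zone list enumerates every known forecast zone id, one per
         # line, again with '#' comments.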
1485     zlist = codecs.open(zlist_fn, "rU", "utf-8")
1486     for line in zlist:
1487         line = line.split("#")[0].strip()
1488         if line:
1489             zones[line] = {}
1490             count += 1
1491     zlist.close()
1492     print("done (%s lines)." % count)
1493     message = "Reading %s..." % cpfzcf_fn
1494     sys.stdout.write(message)
1495     sys.stdout.flush()
1496     count = 0
1497     cpfz = {}
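         # The county/zone correlation file is pipe-delimited: field 0 is the
         # state, 1 the zone number, 3 the zone name, 5 the county, 6 the
         # FIPS code and 9-10 the centroid.  Each zone already present in the
         # zone list gets its per-product NWS URLs plus a description and
         # centroid, falling back to the county centroid when the file
         # supplies none.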
1498     cpfzcf = codecs.open(cpfzcf_fn, "rU", "utf-8")
1499     for line in cpfzcf:
1500         fields = line.strip().split("|")
1501         if len(fields) == 11 \
1502             and fields[0] and fields[1] and fields[9] and fields[10]:
1503             zone = "z".join( fields[:2] ).lower()
1504             if zone in zones:
1505                 state = fields[0]
1506                 if state:
1507                     zones[zone]["coastal_flood_statement"] = (
1508                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1509                         "flood/coastal/%s/%s.txt" % (state.lower(), zone))
1510                     zones[zone]["flash_flood_statement"] = (
1511                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1512                         "flash_flood/statement/%s/%s.txt"
1513                         % (state.lower(), zone))
1514                     zones[zone]["flash_flood_warning"] = (
1515                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1516                         "flash_flood/warning/%s/%s.txt"
1517                         % (state.lower(), zone))
1518                     zones[zone]["flash_flood_watch"] = (
1519                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1520                         "flash_flood/watch/%s/%s.txt" % (state.lower(), zone))
1521                     zones[zone]["flood_statement"] = (
1522                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1523                         "flood/statement/%s/%s.txt" % (state.lower(), zone))
1524                     zones[zone]["flood_warning"] = (
1525                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1526                         "flood/warning/%s/%s.txt" % (state.lower(), zone))
1527                     zones[zone]["severe_thunderstorm_warning"] = (
1528                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1529                         "thunderstorm/%s/%s.txt" % (state.lower(), zone))
1530                     zones[zone]["severe_weather_statement"] = (
1531                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1532                         "severe_weather_stmt/%s/%s.txt"
1533                         % (state.lower(), zone))
1534                     zones[zone]["short_term_forecast"] = (
1535                         "https://tgftp.nws.noaa.gov/data/forecasts/nowcast/"
1536                         "%s/%s.txt" % (state.lower(), zone))
1537                     zones[zone]["special_weather_statement"] = (
1538                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1539                         "special_weather_stmt/%s/%s.txt"
1540                         % (state.lower(), zone))
1541                     zones[zone]["state_forecast"] = (
1542                         "https://tgftp.nws.noaa.gov/data/forecasts/state/"
1543                         "%s/%s.txt" % (state.lower(), zone))
1544                     zones[zone]["urgent_weather_message"] = (
1545                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1546                         "non_precip/%s/%s.txt" % (state.lower(), zone))
1547                     zones[zone]["zone_forecast"] = (
1548                         "https://tgftp.nws.noaa.gov/data/forecasts/zone/"
1549                         "%s/%s.txt" % (state.lower(), zone))
1550                 description = fields[3].strip()
1551                 fips = "fips%s"%fields[6]
1552                 county = fields[5]
1553                 if county:
1554                     if description.endswith(county):
1555                         description += " County"
1556                     else:
1557                         description += ", %s County" % county
1558                 description += ", %s, US" % state
1559                 zones[zone]["description"] = description
1560                 zones[zone]["centroid"] = gecos( ",".join( fields[9:11] ) )
1561                 if fips in places and not zones[zone]["centroid"]:
1562                     zones[zone]["centroid"] = places[fips]["centroid"]
1563         count += 1
1564     cpfzcf.close()
1565     print("done (%s lines)." % count)
1566     message = "Reading %s:%s..." % (gzcta_an, gzcta_fn)
1567     sys.stdout.write(message)
1568     sys.stdout.flush()
1569     count = 0
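         # The ZCTA gazetteer supplies a centroid for each five-digit ZIP
         # Code Tabulation Area.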
1570     gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "rU")
1571     columns = gzcta.readline().decode("utf-8").strip().split("\t")
1572     for line in gzcta:
1573         fields = line.decode("utf-8").strip().split("\t")
1574         f_geoid = fields[ columns.index("GEOID") ].strip()
1575         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1576         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1577         if f_geoid and f_intptlat and f_intptlong:
1578             if f_geoid not in zctas: zctas[f_geoid] = {}
1579             zctas[f_geoid]["centroid"] = gecos(
1580                 "%s,%s" % (f_intptlat, f_intptlong)
1581             )
1582             count += 1
1583     gzcta.close()
1584     print("done (%s lines)." % count)
1585     message = "Reading %s..." % overrides_fn
1586     sys.stdout.write(message)
1587     sys.stdout.flush()
1588     count = 0
1589     added = 0
1590     removed = 0
1591     changed = 0
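         # Overrides are an INI file whose section names select what to
         # touch: three letters for an airport, four alphanumerics for a
         # station, five digits for a ZCTA, SSzNNN for a zone and fipsNNN for
         # a place.  A leading '-' on the section name deletes the entry;
         # otherwise options are merged in, with centroid/location values
         # eval()'d into tuples, so the overrides file has to be trusted
         # (maintainer-supplied) input.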
1592     overrides = configparser.ConfigParser()
1593     overrides.readfp( codecs.open(overrides_fn, "r", "utf8") )
1594     overrideslog = []
1595     for section in overrides.sections():
1596         addopt = 0
1597         chgopt = 0
1598         if section.startswith("-"):
1599             section = section[1:]
1600             delete = True
1601         else: delete = False
1602         if re.match("[A-Za-z]{3}$", section):
1603             if delete:
1604                 if section in airports:
1605                     del( airports[section] )
1606                     logact = "removed airport %s" % section
1607                     removed += 1
1608                 else:
1609                     logact = "tried to remove nonexistent airport %s" % section
1610             else:
1611                 if section in airports:
1612                     logact = "changed airport %s" % section
1613                     changed += 1
1614                 else:
1615                     airports[section] = {}
1616                     logact = "added airport %s" % section
1617                     added += 1
1618                 for key,value in overrides.items(section):
1619                     if key in airports[section]: chgopt += 1
1620                     else: addopt += 1
1621                     if key in ("centroid", "location"):
1622                         airports[section][key] = eval(value)
1623                     else:
1624                         airports[section][key] = value
1625                 if addopt and chgopt:
1626                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1627                 elif addopt: logact += " (+%s options)" % addopt
1628                 elif chgopt: logact += " (!%s options)" % chgopt
1629         elif re.match("[A-Za-z0-9]{4}$", section):
1630             if delete:
1631                 if section in stations:
1632                     del( stations[section] )
1633                     logact = "removed station %s" % section
1634                     removed += 1
1635                 else:
1636                     logact = "tried to remove nonexistent station %s" % section
1637             else:
1638                 if section in stations:
1639                     logact = "changed station %s" % section
1640                     changed += 1
1641                 else:
1642                     stations[section] = {}
1643                     logact = "added station %s" % section
1644                     added += 1
1645                 for key,value in overrides.items(section):
1646                     if key in stations[section]: chgopt += 1
1647                     else: addopt += 1
1648                     if key in ("centroid", "location"):
1649                         stations[section][key] = eval(value)
1650                     else:
1651                         stations[section][key] = value
1652                 if addopt and chgopt:
1653                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1654                 elif addopt: logact += " (+%s options)" % addopt
1655                 elif chgopt: logact += " (!%s options)" % chgopt
1656         elif re.match("[0-9]{5}$", section):
1657             if delete:
1658                 if section in zctas:
1659                     del( zctas[section] )
1660                     logact = "removed zcta %s" % section
1661                     removed += 1
1662                 else:
1663                     logact = "tried to remove nonexistent zcta %s" % section
1664             else:
1665                 if section in zctas:
1666                     logact = "changed zcta %s" % section
1667                     changed += 1
1668                 else:
1669                     zctas[section] = {}
1670                     logact = "added zcta %s" % section
1671                     added += 1
1672                 for key,value in overrides.items(section):
1673                     if key in zctas[section]: chgopt += 1
1674                     else: addopt += 1
1675                     if key in ("centroid", "location"):
1676                         zctas[section][key] = eval(value)
1677                     else:
1678                         zctas[section][key] = value
1679                 if addopt and chgopt:
1680                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1681                 elif addopt: logact += " (+%s options)" % addopt
1682                 elif chgopt: logact += " (!%s options)" % chgopt
1683         elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", section):
1684             if delete:
1685                 if section in zones:
1686                     del( zones[section] )
1687                     logact = "removed zone %s" % section
1688                     removed += 1
1689                 else:
1690                     logact = "tried to remove nonexistent zone %s" % section
1691             else:
1692                 if section in zones:
1693                     logact = "changed zone %s" % section
1694                     changed += 1
1695                 else:
1696                     zones[section] = {}
1697                     logact = "added zone %s" % section
1698                     added += 1
1699                 for key,value in overrides.items(section):
1700                     if key in zones[section]: chgopt += 1
1701                     else: addopt += 1
1702                     if key in ("centroid", "location"):
1703                         zones[section][key] = eval(value)
1704                     else:
1705                         zones[section][key] = value
1706                 if addopt and chgopt:
1707                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1708                 elif addopt: logact += " (+%s options)" % addopt
1709                 elif chgopt: logact += " (!%s options)" % chgopt
1710         elif re.match("fips[0-9]+$", section):
1711             if delete:
1712                 if section in places:
1713                     del( places[section] )
1714                     logact = "removed place %s" % section
1715                     removed += 1
1716                 else:
1717                     logact = "tried to remove nonexistent place %s" % section
1718             else:
1719                 if section in places:
1720                     logact = "changed place %s" % section
1721                     changed += 1
1722                 else:
1723                     places[section] = {}
1724                     logact = "added place %s" % section
1725                     added += 1
1726                 for key,value in overrides.items(section):
1727                     if key in places[section]: chgopt += 1
1728                     else: addopt += 1
1729                     if key in ("centroid", "location"):
1730                         places[section][key] = eval(value)
1731                     else:
1732                         places[section][key] = value
1733                 if addopt and chgopt:
1734                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1735                 elif addopt: logact += " (+%s options)" % addopt
1736                 elif chgopt: logact += " (!%s options)" % chgopt
             else:
                 logact = "ignored unrecognized section %s" % section
1737         count += 1
1738         overrideslog.append("%s\n" % logact)
1739     overrideslog.sort()
1740     if os.path.exists(overrideslog_fn):
1741         os.rename(overrideslog_fn, "%s_old"%overrideslog_fn)
1742     overrideslog_fd = codecs.open(overrideslog_fn, "w", "utf8")
1743     overrideslog_fd.writelines(overrideslog)
1744     overrideslog_fd.close()
1745     print("done (%s overridden sections: +%s/-%s/!%s)." % (
1746         count,
1747         added,
1748         removed,
1749         changed
1750     ) )
1751     estimate = 2*len(places) + len(stations) + 2*len(zctas) + len(zones)
1752     print(
1753         "Correlating places, stations, ZCTAs and zones (upper bound is %s):" % \
1754             estimate
1755     )
1756     count = 0
1757     milestones = list( range(51) )
1758     message = "   "
1759     sys.stdout.write(message)
1760     sys.stdout.flush()
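         # Correlate every place, station, ZCTA and zone with its nearest
         # counterparts using closest() (called with a 0.1 radian cutoff),
         # ticking off a simple 0-100% progress meter in 2% steps as the
         # estimated number of correlations is worked through.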
1761     for fips in places:
1762         centroid = places[fips]["centroid"]
1763         if centroid:
1764             station = closest(centroid, stations, "location", 0.1)
             else: station = (None, None)  # avoid reusing a stale match
1765         if station[0]:
1766             places[fips]["station"] = station
1767             count += 1
1768             if not count%100:
1769                 level = int(50*count/estimate)
1770                 if level in milestones:
1771                     for remaining in milestones[:milestones.index(level)+1]:
1772                         if remaining%5:
1773                             message = "."
1774                             sys.stdout.write(message)
1775                             sys.stdout.flush()
1776                         else:
1777                             message = "%s%%" % (remaining*2,)
1778                             sys.stdout.write(message)
1779                             sys.stdout.flush()
1780                         milestones.remove(remaining)
1781         if centroid:
1782             zone = closest(centroid, zones, "centroid", 0.1)
             else: zone = (None, None)  # avoid reusing a stale match
1783         if zone[0]:
1784             places[fips]["zone"] = zone
1785             count += 1
1786             if not count%100:
1787                 level = int(50*count/estimate)
1788                 if level in milestones:
1789                     for remaining in milestones[:milestones.index(level)+1]:
1790                         if remaining%5:
1791                             message = "."
1792                             sys.stdout.write(message)
1793                             sys.stdout.flush()
1794                         else:
1795                             message = "%s%%" % (remaining*2,)
1796                             sys.stdout.write(message)
1797                             sys.stdout.flush()
1798                         milestones.remove(remaining)
1799     for station in stations:
1800         if "location" in stations[station]:
1801             location = stations[station]["location"]
1802             if location:
1803                 zone = closest(location, zones, "centroid", 0.1)
                 else: zone = (None, None)  # avoid reusing a stale match
1804             if zone[0]:
1805                 stations[station]["zone"] = zone
1806                 count += 1
1807                 if not count%100:
1808                     level = int(50*count/estimate)
1809                     if level in milestones:
1810                         for remaining in milestones[:milestones.index(level)+1]:
1811                             if remaining%5:
1812                                 message = "."
1813                                 sys.stdout.write(message)
1814                                 sys.stdout.flush()
1815                             else:
1816                                 message = "%s%%" % (remaining*2,)
1817                                 sys.stdout.write(message)
1818                                 sys.stdout.flush()
1819                             milestones.remove(remaining)
1820     for zcta in zctas.keys():
1821         centroid = zctas[zcta]["centroid"]
1822         if centroid:
1823             station = closest(centroid, stations, "location", 0.1)
             else: station = (None, None)  # avoid reusing a stale match
1824         if station[0]:
1825             zctas[zcta]["station"] = station
1826             count += 1
1827             if not count%100:
1828                 level = int(50*count/estimate)
1829                 if level in milestones:
1830                     for remaining in milestones[ : milestones.index(level)+1 ]:
1831                         if remaining%5:
1832                             message = "."
1833                             sys.stdout.write(message)
1834                             sys.stdout.flush()
1835                         else:
1836                             message = "%s%%" % (remaining*2,)
1837                             sys.stdout.write(message)
1838                             sys.stdout.flush()
1839                         milestones.remove(remaining)
1840         if centroid:
1841             zone = closest(centroid, zones, "centroid", 0.1)
             else: zone = (None, None)  # avoid reusing a stale match
1842         if zone[0]:
1843             zctas[zcta]["zone"] = zone
1844             count += 1
1845             if not count%100:
1846                 level = int(50*count/estimate)
1847                 if level in milestones:
1848                     for remaining in milestones[:milestones.index(level)+1]:
1849                         if remaining%5:
1850                             message = "."
1851                             sys.stdout.write(message)
1852                             sys.stdout.flush()
1853                         else:
1854                             message = "%s%%" % (remaining*2,)
1855                             sys.stdout.write(message)
1856                             sys.stdout.flush()
1857                         milestones.remove(remaining)
1858     for zone in zones.keys():
1859         if "centroid" in zones[zone]:
1860             centroid = zones[zone]["centroid"]
1861             if centroid:
1862                 station = closest(centroid, stations, "location", 0.1)
                 else: station = (None, None)  # avoid reusing a stale match
1863             if station[0]:
1864                 zones[zone]["station"] = station
1865                 count += 1
1866                 if not count%100:
1867                     level = int(50*count/estimate)
1868                     if level in milestones:
1869                         for remaining in milestones[:milestones.index(level)+1]:
1870                             if remaining%5:
1871                                 message = "."
1872                                 sys.stdout.write(message)
1873                                 sys.stdout.flush()
1874                             else:
1875                                 message = "%s%%" % (remaining*2,)
1876                                 sys.stdout.write(message)
1877                                 sys.stdout.flush()
1878                             milestones.remove(remaining)
1879     for remaining in milestones:
1880         if remaining%5:
1881             message = "."
1882             sys.stdout.write(message)
1883             sys.stdout.flush()
1884         else:
1885             message = "%s%%" % (remaining*2,)
1886             sys.stdout.write(message)
1887             sys.stdout.flush()
1888     print("\n   done (%s correlations)." % count)
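         # Write each index back out as an INI-style file, preserving the
         # previous copy with an _old suffix; floats are rendered to seven
         # decimal places and tuples are expanded element by element so they
         # can be eval()'d back in later.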
1889     message = "Writing %s..." % airports_fn
1890     sys.stdout.write(message)
1891     sys.stdout.flush()
1892     count = 0
1893     if os.path.exists(airports_fn):
1894         os.rename(airports_fn, "%s_old"%airports_fn)
1895     airports_fd = codecs.open(airports_fn, "w", "utf8")
1896     airports_fd.write(header)
1897     for airport in sorted( airports.keys() ):
1898         airports_fd.write("\n\n[%s]" % airport)
1899         for key, value in sorted( airports[airport].items() ):
1900             if type(value) is float: value = "%.7f"%value
1901             elif type(value) is tuple:
1902                 elements = []
1903                 for element in value:
1904                     if type(element) is float: elements.append("%.7f"%element)
1905                     else: elements.append( repr(element) )
1906                 value = "(%s)"%", ".join(elements)
1907             airports_fd.write( "\n%s = %s" % (key, value) )
1908         count += 1
1909     airports_fd.write("\n")
1910     airports_fd.close()
1911     print("done (%s sections)." % count)
1912     message = "Writing %s..." % places_fn
1913     sys.stdout.write(message)
1914     sys.stdout.flush()
1915     count = 0
1916     if os.path.exists(places_fn):
1917         os.rename(places_fn, "%s_old"%places_fn)
1918     places_fd = codecs.open(places_fn, "w", "utf8")
1919     places_fd.write(header)
1920     for fips in sorted( places.keys() ):
1921         places_fd.write("\n\n[%s]" % fips)
1922         for key, value in sorted( places[fips].items() ):
1923             if type(value) is float: value = "%.7f"%value
1924             elif type(value) is tuple:
1925                 elements = []
1926                 for element in value:
1927                     if type(element) is float: elements.append("%.7f"%element)
1928                     else: elements.append( repr(element) )
1929                 value = "(%s)"%", ".join(elements)
1930             places_fd.write( "\n%s = %s" % (key, value) )
1931         count += 1
1932     places_fd.write("\n")
1933     places_fd.close()
1934     print("done (%s sections)." % count)
1935     message = "Writing %s..." % stations_fn
1936     sys.stdout.write(message)
1937     sys.stdout.flush()
1938     count = 0
1939     if os.path.exists(stations_fn):
1940         os.rename(stations_fn, "%s_old"%stations_fn)
1941     stations_fd = codecs.open(stations_fn, "w", "utf-8")
1942     stations_fd.write(header)
1943     for station in sorted( stations.keys() ):
1944         stations_fd.write("\n\n[%s]" % station)
1945         for key, value in sorted( stations[station].items() ):
1946             if type(value) is float: value = "%.7f"%value
1947             elif type(value) is tuple:
1948                 elements = []
1949                 for element in value:
1950                     if type(element) is float: elements.append("%.7f"%element)
1951                     else: elements.append( repr(element) )
1952                 value = "(%s)"%", ".join(elements)
1953             stations_fd.write( "\n%s = %s" % (key, value) )
1954         count += 1
1955     stations_fd.write("\n")
1956     stations_fd.close()
1957     print("done (%s sections)." % count)
1958     message = "Writing %s..." % zctas_fn
1959     sys.stdout.write(message)
1960     sys.stdout.flush()
1961     count = 0
1962     if os.path.exists(zctas_fn):
1963         os.rename(zctas_fn, "%s_old"%zctas_fn)
1964     zctas_fd = codecs.open(zctas_fn, "w", "utf8")
1965     zctas_fd.write(header)
1966     for zcta in sorted( zctas.keys() ):
1967         zctas_fd.write("\n\n[%s]" % zcta)
1968         for key, value in sorted( zctas[zcta].items() ):
1969             if type(value) is float: value = "%.7f"%value
1970             elif type(value) is tuple:
1971                 elements = []
1972                 for element in value:
1973                     if type(element) is float: elements.append("%.7f"%element)
1974                     else: elements.append( repr(element) )
1975                 value = "(%s)"%", ".join(elements)
1976             zctas_fd.write( "\n%s = %s" % (key, value) )
1977         count += 1
1978     zctas_fd.write("\n")
1979     zctas_fd.close()
1980     print("done (%s sections)." % count)
1981     message = "Writing %s..." % zones_fn
1982     sys.stdout.write(message)
1983     sys.stdout.flush()
1984     count = 0
1985     if os.path.exists(zones_fn):
1986         os.rename(zones_fn, "%s_old"%zones_fn)
1987     zones_fd = codecs.open(zones_fn, "w", "utf8")
1988     zones_fd.write(header)
1989     for zone in sorted( zones.keys() ):
1990         zones_fd.write("\n\n[%s]" % zone)
1991         for key, value in sorted( zones[zone].items() ):
1992             if type(value) is float: value = "%.7f"%value
1993             elif type(value) is tuple:
1994                 elements = []
1995                 for element in value:
1996                     if type(element) is float: elements.append("%.7f"%element)
1997                     else: elements.append( repr(element) )
1998                 value = "(%s)"%", ".join(elements)
1999             zones_fd.write( "\n%s = %s" % (key, value) )
2000         count += 1
2001     zones_fd.write("\n")
2002     zones_fd.close()
2003     print("done (%s sections)." % count)
2004     message = "Starting QA check..."
2005     sys.stdout.write(message)
2006     sys.stdout.flush()
2007     airports = configparser.ConfigParser()
2008     airports.read(airports_fn)
2009     places = configparser.ConfigParser()
2010     places.read(places_fn)
2011     stations = configparser.ConfigParser()
2012     stations.read(stations_fn)
2013     zctas = configparser.ConfigParser()
2014     zctas.read(zctas_fn)
2015     zones = configparser.ConfigParser()
2016     zones.read(zones_fn)
2017     qalog = []
2018     places_nocentroid = 0
2019     places_nodescription = 0
2020     for place in sorted( places.sections() ):
2021         if not places.has_option(place, "centroid"):
2022             qalog.append("%s: no centroid\n" % place)
2023             places_nocentroid += 1
2024         if not places.has_option(place, "description"):
2025             qalog.append("%s: no description\n" % place)
2026             places_nodescription += 1
2027     stations_nodescription = 0
2028     stations_nolocation = 0
2029     stations_nometar = 0
2030     for station in sorted( stations.sections() ):
2031         if not stations.has_option(station, "description"):
2032             qalog.append("%s: no description\n" % station)
2033             stations_nodescription += 1
2034         if not stations.has_option(station, "location"):
2035             qalog.append("%s: no location\n" % station)
2036             stations_nolocation += 1
2037         if not stations.has_option(station, "metar"):
2038             qalog.append("%s: no metar\n" % station)
2039             stations_nometar += 1
2040     airports_badstation = 0
2041     airports_nostation = 0
2042     for airport in sorted( airports.sections() ):
2043         if not airports.has_option(airport, "station"):
2044             qalog.append("%s: no station\n" % airport)
2045             airports_nostation += 1
2046         else:
2047             station = airports.get(airport, "station")
2048             if station not in stations.sections():
2049                 qalog.append( "%s: bad station %s\n" % (airport, station) )
2050                 airports_badstation += 1
2051     zctas_nocentroid = 0
2052     for zcta in sorted( zctas.sections() ):
2053         if not zctas.has_option(zcta, "centroid"):
2054             qalog.append("%s: no centroid\n" % zcta)
2055             zctas_nocentroid += 1
2056     zones_nocentroid = 0
2057     zones_nodescription = 0
2058     zones_noforecast = 0
2059     zones_overlapping = 0
2060     zonetable = {}
2061     for zone in zones.sections():
2062         if zones.has_option(zone, "centroid"):
2063             zonetable[zone] = {
2064                 "centroid": eval( zones.get(zone, "centroid") )
2065             }
2066     for zone in sorted( zones.sections() ):
2067         if zones.has_option(zone, "centroid"):
2068             zonetable_local = zonetable.copy()
2069             del( zonetable_local[zone] )
2070             centroid = eval( zones.get(zone, "centroid") )
2071             if centroid:
2072                 nearest = closest(centroid, zonetable_local, "centroid", 0.1)
                 else: nearest = (None, None)  # no centroid, nothing to compare
2073             if nearest[0] and nearest[1]*radian_to_km < 1:
2074                 qalog.append( "%s: within one km of %s\n" % (
2075                     zone,
2076                     nearest[0]
2077                 ) )
2078                 zones_overlapping += 1
2079         else:
2080             qalog.append("%s: no centroid\n" % zone)
2081             zones_nocentroid += 1
2082         if not zones.has_option(zone, "description"):
2083             qalog.append("%s: no description\n" % zone)
2084             zones_nodescription += 1
2085         if not zones.has_option(zone, "zone_forecast"):
2086             qalog.append("%s: no forecast\n" % zone)
2087             zones_noforecast += 1
2088     if os.path.exists(qalog_fn):
2089         os.rename(qalog_fn, "%s_old"%qalog_fn)
2090     qalog_fd = codecs.open(qalog_fn, "w", "utf8")
2091     qalog_fd.writelines(qalog)
2092     qalog_fd.close()
2093     if qalog:
2094         print("issues found (see %s for details):"%qalog_fn)
2095         if airports_badstation:
2096             print("   %s airports with invalid station"%airports_badstation)
2097         if airports_nostation:
2098             print("   %s airports with no station"%airports_nostation)
2099         if places_nocentroid:
2100             print("   %s places with no centroid"%places_nocentroid)
2101         if places_nodescription:
2102             print("   %s places with no description"%places_nodescription)
2103         if stations_nodescription:
2104             print("   %s stations with no description"%stations_nodescription)
2105         if stations_nolocation:
2106             print("   %s stations with no location"%stations_nolocation)
2107         if stations_nometar:
2108             print("   %s stations with no METAR"%stations_nometar)
2109         if zctas_nocentroid:
2110             print("   %s ZCTAs with no centroid"%zctas_nocentroid)
2111         if zones_nocentroid:
2112             print("   %s zones with no centroid"%zones_nocentroid)
2113         if zones_nodescription:
2114             print("   %s zones with no description"%zones_nodescription)
2115         if zones_noforecast:
2116             print("   %s zones with no forecast"%zones_noforecast)
2117         if zones_overlapping:
2118             print("   %s zones within one km of another"%zones_overlapping)
2119     else: print("no issues found.")
2120     print("Indexing complete!")