Correct default_atypes to match what's generated
[weather.git] / weather.py
1 """Contains various object definitions needed by the weather utility."""
2
3 weather_copyright = """\
4 # Copyright (c) 2006-2020 Jeremy Stanley <fungi@yuggoth.org>. Permission to
5 # use, copy, modify, and distribute this software is granted under terms
6 # provided in the LICENSE file distributed with this software.
7 #"""
8
9 weather_version = "2.4"
10
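# Average Earth radius expressed in kilometers and miles; multiplying either
# value by a great-circle central angle in radians (as computed by closest())
# gives an approximate surface distance.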
11 radian_to_km = 6372.795484
12 radian_to_mi = 3959.871528
13
14 def pyversion(ref=None):
15     """Determine the Python version and optionally compare to a reference."""
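    # Illustrative usage: pyversion() returns a version string such as
    # "3.8.2", while pyversion("2.6") returns True when the running
    # interpreter is at least Python 2.6.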
16     import platform
17     ver = platform.python_version()
18     if ref:
19         return [
20             int(x) for x in ver.split(".")[:2]
21         ] >= [
22             int(x) for x in ref.split(".")[:2]
23         ]
24     else: return ver
25
26 class Selections:
27     """An object to contain selection data."""
28     def __init__(self):
29         """Store the config, options and arguments."""
30         self.config = get_config()
31         self.options, self.arguments = get_options(self.config)
32         if self.get_bool("cache") and self.get_bool("cache_search") \
33             and not self.get_bool("longlist"):
34             integrate_search_cache(
35                 self.config,
36                 self.get("cachedir"),
37                 self.get("setpath")
38             )
39         if not self.arguments:
40             if "id" in self.options.__dict__ \
41                 and self.options.__dict__["id"]:
42                 self.arguments.append( self.options.__dict__["id"] )
43                 del( self.options.__dict__["id"] )
44                 import sys
45                 message = "WARNING: the --id option is deprecated and will eventually be removed\n"
46                 sys.stderr.write(message)
47             elif "city" in self.options.__dict__ \
48                 and self.options.__dict__["city"] \
49                 and "st" in self.options.__dict__ \
50                 and self.options.__dict__["st"]:
51                 self.arguments.append(
52                     "^%s city, %s" % (
53                         self.options.__dict__["city"],
54                         self.options.__dict__["st"]
55                     )
56                 )
57                 del( self.options.__dict__["city"] )
58                 del( self.options.__dict__["st"] )
59                 import sys
60                 message = "WARNING: the --city/--st options are deprecated and will eventually be removed\n"
61                 sys.stderr.write(message)
62     def get(self, option, argument=None):
63         """Retrieve data from the config or options."""
64         if argument:
65             if self.config.has_section(argument) and (
66                 self.config.has_option(argument, "city") \
67                     or self.config.has_option(argument, "id") \
68                     or self.config.has_option(argument, "st")
69             ):
70                 self.config.remove_section(argument)
71                 import sys
72                 message = "WARNING: the city/id/st options are now unsupported in aliases\n"
73                 sys.stderr.write(message)
74             if not self.config.has_section(argument):
75                 guessed = guess(
76                     argument,
77                     path=self.get("setpath"),
78                     info=self.get("info"),
79                     cache_search=(
80                         self.get("cache") and self.get("cache_search")
81                     ),
82                     cachedir=self.get("cachedir"),
83                     quiet=self.get_bool("quiet")
84                 )
85                 self.config.add_section(argument)
86                 for item in guessed.items():
87                     self.config.set(argument, *item)
88             if self.config.has_option(argument, option):
89                 return self.config.get(argument, option)
90         if option in self.options.__dict__:
91             return self.options.__dict__[option]
92         else:
93             import os, sys
94             message = "%s error: no URI defined for %s\n" % (
95                 os.path.basename( sys.argv[0] ),
96                 option
97             )
98             sys.stderr.write(message)
99             exit(1)
100     def get_bool(self, option, argument=None):
101         """Get data and coerce to a boolean if necessary."""
102         return bool(self.get(option, argument))
103     def getint(self, option, argument=None):
104         """Get data and coerce to an integer if necessary."""
105         value = self.get(option, argument)
106         if value: return int(value)
107         else: return 0
108
109 def average(coords):
110     """Average a list of coordinates."""
111     x = 0
112     y = 0
113     for coord in coords:
114         x += coord[0]
115         y += coord[1]
116     count = len(coords)
117     return (x/count, y/count)
118
119 def filter_units(line, units="imperial"):
120     """Filter or convert units in a line of text between US/UK and metric."""
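    # Illustrative example: a decoded METAR line such as
    # "Temperature: 70.0 F (21.1 C)" becomes "Temperature: 70.0 F" with
    # units="imperial" and "Temperature: 21.1 C" with units="metric".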
121     import re
122     # filter lines with both pressures in the form of "X inches (Y hPa)" or
123     # "X in. Hg (Y hPa)"
124     dual_p = re.match(
125         r"(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
126         line
127     )
128     if dual_p:
129         preamble, in_hg, i_fr, i_un, hpa, h_fr, trailer = dual_p.groups()
130         if units == "imperial": line = preamble + in_hg + trailer
131         elif units == "metric": line = preamble + hpa + trailer
132     # filter lines with both temperatures in the form of "X F (Y C)"
133     dual_t = re.match(
134         r"(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
135         line
136     )
137     if dual_t:
138         preamble, fahrenheit, f_fr, celsius, c_fr, trailer = dual_t.groups()
139         if units == "imperial": line = preamble + fahrenheit + trailer
140         elif units == "metric": line = preamble + celsius + trailer
141     # if metric is desired, convert distances in the form of "X mile(s)" to
142     # "Y kilometer(s)"
143     if units == "metric":
144         imperial_d = re.match(
145             r"(.* )(\d+)( mile\(s\))(.*)",
146             line
147         )
148         if imperial_d:
149             preamble, mi, m_u, trailer = imperial_d.groups()
150             line = preamble + str(int(round(int(mi)*1.609344))) \
151                 + " kilometer(s)" + trailer
152     # filter speeds in the form of "X MPH (Y KT)" to just "X MPH"; if metric is
153     # desired, convert to "Z KPH"
154     imperial_s = re.match(
155         r"(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
156         line
157     )
158     if imperial_s:
159         preamble, mph, m_u, kt, trailer = imperial_s.groups()
160         if units == "imperial": line = preamble + mph + m_u + trailer
161         elif units == "metric": 
162             line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
163                 trailer
174     # if imperial is desired, qualify given forecast temperatures like "X F"; if
175     # metric is desired, convert to "Y C"
176     imperial_t = re.match(
177         r"(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
178         line
179     )
180     if imperial_t:
181         preamble, parameter, fahrenheit, sep, trailer = imperial_t.groups()
182         if units == "imperial":
183             line = preamble + parameter + fahrenheit + " F" + sep + trailer
184         elif units == "metric":
185             line = preamble + parameter \
186                 + str(int(round((int(fahrenheit)-32)*5/9))) + " C" + sep \
187                 + trailer
188     # hand off the resulting line
189     return line
190
191 def get_uri(
192     uri,
193     ignore_fail=False,
194     cache_data=False,
195     cacheage=900,
196     cachedir="."
197 ):
198     """Return a string containing the results of a URI GET."""
199     if pyversion("3"):
200         import urllib, urllib.error, urllib.request
201         URLError = urllib.error.URLError
202         urlopen = urllib.request.urlopen
203     else:
204         import urllib2 as urllib
205         URLError = urllib.URLError
206         urlopen = urllib.urlopen
207     import os, time
208     if cache_data:
209         dcachedir = os.path.join( os.path.expanduser(cachedir), "datacache" )
210         if not os.path.exists(dcachedir):
211             try: os.makedirs(dcachedir)
212             except (IOError, OSError): pass
213         dcache_fn = os.path.join(
214             dcachedir,
215             uri.split(":",1)[1].replace("/","_")
216         )
217     now = time.time()
218     if cache_data and os.access(dcache_fn, os.R_OK) \
219         and now-cacheage < os.stat(dcache_fn).st_mtime <= now:
220         dcache_fd = open(dcache_fn)
221         data = dcache_fd.read()
222         dcache_fd.close()
223     else:
224         try:
225             data = urlopen(uri).read().decode("utf-8")
226         except URLError:
227             if ignore_fail: return ""
228             else:
229                 import os, sys, traceback
230                 message = "%s error: failed to retrieve\n   %s\n   %s" % (
231                         os.path.basename( sys.argv[0] ),
232                         uri,
233                         traceback.format_exception_only(
234                             sys.exc_info()[0],
235                             sys.exc_info()[1]
236                         )[0]
237                     )
238                 sys.stderr.write(message)
239                 sys.exit(1)
240         # Some data sources are HTML with the plain text wrapped in pre tags
241         if "<pre>" in data:
242             data = data[data.find("<pre>")+5:data.find("</pre>")]
243         if cache_data:
244             try:
245                 import codecs
246                 dcache_fd = codecs.open(dcache_fn, "w", "utf-8")
247                 dcache_fd.write(data)
248                 dcache_fd.close()
249             except (IOError, OSError): pass
250     return data
251
252 def get_metar(
253     uri=None,
254     verbose=False,
255     quiet=False,
256     headers=None,
257     imperial=False,
258     metric=False,
259     cache_data=False,
260     cacheage=900,
261     cachedir="."
262 ):
263     """Return a summarized METAR for the specified station."""
264     if not uri:
265         import os, sys
266         message = "%s error: METAR URI required for conditions\n" % \
267             os.path.basename( sys.argv[0] )
268         sys.stderr.write(message)
269         sys.exit(1)
270     metar = get_uri(
271         uri,
272         cache_data=cache_data,
273         cacheage=cacheage,
274         cachedir=cachedir
275     )
276     if pyversion("3") and type(metar) is bytes: metar = metar.decode("utf-8")
277     if verbose: return metar
278     else:
279         import re
280         lines = metar.split("\n")
281         if not headers:
282             headers = \
283                 "relative_humidity," \
284                 + "precipitation_last_hour," \
285                 + "sky_conditions," \
286                 + "temperature," \
287                 + "heat_index," \
288                 + "windchill," \
289                 + "weather," \
290                 + "wind"
291         headerlist = headers.lower().replace("_"," ").split(",")
292         output = []
293         if not quiet:
294             title = "Current conditions at %s"
295             place = lines[0].split(", ")
296             if len(place) > 1:
297                 place = "%s, %s" % ( place[0].title(), place[1] )
298             else: place = "<UNKNOWN>"
299             output.append(title%place)
300             output.append("Last updated " + lines[1])
301         header_match = False
302         for header in headerlist:
303             for line in lines:
304                 if line.lower().startswith(header + ":"):
305                     if re.match(r".*:\d+$", line): line = line[:line.rfind(":")]
306                     if imperial: line = filter_units(line, units="imperial")
307                     elif metric: line = filter_units(line, units="metric")
308                     if quiet: output.append(line)
309                     else: output.append("   " + line)
310                     header_match = True
311         if not header_match:
312             output.append(
313                 "(no conditions matched your header list, try with --verbose)"
314             )
315         return "\n".join(output)
316
317 def get_alert(
318     uri=None,
319     verbose=False,
320     quiet=False,
321     cache_data=False,
322     cacheage=900,
323     cachedir="."
324 ):
325     """Return alert notice for the specified URI."""
326     if not uri:
327         import os, sys
328         message = "%s error: Alert URI required for alerts\n" % \
329             os.path.basename( sys.argv[0] )
330         sys.stderr.write(message)
331         sys.exit(1)
332     alert = get_uri(
333         uri,
334         ignore_fail=True,
335         cache_data=cache_data,
336         cacheage=cacheage,
337         cachedir=cachedir
338     ).strip()
339     if pyversion("3") and type(alert) is bytes: alert = alert.decode("utf-8")
340     if alert:
341         if verbose: return alert
342         else:
343             if alert.find("\nNATIONAL WEATHER SERVICE") == -1:
344                 muted = False
345             else:
346                 muted = True
347             lines = alert.split("\n")
348             import time
349             valid_time = time.strftime("%Y%m%d%H%M")
350             output = []
351             for line in lines:
352                 if line.startswith("Expires:") \
353                     and "Expires:" + valid_time > line:
354                     return ""
355                 if muted and line.startswith("NATIONAL WEATHER SERVICE"):
356                     muted = False
357                     line = ""
358                 elif line == "&&":
359                     line = ""
360                 elif line == "$$":
361                     muted = True
362                 if line and not muted:
363                     if quiet: output.append(line)
364                     else: output.append("   " + line)
365             return "\n".join(output)
366
367 def get_options(config):
368     """Parse the options passed on the command line."""
369
370     # for optparse's builtin -h/--help option
371     usage = \
372         "usage: %prog [options] [alias1|search1 [alias2|search2 [...]]]"
373
374     # for optparse's builtin --version option
375     verstring = "%prog " + weather_version
376
377     # create the parser
378     import optparse
379     option_parser = optparse.OptionParser(usage=usage, version=verstring)
380     # separate options object from list of arguments and return both
381
382     # the -a/--alert option
383     if config.has_option("default", "alert"):
384         default_alert = bool(config.get("default", "alert"))
385     else: default_alert = False
386     option_parser.add_option("-a", "--alert",
387         dest="alert",
388         action="store_true",
389         default=default_alert,
390         help="include local alert notices")
391
392     # the --atypes option
393     if config.has_option("default", "atypes"):
394         default_atypes = config.get("default", "atypes")
395     else:
396         default_atypes = \
397             "coastal_flood_statement," \
398             + "flash_flood_statement," \
399             + "flash_flood_warning," \
400             + "flash_flood_watch," \
401             + "flood_statement," \
402             + "flood_warning," \
403             + "severe_thunderstorm_warning," \
404             + "severe_weather_statement," \
405             + "special_weather_statement," \
406             + "urgent_weather_message"
407     option_parser.add_option("--atypes",
408         dest="atypes",
409         default=default_atypes,
410         help="list of alert notification types to display")
411
412     # the --build-sets option
413     option_parser.add_option("--build-sets",
414         dest="build_sets",
415         action="store_true",
416         default=False,
417         help="(re)build location correlation sets")
418
419     # the --cacheage option
420     if config.has_option("default", "cacheage"):
421         default_cacheage = config.getint("default", "cacheage")
422     else: default_cacheage = 900
423     option_parser.add_option("--cacheage",
424         dest="cacheage",
425         default=default_cacheage,
426         help="duration in seconds to refresh cached data")
427
428     # the --cachedir option
429     if config.has_option("default", "cachedir"):
430         default_cachedir = config.get("default", "cachedir")
431     else: default_cachedir = "~/.weather"
432     option_parser.add_option("--cachedir",
433         dest="cachedir",
434         default=default_cachedir,
435         help="directory for storing cached searches and data")
436
437     # the -f/--forecast option
438     if config.has_option("default", "forecast"):
439         default_forecast = bool(config.get("default", "forecast"))
440     else: default_forecast = False
441     option_parser.add_option("-f", "--forecast",
442         dest="forecast",
443         action="store_true",
444         default=default_forecast,
445         help="include a local forecast")
446
447     # the --headers option
448     if config.has_option("default", "headers"):
449         default_headers = config.get("default", "headers")
450     else:
451         default_headers = \
452             "temperature," \
453             + "relative_humidity," \
454             + "wind," \
455             + "heat_index," \
456             + "windchill," \
457             + "weather," \
458             + "sky_conditions," \
459             + "precipitation_last_hour"
460     option_parser.add_option("--headers",
461         dest="headers",
462         default=default_headers,
463         help="list of conditions headers to display")
464
465     # the --imperial option
466     if config.has_option("default", "imperial"):
467         default_imperial = bool(config.get("default", "imperial"))
468     else: default_imperial = False
469     option_parser.add_option("--imperial",
470         dest="imperial",
471         action="store_true",
472         default=default_imperial,
473         help="filter/convert conditions for US/UK units")
474
475     # the --info option
476     option_parser.add_option("--info",
477         dest="info",
478         action="store_true",
479         default=False,
480         help="output detailed information for your search")
481
482     # the -l/--list option
483     option_parser.add_option("-l", "--list",
484         dest="list",
485         action="store_true",
486         default=False,
487         help="list all configured aliases and cached searches")
488
489     # the --longlist option
490     option_parser.add_option("--longlist",
491         dest="longlist",
492         action="store_true",
493         default=False,
494         help="display details of all configured aliases")
495
496     # the -m/--metric option
497     if config.has_option("default", "metric"):
498         default_metric = bool(config.get("default", "metric"))
499     else: default_metric = False
500     option_parser.add_option("-m", "--metric",
501         dest="metric",
502         action="store_true",
503         default=default_metric,
504         help="filter/convert conditions for metric units")
505
506     # the -n/--no-conditions option
507     if config.has_option("default", "conditions"):
508         default_conditions = bool(config.get("default", "conditions"))
509     else: default_conditions = True
510     option_parser.add_option("-n", "--no-conditions",
511         dest="conditions",
512         action="store_false",
513         default=default_conditions,
514         help="disable output of current conditions")
515
516     # the --no-cache option
517     if config.has_option("default", "cache"):
518         default_cache = bool(config.get("default", "cache"))
519     else: default_cache = True
520     option_parser.add_option("--no-cache",
521         dest="cache",
522         action="store_false",
523         default=default_cache,
524         help="disable all caching (searches and data)")
525
526     # the --no-cache-data option
527     if config.has_option("default", "cache_data"):
528         default_cache_data = bool(config.get("default", "cache_data"))
529     else: default_cache_data = True
530     option_parser.add_option("--no-cache-data",
531         dest="cache_data",
532         action="store_false",
533         default=default_cache_data,
534         help="disable retrieved data caching")
535
536     # the --no-cache-search option
537     if config.has_option("default", "cache_search"):
538         default_cache_search = bool(config.get("default", "cache_search"))
539     else: default_cache_search = True
540     option_parser.add_option("--no-cache-search",
541         dest="cache_search",
542         action="store_false",
543         default=default_cache_search,
544         help="disable search result caching")
545
546     # the -q/--quiet option
547     if config.has_option("default", "quiet"):
548         default_quiet = bool(config.get("default", "quiet"))
549     else: default_quiet = False
550     option_parser.add_option("-q", "--quiet",
551         dest="quiet",
552         action="store_true",
553         default=default_quiet,
554         help="skip preambles and don't indent")
555
556     # the --setpath option
557     if config.has_option("default", "setpath"):
558         default_setpath = config.get("default", "setpath")
559     else: default_setpath = ".:~/.weather"
560     option_parser.add_option("--setpath",
561         dest="setpath",
562         default=default_setpath,
563         help="directory search path for correlation sets")
564
565     # the -v/--verbose option
566     if config.has_option("default", "verbose"):
567         default_verbose = bool(config.get("default", "verbose"))
568     else: default_verbose = False
569     option_parser.add_option("-v", "--verbose",
570         dest="verbose",
571         action="store_true",
572         default=default_verbose,
573         help="show full decoded feeds")
574
575     # deprecated options
576     if config.has_option("default", "city"):
577         default_city = config.get("default", "city")
578     else: default_city = ""
579     option_parser.add_option("-c", "--city",
580         dest="city",
581         default=default_city,
582         help=optparse.SUPPRESS_HELP)
583     if config.has_option("default", "id"):
584         default_id = config.get("default", "id")
585     else: default_id = ""
586     option_parser.add_option("-i", "--id",
587         dest="id",
588         default=default_id,
589         help=optparse.SUPPRESS_HELP)
590     if config.has_option("default", "st"):
591         default_st = config.get("default", "st")
592     else: default_st = ""
593     option_parser.add_option("-s", "--st",
594         dest="st",
595         default=default_st,
596         help=optparse.SUPPRESS_HELP)
597
598     options, arguments = option_parser.parse_args()
599     return options, arguments
600
601 def get_config():
602     """Parse the aliases and configuration."""
603     if pyversion("3"): import configparser
604     else: import ConfigParser as configparser
605     config = configparser.ConfigParser()
606     import os
607     rcfiles = [
608         "/etc/weatherrc",
609         "/etc/weather/weatherrc",
610         os.path.expanduser("~/.weather/weatherrc"),
611         os.path.expanduser("~/.weatherrc"),
612         "weatherrc"
613         ]
614     for rcfile in rcfiles:
615         if os.access(rcfile, os.R_OK): config.read(rcfile)
616     for section in config.sections():
617         if section != section.lower():
618             if config.has_section(section.lower()):
619                 config.remove_section(section.lower())
620             config.add_section(section.lower())
621             for option,value in config.items(section):
622                 config.set(section.lower(), option, value)
623     return config
624
625 def integrate_search_cache(config, cachedir, setpath):
626     """Add cached search results into the configuration."""
627     if pyversion("3"): import configparser
628     else: import ConfigParser as configparser
629     import os, time
630     scache_fn = os.path.join( os.path.expanduser(cachedir), "searches" )
631     if not os.access(scache_fn, os.R_OK): return config
632     scache_fd = open(scache_fn)
633     created = float( scache_fd.readline().split(":")[1].strip().split()[0] )
634     scache_fd.close()
635     now = time.time()
636     datafiles = data_index(setpath)
637     if datafiles:
638         data_freshness = sorted(
639             [ x[1] for x in datafiles.values() ],
640             reverse=True
641         )[0]
642     else: data_freshness = now
643     if created < data_freshness <= now:
644         try:
645             os.remove(scache_fn)
646             print( "[clearing outdated %s]" % scache_fn )
647         except (IOError, OSError):
648             pass
649         return config
650     scache = configparser.ConfigParser()
651     scache.read(scache_fn)
652     for section in scache.sections():
653         if not config.has_section(section):
654             config.add_section(section)
655             for option,value in scache.items(section):
656                 config.set(section, option, value)
657     return config
658
659 def list_aliases(config, detail=False):
660     """Return a formatted list of aliases defined in the config."""
661     if detail:
662         output = "\n# configured alias details..."
663         for section in sorted(config.sections()):
664             output += "\n\n[%s]" % section
665             for item in sorted(config.items(section)):
666                 output += "\n%s = %s" % item
667         output += "\n"
668     else:
669         output = "configured aliases and cached searches..."
670         for section in sorted(config.sections()):
671             if config.has_option(section, "description"):
672                 description = config.get(section, "description")
673             else: description = "(no description provided)"
674             output += "\n   %s: %s" % (section, description)
675     return output
676
677 def data_index(path):
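    """Return a dict mapping each correlation data set name ("airports",
    "places", "stations", "zctas", "zones") found along the colon-separated
    search path to a (filename, mtime) tuple."""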
678     import os
679     datafiles = {}
680     for filename in ("airports", "places", "stations", "zctas", "zones"):
681         for dirname in path.split(":"):
682             for extension in ("", ".gz", ".txt"):
683                 candidate = os.path.expanduser(
684                     os.path.join( dirname, "".join( (filename, extension) ) )
685                 )
686                 if os.path.exists(candidate):
687                     datafiles[filename] = (
688                         candidate,
689                         os.stat(candidate).st_mtime
690                     )
691                     break
692             if filename in datafiles:
693                 break
694     return datafiles
695
696 def guess(
697     expression,
698     path=".",
699     max_results=20,
700     info=False,
701     cache_search=False,
702     cacheage=900,
703     cachedir=".",
704     quiet=False
705 ):
706     """Find URIs using airport, gecos, placename, station, ZCTA/ZIP, zone."""
707     import codecs, datetime, time, os, re, sys
708     if pyversion("3"): import configparser
709     else: import ConfigParser as configparser
710     datafiles = data_index(path)
711     if re.match("[A-Za-z]{3}$", expression): searchtype = "airport"
712     elif re.match("[A-Za-z0-9]{4}$", expression): searchtype = "station"
713     elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", expression): searchtype = "zone"
714     elif re.match("[0-9]{5}$", expression): searchtype = "ZCTA"
715     elif re.match(
716         r"[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?, *[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?$",
717         expression
718     ):
719         searchtype = "coordinates"
720     elif re.match(r"(FIPS|fips)\d+$", expression): searchtype = "FIPS"
721     else:
722         searchtype = "name"
723         cache_search = False
724     if cache_search: action = "caching"
725     else: action = "using"
726     if info:
727         scores = [
728             (0.005, "bad"),
729             (0.025, "poor"),
730             (0.160, "suspect"),
731             (0.500, "mediocre"),
732             (0.840, "good"),
733             (0.975, "great"),
734             (0.995, "excellent"),
735             (1.000, "ideal"),
736         ]
737     if not quiet: print("Searching via %s..."%searchtype)
738     stations = configparser.ConfigParser()
739     dataname = "stations"
740     if dataname in datafiles:
741         datafile = datafiles[dataname][0]
742         if datafile.endswith(".gz"):
743             import gzip
744             if pyversion("3"):
745                 stations.read_string(
746                     gzip.open(datafile).read().decode("utf-8") )
747             else: stations.readfp( gzip.open(datafile) )
748         else:
749             stations.read(datafile)
750     else:
751         message = "%s error: can't find \"%s\" data file\n" % (
752             os.path.basename( sys.argv[0] ),
753             dataname
754         )
755         sys.stderr.write(message)
756         exit(1)
757     zones = configparser.ConfigParser()
758     dataname = "zones"
759     if dataname in datafiles:
760         datafile = datafiles[dataname][0]
761         if datafile.endswith(".gz"):
762             import gzip
763             if pyversion("3"):
764                 zones.read_string( gzip.open(datafile).read().decode("utf-8") )
765             else: zones.readfp( gzip.open(datafile) )
766         else:
767             zones.read(datafile)
768     else:
769         message = "%s error: can't find \"%s\" data file\n" % (
770             os.path.basename( sys.argv[0] ),
771             dataname
772         )
773         sys.stderr.write(message)
774         exit(1)
775     search = None
776     station = ("", 0)
777     zone = ("", 0)
778     dataset = None
779     possibilities = []
780     uris = {}
781     if searchtype == "airport":
782         expression = expression.lower()
783         airports = configparser.ConfigParser()
784         dataname = "airports"
785         if dataname in datafiles:
786             datafile = datafiles[dataname][0]
787             if datafile.endswith(".gz"):
788                 import gzip
789                 if pyversion("3"):
790                     airports.read_string(
791                         gzip.open(datafile).read().decode("utf-8") )
792                 else: airports.readfp( gzip.open(datafile) )
793             else:
794                 airports.read(datafile)
795         else:
796             message = "%s error: can't find \"%s\" data file\n" % (
797                 os.path.basename( sys.argv[0] ),
798                 dataname
799             )
800             sys.stderr.write(message)
801             exit(1)
802         if airports.has_section(expression) \
803             and airports.has_option(expression, "station"):
804             search = (expression, "IATA/FAA airport code %s" % expression)
805             station = ( airports.get(expression, "station"), 0 )
806             if stations.has_option(station[0], "zone"):
807                 zone = eval( stations.get(station[0], "zone") )
808                 dataset = stations
809             if not ( info or quiet ) \
810                 and stations.has_option( station[0], "description" ):
811                 print(
812                     "[%s result %s]" % (
813                         action,
814                         stations.get(station[0], "description")
815                     )
816                 )
817         else:
818             message = "No IATA/FAA airport code \"%s\" in the %s file.\n" % (
819                 expression,
820                 datafiles["airports"][0]
821             )
822             sys.stderr.write(message)
823             exit(1)
824     elif searchtype == "station":
825         expression = expression.lower()
826         if stations.has_section(expression):
827             station = (expression, 0)
828             if not search:
829                 search = (expression, "ICAO station code %s" % expression)
830             if stations.has_option(expression, "zone"):
831                 zone = eval( stations.get(expression, "zone") )
832                 dataset = stations
833             if not ( info or quiet ) \
834                 and stations.has_option(expression, "description"):
835                 print(
836                     "[%s result %s]" % (
837                         action,
838                         stations.get(expression, "description")
839                     )
840                 )
841         else:
842             message = "No ICAO weather station \"%s\" in the %s file.\n" % (
843                 expression,
844                 datafiles["stations"][0]
845             )
846             sys.stderr.write(message)
847             exit(1)
848     elif searchtype == "zone":
849         expression = expression.lower()
850         if zones.has_section(expression) \
851             and zones.has_option(expression, "station"):
852             zone = (expression, 0)
853             station = eval( zones.get(expression, "station") )
854             dataset = zones
855             search = (expression, "NWS/NOAA weather zone %s" % expression)
856             if not ( info or quiet ) \
857                 and zones.has_option(expression, "description"):
858                 print(
859                     "[%s result %s]" % (
860                         action,
861                         zones.get(expression, "description")
862                     )
863                 )
864         else:
865             message = "No usable NWS weather zone \"%s\" in the %s file.\n" % (
866                 expression,
867                 datafiles["zones"][0]
868             )
869             sys.stderr.write(message)
870             exit(1)
871     elif searchtype == "ZCTA":
872         zctas = configparser.ConfigParser()
873         dataname = "zctas"
874         if dataname in datafiles:
875             datafile = datafiles[dataname][0]
876             if datafile.endswith(".gz"):
877                 import gzip
878                 if pyversion("3"):
879                     zctas.read_string(
880                         gzip.open(datafile).read().decode("utf-8") )
881                 else: zctas.readfp( gzip.open(datafile) )
882             else:
883                 zctas.read(datafile)
884         else:
885             message = "%s error: can't find \"%s\" data file\n" % (
886                 os.path.basename( sys.argv[0] ),
887                 dataname
888             )
889             sys.stderr.write(message)
890             exit(1)
891         dataset = zctas
892         if zctas.has_section(expression) \
893             and zctas.has_option(expression, "station"):
894             station = eval( zctas.get(expression, "station") )
895             search = (expression, "Census ZCTA (ZIP code) %s" % expression)
896             if zctas.has_option(expression, "zone"):
897                 zone = eval( zctas.get(expression, "zone") )
898         else:
899             message = "No census ZCTA (ZIP code) \"%s\" in the %s file.\n" % (
900                 expression,
901                 datafiles["zctas"][0]
902             )
903             sys.stderr.write(message)
904             exit(1)
905     elif searchtype == "coordinates":
906         search = (expression, "Geographic coordinates %s" % expression)
907         stationtable = {}
908         for station in stations.sections():
909             if stations.has_option(station, "location"):
910                 stationtable[station] = {
911                     "location": eval( stations.get(station, "location") )
912                 }
913         station = closest( gecos(expression), stationtable, "location", 0.1 )
914         if not station[0]:
915             message = "No ICAO weather station found near %s.\n" % expression
916             sys.stderr.write(message)
917             exit(1)
918         zonetable = {}
919         for zone in zones.sections():
920             if zones.has_option(zone, "centroid"):
921                 zonetable[zone] = {
922                     "centroid": eval( zones.get(zone, "centroid") )
923                 }
924         zone = closest( gecos(expression), zonetable, "centroid", 0.1 )
925         if not zone[0]:
926             message = "No NWS weather zone near %s; forecasts unavailable.\n" \
927                 % expression
928             sys.stderr.write(message)
929     elif searchtype in ("FIPS", "name"):
930         places = configparser.ConfigParser()
931         dataname = "places"
932         if dataname in datafiles:
933             datafile = datafiles[dataname][0]
934             if datafile.endswith(".gz"):
935                 import gzip
936                 if pyversion("3"):
937                     places.read_string(
938                         gzip.open(datafile).read().decode("utf-8") )
939                 else: places.readfp( gzip.open(datafile) )
940             else:
941                 places.read(datafile)
942         else:
943             message = "%s error: can't find \"%s\" data file\n" % (
944                 os.path.basename( sys.argv[0] ),
945                 dataname
946             )
947             sys.stderr.write(message)
948             exit(1)
949         dataset = places
950         place = expression.lower()
951         if places.has_section(place) and places.has_option(place, "station"):
952             station = eval( places.get(place, "station") )
953             search = (expression, "Census Place %s" % expression)
954             if places.has_option(place, "description"):
955                 search = (
956                     search[0],
957                     search[1] + ", %s" % places.get(place, "description")
958                 )
959             if places.has_option(place, "zone"):
960                 zone = eval( places.get(place, "zone") )
961             if not ( info or quiet ) \
962                 and places.has_option(place, "description"):
963                 print(
964                     "[%s result %s]" % (
965                         action,
966                         places.get(place, "description")
967                     )
968                 )
969         else:
970             for place in places.sections():
971                 if places.has_option(place, "description") \
972                     and places.has_option(place, "station") \
973                     and re.search(
974                         expression,
975                         places.get(place, "description"),
976                         re.I
977                     ):
978                         possibilities.append(place)
979             for place in stations.sections():
980                 if stations.has_option(place, "description") \
981                     and re.search(
982                         expression,
983                         stations.get(place, "description"),
984                         re.I
985                     ):
986                         possibilities.append(place)
987             for place in zones.sections():
988                 if zones.has_option(place, "description") \
989                     and zones.has_option(place, "station") \
990                     and re.search(
991                         expression,
992                         zones.get(place, "description"),
993                         re.I
994                     ):
995                         possibilities.append(place)
996             if len(possibilities) == 1:
997                 place = possibilities[0]
998                 if places.has_section(place):
999                     station = eval( places.get(place, "station") )
1000                     description = places.get(place, "description")
1001                     if places.has_option(place, "zone"):
1002                         zone = eval( places.get(place, "zone" ) )
1003                     search = ( expression, "%s: %s" % (place, description) )
1004                 elif stations.has_section(place):
1005                     station = (place, 0.0)
1006                     description = stations.get(place, "description")
1007                     if stations.has_option(place, "zone"):
1008                         zone = eval( stations.get(place, "zone" ) )
1009                     search = ( expression, "ICAO station code %s" % place )
1010                 elif zones.has_section(place):
1011                     station = eval( zones.get(place, "station") )
1012                     description = zones.get(place, "description")
1013                     zone = (place, 0.0)
1014                     search = ( expression, "NWS/NOAA weather zone %s" % place )
1015                 if not ( info or quiet ):
1016                     print( "[%s result %s]" % (action, description) )
1017             if not possibilities and not station[0]:
1018                 message = "No FIPS code/census area match in the %s file.\n" % (
1019                     datafiles["places"][0]
1020                 )
1021                 sys.stderr.write(message)
1022                 exit(1)
1023     if station[0]:
1024         uris["metar"] = stations.get( station[0], "metar" )
1025         if zone[0]:
1026             for key,value in zones.items( zone[0] ):
1027                 if key not in ("centroid", "description", "station"):
1028                     uris[key] = value
1029     elif possibilities:
1030         count = len(possibilities)
1031         if count <= max_results:
1032             print( "Your search is ambiguous, returning %s matches:" % count )
1033             for place in sorted(possibilities):
1034                 if places.has_section(place):
1035                     print(
1036                         "   [%s] %s" % (
1037                             place,
1038                             places.get(place, "description")
1039                         )
1040                     )
1041                 elif stations.has_section(place):
1042                     print(
1043                         "   [%s] %s" % (
1044                             place,
1045                             stations.get(place, "description")
1046                         )
1047                     )
1048                 elif zones.has_section(place):
1049                     print(
1050                         "   [%s] %s" % (
1051                             place,
1052                             zones.get(place, "description")
1053                         )
1054                     )
1055         else:
1056             print(
1057                 "Your search is too ambiguous, returning %s matches." % count
1058             )
1059         exit(0)
1060     if info:
1061         stationlist = []
1062         zonelist = []
1063         if dataset:
1064             for section in dataset.sections():
1065                 if dataset.has_option(section, "station"):
1066                     stationlist.append(
1067                         eval( dataset.get(section, "station") )[1]
1068                     )
1069                 if dataset.has_option(section, "zone"):
1070                     zonelist.append( eval( dataset.get(section, "zone") )[1] )
1071         stationlist.sort()
1072         zonelist.sort()
1073         scount = len(stationlist)
1074         zcount = len(zonelist)
1075         sranks = []
1076         zranks = []
1077         for score in scores:
1078             if stationlist:
1079                 sranks.append( stationlist[ int( (1-score[0]) * scount ) ] )
1080             if zonelist:
1081                 zranks.append( zonelist[ int( (1-score[0]) * zcount ) ] )
1082         description = search[1]
1083         uris["description"] = description
1084         print(
1085             "%s\n%s" % ( description, "-" * len(description) )
1086         )
1087         print(
1088             "%s: %s" % (
1089                 station[0],
1090                 stations.get( station[0], "description" )
1091             )
1092         )
1093         km = radian_to_km*station[1]
1094         mi = radian_to_mi*station[1]
1095         if sranks and not description.startswith("ICAO station code "):
1096             for index in range(0, len(scores)):
1097                 if station[1] >= sranks[index]:
1098                     score = scores[index][1]
1099                     break
1100             print(
1101                 "   (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1102             )
1103         elif searchtype == "coordinates":
1104             print( "   (%.3gkm, %.3gmi)" % (km, mi) )
1105         if zone[0]:
1106             print(
1107                 "%s: %s" % ( zone[0], zones.get( zone[0], "description" ) )
1108             )
1109         km = radian_to_km*zone[1]
1110         mi = radian_to_mi*zone[1]
1111         if zranks and not description.startswith("NWS/NOAA weather zone "):
1112             for index in range(0, len(scores)):
1113                 if zone[1] >= zranks[index]:
1114                     score = scores[index][1]
1115                     break
1116             print(
1117                 "   (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1118             )
1119         elif searchtype == "coordinates" and zone[0]:
1120             print( "   (%.3gkm, %.3gmi)" % (km, mi) )
1121     if cache_search:
1122         now = time.time()
1123         nowstamp = "%s (%s)" % (
1124             now,
1125             datetime.datetime.isoformat(
1126                 datetime.datetime.fromtimestamp(now),
1127                 " "
1128             )
1129         )
1130         search_cache = ["\n"]
1131         search_cache.append( "[%s]\n" % search[0] ) 
1132         search_cache.append( "cached = %s\n" % nowstamp )
1133         for uriname in sorted(uris.keys()):
1134             search_cache.append( "%s = %s\n" % ( uriname, uris[uriname] ) )
1135         real_cachedir = os.path.expanduser(cachedir)
1136         if not os.path.exists(real_cachedir):
1137             try: os.makedirs(real_cachedir)
1138             except (IOError, OSError): pass
1139         scache_fn = os.path.join(real_cachedir, "searches")
1140         if not os.path.exists(scache_fn):
1141             then = sorted(
1142                     [ x[1] for x in datafiles.values() ],
1143                     reverse=True
1144                 )[0]
1145             thenstamp = "%s (%s)" % (
1146                 then,
1147                 datetime.datetime.isoformat(
1148                     datetime.datetime.fromtimestamp(then),
1149                     " "
1150                 )
1151             )
1152             search_cache.insert(
1153                 0,
1154                 "# based on data files from: %s\n" % thenstamp
1155             )
1156         try:
1157             scache_existing = configparser.ConfigParser()
1158             scache_existing.read(scache_fn)
1159             if not scache_existing.has_section(search[0]):
1160                 scache_fd = codecs.open(scache_fn, "a", "utf-8")
1161                 scache_fd.writelines(search_cache)
1162                 scache_fd.close()
1163         except (IOError, OSError): pass
1164     if not info:
1165         return(uris)
1166
1167 def closest(position, nodes, fieldname, angle=None):
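    """Return a (name, angle) tuple for the node whose fieldname coordinates
    lie nearest position, where angle is the great-circle central angle in
    radians; the optional angle argument caps the search radius."""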
1168     import math
1169     if not angle: angle = 2*math.pi
1170     match = None
1171     for name in nodes:
1172         if fieldname in nodes[name]:
1173             node = nodes[name][fieldname]
1174             if node and abs( position[0]-node[0] ) < angle:
1175                 if abs( position[1]-node[1] ) < angle \
1176                     or abs( abs( position[1]-node[1] ) - 2*math.pi ) < angle:
1177                     if position == node:
1178                         angle = 0
1179                         match = name
1180                     else:
1181                         candidate = math.acos(
1182                             math.sin( position[0] ) * math.sin( node[0] ) \
1183                                 + math.cos( position[0] ) \
1184                                 * math.cos( node[0] ) \
1185                                 * math.cos( position[1] - node[1] )
1186                             )
1187                         if candidate < angle:
1188                             angle = candidate
1189                             match = name
1190     if match: match = str(match)
1191     return (match, angle)
1192
1193 def gecos(formatted):
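    """Convert a "latitude, longitude" string in decimal degrees or
    degrees-minutes-seconds form (with an optional n/s/e/w suffix) to a
    (latitude, longitude) tuple in radians, e.g. "40-42-46n, 74-0-22w"
    yields approximately (0.7106, -1.2917)."""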
1194     import math, re
1195     coordinates = formatted.split(",")
1196     for coordinate in range(0, 2):
1197         degrees, foo, minutes, bar, seconds, hemisphere = re.match(
1198             r"([\+-]?\d+\.?\d*)(-(\d+))?(-(\d+))?([ensw]?)$",
1199             coordinates[coordinate].strip().lower()
1200         ).groups()
1201         value = float(degrees)
1202         if minutes: value += float(minutes)/60
1203         if seconds: value += float(seconds)/3600
1204         if hemisphere and hemisphere in "sw": value *= -1
1205         coordinates[coordinate] = math.radians(value)
1206     return tuple(coordinates)
1207
1208 def correlate():
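    """Rebuild the correlation data files (airports, places, stations,
    zctas, zones) from the downloaded census, NWS and OurAirports data
    plus the hand-maintained overrides, slist and zlist adjustments."""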
1209     import codecs, csv, datetime, hashlib, os, re, sys, tarfile, time, zipfile
1210     if pyversion("3"): import configparser
1211     else: import ConfigParser as configparser
1212     for filename in os.listdir("."):
1213         if re.match("[0-9]{4}_Gaz_counties_national.zip$", filename):
1214             gcounties_an = filename
1215             gcounties_fn = filename[:-4] + ".txt"
1216         elif re.match("[0-9]{4}_Gaz_cousubs_national.zip$", filename):
1217             gcousubs_an = filename
1218             gcousubs_fn = filename[:-4] + ".txt"
1219         elif re.match("[0-9]{4}_Gaz_place_national.zip$", filename):
1220             gplace_an = filename
1221             gplace_fn = filename[:-4] + ".txt"
1222         elif re.match("[0-9]{4}_Gaz_zcta_national.zip$", filename):
1223             gzcta_an = filename
1224             gzcta_fn = filename[:-4] + ".txt"
1225         elif re.match("bp[0-9]{2}[a-z]{2}[0-9]{2}.dbx$", filename):
1226             cpfzcf_fn = filename
1227     nsdcccc_fn = "nsd_cccc.txt"
1228     ourairports_fn = "airports.csv"
1229     overrides_fn = "overrides.conf"
1230     overrideslog_fn = "overrides.log"
1231     slist_fn = "slist"
1232     zlist_fn = "zlist"
1233     qalog_fn = "qa.log"
1234     airports_fn = "airports"
1235     places_fn = "places"
1236     stations_fn = "stations"
1237     zctas_fn = "zctas"
1238     zones_fn = "zones"
1239     header = """\
1240 %s
1241 # generated by %s on %s from these public domain sources:
1242 #
1243 # https://www.census.gov/geographies/reference-files/time-series/geo/gazetteer-files.html
1244 # %s %s %s
1245 # %s %s %s
1246 # %s %s %s
1247 # %s %s %s
1248 #
1249 # https://www.weather.gov/gis/ZoneCounty/
1250 # %s %s %s
1251 #
1252 # https://tgftp.nws.noaa.gov/data/
1253 # %s %s %s
1254 #
1255 # https://ourairports.com/data/
1256 # %s %s %s
1257 #
1258 # ...and these manually-generated or hand-compiled adjustments:
1259 # %s %s %s
1260 # %s %s %s
1261 # %s %s %s\
1262 """ % (
1263         weather_copyright,
1264         os.path.basename( sys.argv[0] ),
1265         datetime.date.isoformat(
1266             datetime.datetime.utcfromtimestamp( int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) )
1267         ),
1268         hashlib.md5( open(gcounties_an, "rb").read() ).hexdigest(),
1269         datetime.date.isoformat(
1270             datetime.datetime.utcfromtimestamp( os.path.getmtime(gcounties_an) )
1271         ),
1272         gcounties_an,
1273         hashlib.md5( open(gcousubs_an, "rb").read() ).hexdigest(),
1274         datetime.date.isoformat(
1275             datetime.datetime.utcfromtimestamp( os.path.getmtime(gcousubs_an) )
1276         ),
1277         gcousubs_an,
1278         hashlib.md5( open(gplace_an, "rb").read() ).hexdigest(),
1279         datetime.date.isoformat(
1280             datetime.datetime.utcfromtimestamp( os.path.getmtime(gplace_an) )
1281         ),
1282         gplace_an,
1283         hashlib.md5( open(gzcta_an, "rb").read() ).hexdigest(),
1284         datetime.date.isoformat(
1285             datetime.datetime.utcfromtimestamp( os.path.getmtime(gzcta_an) )
1286         ),
1287         gzcta_an,
1288         hashlib.md5( open(cpfzcf_fn, "rb").read() ).hexdigest(),
1289         datetime.date.isoformat(
1290             datetime.datetime.utcfromtimestamp( os.path.getmtime(cpfzcf_fn) )
1291         ),
1292         cpfzcf_fn,
1293         hashlib.md5( open(nsdcccc_fn, "rb").read() ).hexdigest(),
1294         datetime.date.isoformat(
1295             datetime.datetime.utcfromtimestamp( os.path.getmtime(nsdcccc_fn) )
1296         ),
1297         nsdcccc_fn,
1298         hashlib.md5( open(ourairports_fn, "rb").read() ).hexdigest(),
1299         datetime.date.isoformat(
1300             datetime.datetime.utcfromtimestamp( os.path.getmtime(ourairports_fn) )
1301         ),
1302         ourairports_fn,
1303         hashlib.md5( open(overrides_fn, "rb").read() ).hexdigest(),
1304         datetime.date.isoformat(
1305             datetime.datetime.utcfromtimestamp( os.path.getmtime(overrides_fn) )
1306         ),
1307         overrides_fn,
1308         hashlib.md5( open(slist_fn, "rb").read() ).hexdigest(),
1309         datetime.date.isoformat(
1310             datetime.datetime.utcfromtimestamp( os.path.getmtime(slist_fn) )
1311         ),
1312         slist_fn,
1313         hashlib.md5( open(zlist_fn, "rb").read() ).hexdigest(),
1314         datetime.date.isoformat(
1315             datetime.datetime.utcfromtimestamp( os.path.getmtime(zlist_fn) )
1316         ),
1317         zlist_fn
1318     )
1319     airports = {}
1320     places = {}
1321     stations = {}
1322     zctas = {}
1323     zones = {}
1324     message = "Reading %s:%s..." % (gcounties_an, gcounties_fn)
1325     sys.stdout.write(message)
1326     sys.stdout.flush()
1327     count = 0
1328     gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "r")
1329     columns = gcounties.readline().decode("utf-8").strip().split("\t")
1330     for line in gcounties:
1331         fields = line.decode("utf-8").strip().split("\t")
1332         f_geoid = fields[ columns.index("GEOID") ].strip()
1333         f_name = fields[ columns.index("NAME") ].strip()
1334         f_usps = fields[ columns.index("USPS") ].strip()
1335         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1336         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1337         if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1338             fips = "fips%s" % f_geoid
1339             if fips not in places: places[fips] = {}
1340             places[fips]["centroid"] = gecos(
1341                 "%s,%s" % (f_intptlat, f_intptlong)
1342             )
1343             places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1344             count += 1
1345     gcounties.close()
1346     print("done (%s lines)." % count)
1347     message = "Reading %s:%s..." % (gcousubs_an, gcousubs_fn)
1348     sys.stdout.write(message)
1349     sys.stdout.flush()
1350     count = 0
1351     gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "r")
1352     columns = gcousubs.readline().decode("utf-8").strip().split("\t")
1353     for line in gcousubs:
1354         fields = line.decode("utf-8").strip().split("\t")
1355         f_geoid = fields[ columns.index("GEOID") ].strip()
1356         f_name = fields[ columns.index("NAME") ].strip()
1357         f_usps = fields[ columns.index("USPS") ].strip()
1358         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1359         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1360         if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1361             fips = "fips%s" % f_geoid
1362             if fips not in places: places[fips] = {}
1363             places[fips]["centroid"] = gecos(
1364                 "%s,%s" % (f_intptlat, f_intptlong)
1365             )
1366             places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1367             count += 1
1368     gcousubs.close()
1369     print("done (%s lines)." % count)
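    # Place (city/town) gazetteer data: same columns and handling again, so
    # incorporated places land in the same FIPS-keyed index.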
1370     message = "Reading %s:%s..." % (gplace_an, gplace_fn)
1371     sys.stdout.write(message)
1372     sys.stdout.flush()
1373     count = 0
1374     gplace = zipfile.ZipFile(gplace_an).open(gplace_fn, "r")
1375     columns = gplace.readline().decode("utf-8").strip().split("\t")
1376     for line in gplace:
1377         fields = line.decode("utf-8").strip().split("\t")
1378         f_geoid = fields[ columns.index("GEOID") ].strip()
1379         f_name = fields[ columns.index("NAME") ].strip()
1380         f_usps = fields[ columns.index("USPS") ].strip()
1381         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1382         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1383         if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1384             fips = "fips%s" % f_geoid
1385             if fips not in places: places[fips] = {}
1386             places[fips]["centroid"] = gecos(
1387                 "%s,%s" % (f_intptlat, f_intptlong)
1388             )
1389             places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1390             count += 1
1391     gplace.close()
1392     print("done (%s lines)." % count)
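    # Station list: one ICAO identifier per line ("#" starts a comment); seed
    # each station with the URL of its decoded METAR report.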
1393     message = "Reading %s..." % slist_fn
1394     sys.stdout.write(message)
1395     sys.stdout.flush()
1396     count = 0
1397     slist = codecs.open(slist_fn, "r", "utf-8")
1398     for line in slist:
1399         icao = line.split("#")[0].strip()
1400         if icao:
1401             stations[icao] = {
1402                 "metar": "https://tgftp.nws.noaa.gov/data/observations/"\
1403                     + "metar/decoded/%s.TXT" % icao.upper()
1404             }
1405             count += 1
1406     slist.close()
1407     print("done (%s lines)." % count)
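    # NSD_CCCC station catalogue (semicolon-delimited): fill in a
    # "name, state, country" description and a latitude/longitude for
    # stations already seeded from the station list.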
1408     message = "Reading %s..." % nsdcccc_fn
1409     sys.stdout.write(message)
1410     sys.stdout.flush()
1411     count = 0
1412     nsdcccc = codecs.open(nsdcccc_fn, "r", "utf-8")
1413     for line in nsdcccc:
1414         line = str(line)
1415         fields = line.split(";")
1416         icao = fields[0].strip().lower()
1417         if icao in stations:
1418             description = []
1419             name = " ".join( fields[3].strip().title().split() )
1420             if name: description.append(name)
1421             st = fields[4].strip()
1422             if st: description.append(st)
1423             country = " ".join( fields[5].strip().title().split() )
1424             if country: description.append(country)
1425             if description:
1426                 stations[icao]["description"] = ", ".join(description)
1427             lat, lon = fields[7:9]
1428             if lat and lon:
1429                 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1430             elif "location" not in stations[icao]:
1431                 lat, lon = fields[5:7]
1432                 if lat and lon:
1433                     stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1434         count += 1
1435     nsdcccc.close()
1436     print("done (%s lines)." % count)
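    # OurAirports CSV: map three-letter IATA codes to their ICAO stations and
    # supply any description or location still missing for those stations.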
1437     message = "Reading %s..." % ourairports_fn
1438     sys.stdout.write(message)
1439     sys.stdout.flush()
1440     count = 0
1441     ourairports = open(ourairports_fn, "r")
1442     for row in csv.reader(ourairports):
1443         icao = row[12].lower()
1444         if icao in stations:
1445             iata = row[13].lower()
1446             if len(iata) == 3: airports[iata] = { "station": icao }
1447             if "description" not in stations[icao]:
1448                 description = []
1449                 name = row[3]
1450                 if name: description.append(name)
1451                 municipality = row[10]
1452                 if municipality: description.append(municipality)
1453                 region = row[9]
1454                 country = row[8]
1455                 if region:
1456                     if "-" in region:
1457                         c,r = region.split("-", 1)
1458                         if c == country: region = r
1459                     description.append(region)
1460                 if country:
1461                     description.append(country)
1462                 if description:
1463                     stations[icao]["description"] = ", ".join(description)
1464             if "location" not in stations[icao]:
1465                 lat = row[4]
1466                 if lat:
1467                     lon = row[5]
1468                     if lon:
1469                         stations[icao]["location"] = gecos(
1470                             "%s,%s" % (lat, lon)
1471                         )
1472         count += 1
1473     ourairports.close()
1474     print("done (%s lines)." % count)
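    # Zone list: one forecast zone identifier per line ("#" starts a comment).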
1475     message = "Reading %s..." % zlist_fn
1476     sys.stdout.write(message)
1477     sys.stdout.flush()
1478     count = 0
1479     zlist = codecs.open(zlist_fn, "r", "utf-8")
1480     for line in zlist:
1481         line = line.split("#")[0].strip()
1482         if line:
1483             zones[line] = {}
1484             count += 1
1485     zlist.close()
1486     print("done (%s lines)." % count)
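    # Zone/county correlation data (pipe-delimited): build each zone's
    # forecast and warning product URLs, its description and its centroid,
    # falling back to the matching county's FIPS centroid when the file
    # provides no usable coordinates.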
1487     message = "Reading %s..." % cpfzcf_fn
1488     sys.stdout.write(message)
1489     sys.stdout.flush()
1490     count = 0
1491     cpfz = {}
1492     cpfzcf = codecs.open(cpfzcf_fn, "r", "utf-8")
1493     for line in cpfzcf:
1494         fields = line.strip().split("|")
1495         if len(fields) == 11 \
1496             and fields[0] and fields[1] and fields[9] and fields[10]:
1497             zone = "z".join( fields[:2] ).lower()
1498             if zone in zones:
1499                 state = fields[0]
1500                 if state:
1501                     zones[zone]["coastal_flood_statement"] = (
1502                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1503                         "flood/coastal/%s/%s.txt" % (state.lower(), zone))
1504                     zones[zone]["flash_flood_statement"] = (
1505                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1506                         "flash_flood/statement/%s/%s.txt"
1507                         % (state.lower(), zone))
1508                     zones[zone]["flash_flood_warning"] = (
1509                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1510                         "flash_flood/warning/%s/%s.txt"
1511                         % (state.lower(), zone))
1512                     zones[zone]["flash_flood_watch"] = (
1513                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1514                         "flash_flood/watch/%s/%s.txt" % (state.lower(), zone))
1515                     zones[zone]["flood_statement"] = (
1516                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1517                         "flood/statement/%s/%s.txt" % (state.lower(), zone))
1518                     zones[zone]["flood_warning"] = (
1519                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1520                         "flood/warning/%s/%s.txt" % (state.lower(), zone))
1521                     zones[zone]["severe_thunderstorm_warning"] = (
1522                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1523                         "thunderstorm/%s/%s.txt" % (state.lower(), zone))
1524                     zones[zone]["severe_weather_statement"] = (
1525                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1526                         "severe_weather_stmt/%s/%s.txt"
1527                         % (state.lower(), zone))
1528                     zones[zone]["short_term_forecast"] = (
1529                         "https://tgftp.nws.noaa.gov/data/forecasts/nowcast/"
1530                         "%s/%s.txt" % (state.lower(), zone))
1531                     zones[zone]["special_weather_statement"] = (
1532                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1533                         "special_weather_stmt/%s/%s.txt"
1534                         % (state.lower(), zone))
1535                     zones[zone]["state_forecast"] = (
1536                         "https://tgftp.nws.noaa.gov/data/forecasts/state/"
1537                         "%s/%s.txt" % (state.lower(), zone))
1538                     zones[zone]["urgent_weather_message"] = (
1539                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1540                         "non_precip/%s/%s.txt" % (state.lower(), zone))
1541                     zones[zone]["zone_forecast"] = (
1542                         "https://tgftp.nws.noaa.gov/data/forecasts/zone/"
1543                         "%s/%s.txt" % (state.lower(), zone))
1544                 description = fields[3].strip()
1545                 fips = "fips%s"%fields[6]
1546                 county = fields[5]
1547                 if county:
1548                     if description.endswith(county):
1549                         description += " County"
1550                     else:
1551                         description += ", %s County" % county
1552                 description += ", %s, US" % state
1553                 zones[zone]["description"] = description
1554                 zones[zone]["centroid"] = gecos( ",".join( fields[9:11] ) )
1555                 if fips in places and not zones[zone]["centroid"]:
1556                     zones[zone]["centroid"] = places[fips]["centroid"]
1557         count += 1
1558     cpfzcf.close()
1559     print("done (%s lines)." % count)
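    # ZCTA gazetteer data: record a centroid for each ZIP Code Tabulation
    # Area.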
1560     message = "Reading %s:%s..." % (gzcta_an, gzcta_fn)
1561     sys.stdout.write(message)
1562     sys.stdout.flush()
1563     count = 0
1564     gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "r")
1565     columns = gzcta.readline().decode("utf-8").strip().split("\t")
1566     for line in gzcta:
1567         fields = line.decode("utf-8").strip().split("\t")
1568         f_geoid = fields[ columns.index("GEOID") ].strip()
1569         f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1570         f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1571         if f_geoid and f_intptlat and f_intptlong:
1572             if f_geoid not in zctas: zctas[f_geoid] = {}
1573             zctas[f_geoid]["centroid"] = gecos(
1574                 "%s,%s" % (f_intptlat, f_intptlong)
1575             )
1576             count += 1
1577     gzcta.close()
1578     print("done (%s lines)." % count)
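    # Apply local overrides: each section name identifies an airport (three
    # letters), station (four alphanumerics), ZCTA (five digits), zone
    # (SSzNNN) or place (fipsNNN); a leading "-" deletes the record, anything
    # else adds or changes options, with centroid/location values evaluated
    # as Python tuples.  Every action is logged to the overrides log file.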
1579     message = "Reading %s..." % overrides_fn
1580     sys.stdout.write(message)
1581     sys.stdout.flush()
1582     count = 0
1583     added = 0
1584     removed = 0
1585     changed = 0
1586     overrides = configparser.ConfigParser()
1587     overrides.read_file( codecs.open(overrides_fn, "r", "utf8") )
1588     overrideslog = []
1589     for section in overrides.sections():
1590         addopt = chgopt = 0
1591         logact = "ignored unrecognized section %s" % section
1592         if section.startswith("-"):
1593             section = section[1:]
1594             delete = True
1595         else: delete = False
1596         if re.match("[A-Za-z]{3}$", section):
1597             if delete:
1598                 if section in airports:
1599                     del( airports[section] )
1600                     logact = "removed airport %s" % section
1601                     removed += 1
1602                 else:
1603                     logact = "tried to remove nonexistent airport %s" % section
1604             else:
1605                 if section in airports:
1606                     logact = "changed airport %s" % section
1607                     changed += 1
1608                 else:
1609                     airports[section] = {}
1610                     logact = "added airport %s" % section
1611                     added += 1
1612                 for key,value in overrides.items(section):
1613                     if key in airports[section]: chgopt += 1
1614                     else: addopt += 1
1615                     if key in ("centroid", "location"):
1616                         airports[section][key] = eval(value)
1617                     else:
1618                         airports[section][key] = value
1619                 if addopt and chgopt:
1620                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1621                 elif addopt: logact += " (+%s options)" % addopt
1622                 elif chgopt: logact += " (!%s options)" % chgopt
1623         elif re.match("[A-Za-z0-9]{4}$", section):
1624             if delete:
1625                 if section in stations:
1626                     del( stations[section] )
1627                     logact = "removed station %s" % section
1628                     removed += 1
1629                 else:
1630                     logact = "tried to remove nonexistent station %s" % section
1631             else:
1632                 if section in stations:
1633                     logact = "changed station %s" % section
1634                     changed += 1
1635                 else:
1636                     stations[section] = {}
1637                     logact = "added station %s" % section
1638                     added += 1
1639                 for key,value in overrides.items(section):
1640                     if key in stations[section]: chgopt += 1
1641                     else: addopt += 1
1642                     if key in ("centroid", "location"):
1643                         stations[section][key] = eval(value)
1644                     else:
1645                         stations[section][key] = value
1646                 if addopt and chgopt:
1647                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1648                 elif addopt: logact += " (+%s options)" % addopt
1649                 elif chgopt: logact += " (!%s options)" % chgopt
1650         elif re.match("[0-9]{5}$", section):
1651             if delete:
1652                 if section in zctas:
1653                     del( zctas[section] )
1654                     logact = "removed zcta %s" % section
1655                     removed += 1
1656                 else:
1657                     logact = "tried to remove nonexistent zcta %s" % section
1658             else:
1659                 if section in zctas:
1660                     logact = "changed zcta %s" % section
1661                     changed += 1
1662                 else:
1663                     zctas[section] = {}
1664                     logact = "added zcta %s" % section
1665                     added += 1
1666                 for key,value in overrides.items(section):
1667                     if key in zctas[section]: chgopt += 1
1668                     else: addopt += 1
1669                     if key in ("centroid", "location"):
1670                         zctas[section][key] = eval(value)
1671                     else:
1672                         zctas[section][key] = value
1673                 if addopt and chgopt:
1674                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1675                 elif addopt: logact += " (+%s options)" % addopt
1676                 elif chgopt: logact += " (!%s options)" % chgopt
1677         elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", section):
1678             if delete:
1679                 if section in zones:
1680                     del( zones[section] )
1681                     logact = "removed zone %s" % section
1682                     removed += 1
1683                 else:
1684                     logact = "tried to remove nonexistent zone %s" % section
1685             else:
1686                 if section in zones:
1687                     logact = "changed zone %s" % section
1688                     changed += 1
1689                 else:
1690                     zones[section] = {}
1691                     logact = "added zone %s" % section
1692                     added += 1
1693                 for key,value in overrides.items(section):
1694                     if key in zones[section]: chgopt += 1
1695                     else: addopt += 1
1696                     if key in ("centroid", "location"):
1697                         zones[section][key] = eval(value)
1698                     else:
1699                         zones[section][key] = value
1700                 if addopt and chgopt:
1701                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1702                 elif addopt: logact += " (+%s options)" % addopt
1703                 elif chgopt: logact += " (!%s options)" % chgopt
1704         elif re.match("fips[0-9]+$", section):
1705             if delete:
1706                 if section in places:
1707                     del( places[section] )
1708                     logact = "removed place %s" % section
1709                     removed += 1
1710                 else:
1711                     logact = "tried to remove nonexistent place %s" % section
1712             else:
1713                 if section in places:
1714                     logact = "changed place %s" % section
1715                     changed += 1
1716                 else:
1717                     places[section] = {}
1718                     logact = "added place %s" % section
1719                     added += 1
1720                 for key,value in overrides.items(section):
1721                     if key in places[section]: chgopt += 1
1722                     else: addopt += 1
1723                     if key in ("centroid", "location"):
1724                         places[section][key] = eval(value)
1725                     else:
1726                         places[section][key] = value
1727                 if addopt and chgopt:
1728                     logact += " (+%s/!%s options)" % (addopt, chgopt)
1729                 elif addopt: logact += " (+%s options)" % addopt
1730                 elif chgopt: logact += " (!%s options)" % chgopt
1731         count += 1
1732         overrideslog.append("%s\n" % logact)
1733     overrideslog.sort()
1734     if os.path.exists(overrideslog_fn):
1735         os.rename(overrideslog_fn, "%s_old"%overrideslog_fn)
1736     overrideslog_fd = codecs.open(overrideslog_fn, "w", "utf8")
1737     import time
1738     overrideslog_fd.write(
1739         '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
1740         '# use, copy, modify, and distribute this software is granted under terms\n'
1741         '# provided in the LICENSE file distributed with this software.\n\n'
1742         % time.gmtime().tm_year)
1743     overrideslog_fd.writelines(overrideslog)
1744     overrideslog_fd.close()
1745     print("done (%s overridden sections: +%s/-%s/!%s)." % (
1746         count,
1747         added,
1748         removed,
1749         changed
1750     ) )
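    # Correlate the indexes: attach the nearest station (and zone) within 0.1
    # radians to every place, station, ZCTA and zone that has coordinates,
    # printing a rough percentage meter as the count approaches the estimate.
    # The repeated milestone blocks below could be factored into one small
    # helper along these lines (an untested sketch only, not wired in here):
    #
    #     def show_progress(count, estimate, milestones):
    #         """Print the dots/percentages passed since the last call."""
    #         level = int(50 * count / estimate)
    #         if level in milestones:
    #             for remaining in milestones[:milestones.index(level) + 1]:
    #                 if remaining % 5:
    #                     sys.stdout.write(".")
    #                 else:
    #                     sys.stdout.write("%s%%" % (remaining * 2,))
    #                 sys.stdout.flush()
    #                 milestones.remove(remaining)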
1751     estimate = 2*len(places) + len(stations) + 2*len(zctas) + len(zones)
1752     print(
1753         "Correlating places, stations, ZCTAs and zones (upper bound is %s):"
1754         % estimate
1755     )
1756     count = 0
1757     milestones = list( range(51) )
1758     message = "   "
1759     sys.stdout.write(message)
1760     sys.stdout.flush()
1761     for fips in places:
1762         centroid = places[fips]["centroid"]
1763         station = (closest(centroid, stations, "location", 0.1)
1764             if centroid else (None, None))
1765         if station[0]:
1766             places[fips]["station"] = station
1767             count += 1
1768             if not count%100:
1769                 level = int(50*count/estimate)
1770                 if level in milestones:
1771                     for remaining in milestones[:milestones.index(level)+1]:
1772                         if remaining%5:
1773                             message = "."
1774                             sys.stdout.write(message)
1775                             sys.stdout.flush()
1776                         else:
1777                             message = "%s%%" % (remaining*2,)
1778                             sys.stdout.write(message)
1779                             sys.stdout.flush()
1780                         milestones.remove(remaining)
1781         zone = (closest(centroid, zones, "centroid", 0.1)
1782             if centroid else (None, None))
1783         if zone[0]:
1784             places[fips]["zone"] = zone
1785             count += 1
1786             if not count%100:
1787                 level = int(50*count/estimate)
1788                 if level in milestones:
1789                     for remaining in milestones[:milestones.index(level)+1]:
1790                         if remaining%5:
1791                             message = "."
1792                             sys.stdout.write(message)
1793                             sys.stdout.flush()
1794                         else:
1795                             message = "%s%%" % (remaining*2,)
1796                             sys.stdout.write(message)
1797                             sys.stdout.flush()
1798                         milestones.remove(remaining)
1799     for station in stations:
1800         if "location" in stations[station]:
1801             location = stations[station]["location"]
1802             zone = (closest(location, zones, "centroid", 0.1)
1803                 if location else (None, None))
1804             if zone[0]:
1805                 stations[station]["zone"] = zone
1806                 count += 1
1807                 if not count%100:
1808                     level = int(50*count/estimate)
1809                     if level in milestones:
1810                         for remaining in milestones[:milestones.index(level)+1]:
1811                             if remaining%5:
1812                                 message = "."
1813                                 sys.stdout.write(message)
1814                                 sys.stdout.flush()
1815                             else:
1816                                 message = "%s%%" % (remaining*2,)
1817                                 sys.stdout.write(message)
1818                                 sys.stdout.flush()
1819                             milestones.remove(remaining)
1820     for zcta in zctas.keys():
1821         centroid = zctas[zcta]["centroid"]
1822         station = (closest(centroid, stations, "location", 0.1)
1823             if centroid else (None, None))
1824         if station[0]:
1825             zctas[zcta]["station"] = station
1826             count += 1
1827             if not count%100:
1828                 level = int(50*count/estimate)
1829                 if level in milestones:
1830                     for remaining in milestones[ : milestones.index(level)+1 ]:
1831                         if remaining%5:
1832                             message = "."
1833                             sys.stdout.write(message)
1834                             sys.stdout.flush()
1835                         else:
1836                             message = "%s%%" % (remaining*2,)
1837                             sys.stdout.write(message)
1838                             sys.stdout.flush()
1839                         milestones.remove(remaining)
1840         zone = (closest(centroid, zones, "centroid", 0.1)
1841             if centroid else (None, None))
1842         if zone[0]:
1843             zctas[zcta]["zone"] = zone
1844             count += 1
1845             if not count%100:
1846                 level = int(50*count/estimate)
1847                 if level in milestones:
1848                     for remaining in milestones[:milestones.index(level)+1]:
1849                         if remaining%5:
1850                             message = "."
1851                             sys.stdout.write(message)
1852                             sys.stdout.flush()
1853                         else:
1854                             message = "%s%%" % (remaining*2,)
1855                             sys.stdout.write(message)
1856                             sys.stdout.flush()
1857                         milestones.remove(remaining)
1858     for zone in zones.keys():
1859         if "centroid" in zones[zone]:
1860             centroid = zones[zone]["centroid"]
1861             station = (closest(centroid, stations, "location", 0.1)
1862                 if centroid else (None, None))
1863             if station[0]:
1864                 zones[zone]["station"] = station
1865                 count += 1
1866                 if not count%100:
1867                     level = int(50*count/estimate)
1868                     if level in milestones:
1869                         for remaining in milestones[:milestones.index(level)+1]:
1870                             if remaining%5:
1871                                 message = "."
1872                                 sys.stdout.write(message)
1873                                 sys.stdout.flush()
1874                             else:
1875                                 message = "%s%%" % (remaining*2,)
1876                                 sys.stdout.write(message)
1877                                 sys.stdout.flush()
1878                             milestones.remove(remaining)
1879     for remaining in milestones:
1880         if remaining%5:
1881             message = "."
1882             sys.stdout.write(message)
1883             sys.stdout.flush()
1884         else:
1885             message = "%s%%" % (remaining*2,)
1886             sys.stdout.write(message)
1887             sys.stdout.flush()
1888     print("\n   done (%s correlations)." % count)
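    # Write the generated airports/places/stations/zctas/zones files, keeping
    # the previous copy of each under an "_old" suffix.  Floats are written
    # with seven decimal places and tuples as Python literals so the values
    # can be evaluated back when the files are read.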
1889     message = "Writing %s..." % airports_fn
1890     sys.stdout.write(message)
1891     sys.stdout.flush()
1892     count = 0
1893     if os.path.exists(airports_fn):
1894         os.rename(airports_fn, "%s_old"%airports_fn)
1895     airports_fd = codecs.open(airports_fn, "w", "utf8")
1896     airports_fd.write(header)
1897     for airport in sorted( airports.keys() ):
1898         airports_fd.write("\n\n[%s]" % airport)
1899         for key, value in sorted( airports[airport].items() ):
1900             if type(value) is float: value = "%.7f"%value
1901             elif type(value) is tuple:
1902                 elements = []
1903                 for element in value:
1904                     if type(element) is float: elements.append("%.7f"%element)
1905                     else: elements.append( repr(element) )
1906                 value = "(%s)"%", ".join(elements)
1907             airports_fd.write( "\n%s = %s" % (key, value) )
1908         count += 1
1909     airports_fd.write("\n")
1910     airports_fd.close()
1911     print("done (%s sections)." % count)
1912     message = "Writing %s..." % places_fn
1913     sys.stdout.write(message)
1914     sys.stdout.flush()
1915     count = 0
1916     if os.path.exists(places_fn):
1917         os.rename(places_fn, "%s_old"%places_fn)
1918     places_fd = codecs.open(places_fn, "w", "utf8")
1919     places_fd.write(header)
1920     for fips in sorted( places.keys() ):
1921         places_fd.write("\n\n[%s]" % fips)
1922         for key, value in sorted( places[fips].items() ):
1923             if type(value) is float: value = "%.7f"%value
1924             elif type(value) is tuple:
1925                 elements = []
1926                 for element in value:
1927                     if type(element) is float: elements.append("%.7f"%element)
1928                     else: elements.append( repr(element) )
1929                 value = "(%s)"%", ".join(elements)
1930             places_fd.write( "\n%s = %s" % (key, value) )
1931         count += 1
1932     places_fd.write("\n")
1933     places_fd.close()
1934     print("done (%s sections)." % count)
1935     message = "Writing %s..." % stations_fn
1936     sys.stdout.write(message)
1937     sys.stdout.flush()
1938     count = 0
1939     if os.path.exists(stations_fn):
1940         os.rename(stations_fn, "%s_old"%stations_fn)
1941     stations_fd = codecs.open(stations_fn, "w", "utf-8")
1942     stations_fd.write(header)
1943     for station in sorted( stations.keys() ):
1944         stations_fd.write("\n\n[%s]" % station)
1945         for key, value in sorted( stations[station].items() ):
1946             if type(value) is float: value = "%.7f"%value
1947             elif type(value) is tuple:
1948                 elements = []
1949                 for element in value:
1950                     if type(element) is float: elements.append("%.7f"%element)
1951                     else: elements.append( repr(element) )
1952                 value = "(%s)"%", ".join(elements)
1953             if type(value) is bytes:
1954                 value = value.decode("utf-8")
1955             stations_fd.write( "\n%s = %s" % (key, value) )
1956         count += 1
1957     stations_fd.write("\n")
1958     stations_fd.close()
1959     print("done (%s sections)." % count)
1960     message = "Writing %s..." % zctas_fn
1961     sys.stdout.write(message)
1962     sys.stdout.flush()
1963     count = 0
1964     if os.path.exists(zctas_fn):
1965         os.rename(zctas_fn, "%s_old"%zctas_fn)
1966     zctas_fd = codecs.open(zctas_fn, "w", "utf8")
1967     zctas_fd.write(header)
1968     for zcta in sorted( zctas.keys() ):
1969         zctas_fd.write("\n\n[%s]" % zcta)
1970         for key, value in sorted( zctas[zcta].items() ):
1971             if type(value) is float: value = "%.7f"%value
1972             elif type(value) is tuple:
1973                 elements = []
1974                 for element in value:
1975                     if type(element) is float: elements.append("%.7f"%element)
1976                     else: elements.append( repr(element) )
1977                 value = "(%s)"%", ".join(elements)
1978             zctas_fd.write( "\n%s = %s" % (key, value) )
1979         count += 1
1980     zctas_fd.write("\n")
1981     zctas_fd.close()
1982     print("done (%s sections)." % count)
1983     message = "Writing %s..." % zones_fn
1984     sys.stdout.write(message)
1985     sys.stdout.flush()
1986     count = 0
1987     if os.path.exists(zones_fn):
1988         os.rename(zones_fn, "%s_old"%zones_fn)
1989     zones_fd = codecs.open(zones_fn, "w", "utf8")
1990     zones_fd.write(header)
1991     for zone in sorted( zones.keys() ):
1992         zones_fd.write("\n\n[%s]" % zone)
1993         for key, value in sorted( zones[zone].items() ):
1994             if type(value) is float: value = "%.7f"%value
1995             elif type(value) is tuple:
1996                 elements = []
1997                 for element in value:
1998                     if type(element) is float: elements.append("%.7f"%element)
1999                     else: elements.append( repr(element) )
2000                 value = "(%s)"%", ".join(elements)
2001             zones_fd.write( "\n%s = %s" % (key, value) )
2002         count += 1
2003     zones_fd.write("\n")
2004     zones_fd.close()
2005     print("done (%s sections)." % count)
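    # QA pass: re-read the generated files and log records with missing
    # centroids, descriptions, locations, METAR URLs or forecasts, airports
    # pointing at unknown stations, and zones whose centroids fall within one
    # km of another zone, then print a summary of anything found.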
2006     message = "Starting QA check..."
2007     sys.stdout.write(message)
2008     sys.stdout.flush()
2009     airports = configparser.ConfigParser()
2010     airports.read(airports_fn)
2011     places = configparser.ConfigParser()
2012     places.read(places_fn)
2013     stations = configparser.ConfigParser()
2014     stations.read(stations_fn)
2015     zctas = configparser.ConfigParser()
2016     zctas.read(zctas_fn)
2017     zones = configparser.ConfigParser()
2018     zones.read(zones_fn)
2019     qalog = []
2020     places_nocentroid = 0
2021     places_nodescription = 0
2022     for place in sorted( places.sections() ):
2023         if not places.has_option(place, "centroid"):
2024             qalog.append("%s: no centroid\n" % place)
2025             places_nocentroid += 1
2026         if not places.has_option(place, "description"):
2027             qalog.append("%s: no description\n" % place)
2028             places_nodescription += 1
2029     stations_nodescription = 0
2030     stations_nolocation = 0
2031     stations_nometar = 0
2032     for station in sorted( stations.sections() ):
2033         if not stations.has_option(station, "description"):
2034             qalog.append("%s: no description\n" % station)
2035             stations_nodescription += 1
2036         if not stations.has_option(station, "location"):
2037             qalog.append("%s: no location\n" % station)
2038             stations_nolocation += 1
2039         if not stations.has_option(station, "metar"):
2040             qalog.append("%s: no metar\n" % station)
2041             stations_nometar += 1
2042     airports_badstation = 0
2043     airports_nostation = 0
2044     for airport in sorted( airports.sections() ):
2045         if not airports.has_option(airport, "station"):
2046             qalog.append("%s: no station\n" % airport)
2047             airports_nostation += 1
2048         else:
2049             station = airports.get(airport, "station")
2050             if station not in stations.sections():
2051                 qalog.append( "%s: bad station %s\n" % (airport, station) )
2052                 airports_badstation += 1
2053     zctas_nocentroid = 0
2054     for zcta in sorted( zctas.sections() ):
2055         if not zctas.has_option(zcta, "centroid"):
2056             qalog.append("%s: no centroid\n" % zcta)
2057             zctas_nocentroid += 1
2058     zones_nocentroid = 0
2059     zones_nodescription = 0
2060     zones_noforecast = 0
2061     zones_overlapping = 0
2062     zonetable = {}
2063     for zone in zones.sections():
2064         if zones.has_option(zone, "centroid"):
2065             zonetable[zone] = {
2066                 "centroid": eval( zones.get(zone, "centroid") )
2067             }
2068     for zone in sorted( zones.sections() ):
2069         if zones.has_option(zone, "centroid"):
2070             zonetable_local = zonetable.copy()
2071             del( zonetable_local[zone] )
2072             centroid = eval( zones.get(zone, "centroid") )
2073             nearest = (closest(centroid, zonetable_local, "centroid", 0.1)
2074                 if centroid else (None, None))
2075             if nearest[0] and nearest[1]*radian_to_km < 1:
2076                 qalog.append( "%s: within one km of %s\n" % (
2077                     zone,
2078                     nearest[0]
2079                 ) )
2080                 zones_overlapping += 1
2081         else:
2082             qalog.append("%s: no centroid\n" % zone)
2083             zones_nocentroid += 1
2084         if not zones.has_option(zone, "description"):
2085             qalog.append("%s: no description\n" % zone)
2086             zones_nodescription += 1
2087         if not zones.has_option(zone, "zone_forecast"):
2088             qalog.append("%s: no forecast\n" % zone)
2089             zones_noforecast += 1
2090     if os.path.exists(qalog_fn):
2091         os.rename(qalog_fn, "%s_old"%qalog_fn)
2092     qalog_fd = codecs.open(qalog_fn, "w", "utf8")
2093     import time
2094     qalog_fd.write(
2095         '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
2096         '# use, copy, modify, and distribute this software is granted under terms\n'
2097         '# provided in the LICENSE file distributed with this software.\n\n'
2098         % time.gmtime().tm_year)
2099     qalog_fd.writelines(qalog)
2100     qalog_fd.close()
2101     if qalog:
2102         print("issues found (see %s for details):"%qalog_fn)
2103         if airports_badstation:
2104             print("   %s airports with invalid station"%airports_badstation)
2105         if airports_nostation:
2106             print("   %s airports with no station"%airports_nostation)
2107         if places_nocentroid:
2108             print("   %s places with no centroid"%places_nocentroid)
2109         if places_nodescription:
2110             print("   %s places with no description"%places_nodescription)
2111         if stations_nodescription:
2112             print("   %s stations with no description"%stations_nodescription)
2113         if stations_nolocation:
2114             print("   %s stations with no location"%stations_nolocation)
2115         if stations_nometar:
2116             print("   %s stations with no METAR"%stations_nometar)
2117         if zctas_nocentroid:
2118             print("   %s ZCTAs with no centroid"%zctas_nocentroid)
2119         if zones_nocentroid:
2120             print("   %s zones with no centroid"%zones_nocentroid)
2121         if zones_nodescription:
2122             print("   %s zones with no description"%zones_nodescription)
2123         if zones_noforecast:
2124             print("   %s zones with no forecast"%zones_noforecast)
2125         if zones_overlapping:
2126             print("   %s zones within one km of another"%zones_overlapping)
2127     else: print("no issues found.")
2128     print("Indexing complete!")