1 """Contains various object definitions needed by the weather utility."""
# NOTE(review): this region is part of a line-numbered dump with interior
# lines elided; the triple-quoted weather_copyright string opened below is
# never closed within this fragment, so no comments are inserted after it —
# confirm against the original file.  radian_to_km/radian_to_mi appear to be
# Earth-radius conversion factors (km and miles per radian of arc) — TODO
# confirm; they are multiplied by angular distances later in this file.
3 weather_copyright = """\
4 # Copyright (c) 2006-2020 Jeremy Stanley <fungi@yuggoth.org>. Permission to
5 # use, copy, modify, and distribute this software is granted under terms
6 # provided in the LICENSE file distributed with this software.
9 weather_version = "2.4"
11 radian_to_km = 6372.795484
12 radian_to_mi = 3959.871528
def pyversion(ref=None):
    """Determine the Python version and optionally compare to a reference.

    With no *ref*, return the interpreter's full version string.  With a
    *ref* like "3" or "3.8", return True when the running interpreter's
    major.minor version is at least the reference's.
    """
    import platform
    ver = platform.python_version()
    # preserve the original truthiness check: an empty ref returns the string
    if not ref:
        return ver
    # NOTE(review): the surrounding lines were elided in this dump; restored
    # as a numeric major.minor list comparison matching the visible fragments.
    return [int(x) for x in ver.split(".")[:2]] \
        >= [int(x) for x in ref.split(".")[:2]]
# NOTE(review): fragment of the Selection class from an elided numbered dump;
# the "class Selection:" header and the __init__ def line are missing here.
# Visible logic: load config and CLI options, merge cached search results
# into the config when caching is enabled, then fold the deprecated --id and
# --city/--st options into the positional arguments with a deprecation
# warning on stderr.
27 """An object to contain selection data."""
29 """Store the config, options and arguments."""
30 self.config = get_config()
31 self.options, self.arguments = get_options(self.config)
32 if self.get_bool("cache") and self.get_bool("cache_search") \
33 and not self.get_bool("longlist"):
34 integrate_search_cache(
# the --id value is promoted to a positional argument, then deleted from
# the options namespace so later lookups fall through to the config
39 if not self.arguments:
40 if "id" in self.options.__dict__ \
41 and self.options.__dict__["id"]:
42 self.arguments.append( self.options.__dict__["id"] )
43 del( self.options.__dict__["id"] )
45 message = "WARNING: the --id option is deprecated and will eventually be removed\n"
46 sys.stderr.write(message)
47 elif "city" in self.options.__dict__ \
48 and self.options.__dict__["city"] \
49 and "st" in self.options.__dict__ \
50 and self.options.__dict__["st"]:
51 self.arguments.append(
53 self.options.__dict__["city"],
54 self.options.__dict__["st"]
57 del( self.options.__dict__["city"] )
58 del( self.options.__dict__["st"] )
60 message = "WARNING: the --city/--st options are deprecated and will eventually be removed\n"
61 sys.stderr.write(message)
# NOTE(review): Selection.get fragment from an elided numbered dump; the
# guess(...) call that populates "guessed" is partially missing (lines
# between 74 and 85 of the original).  Lookup precedence visible here:
# per-argument config section first, then the options namespace, else a
# warning is written to stderr (presumably returning None — confirm).
62 def get(self, option, argument=None):
63 """Retrieve data from the config or options."""
# aliases carrying the now-unsupported city/id/st keys are dropped whole
65 if self.config.has_section(argument) and (
66 self.config.has_option(argument, "city") \
67 or self.config.has_option(argument, "id") \
68 or self.config.has_option(argument, "st")
70 self.config.remove_section(argument)
72 message = "WARNING: the city/id/st options are now unsupported in aliases\n"
73 sys.stderr.write(message)
# unknown arguments are resolved via guess() and cached as a new section
74 if not self.config.has_section(argument):
77 path=self.get("setpath"),
78 info=self.get("info"),
80 self.get("cache") and self.get("cache_search")
82 cachedir=self.get("cachedir"),
83 quiet=self.get_bool("quiet")
85 self.config.add_section(argument)
86 for item in guessed.items():
87 self.config.set(argument, *item)
88 if self.config.has_option(argument, option):
89 return self.config.get(argument, option)
90 if option in self.options.__dict__:
91 return self.options.__dict__[option]
93 message = "WARNING: no URI defined for %s\n" % option
94 sys.stderr.write(message)
def get_bool(self, option, argument=None):
    """Get data and coerce to a boolean if necessary.

    Thin wrapper over get(): any truthy value (e.g. a non-empty string)
    maps to True, anything falsy to False.
    """
    value = self.get(option, argument)
    return bool(value)
def getint(self, option, argument=None):
    """Get data and coerce to an integer if necessary.

    Falsy values (empty string, None, 0) yield 0.
    """
    value = self.get(option, argument)
    if value: return int(value)
    # NOTE(review): the falsy branch was elided in this dump; restoring the
    # conventional default of 0 — confirm against the original file.
    return 0
def average(coords):
    """Average a list of coordinates.

    *coords* is an iterable of (x, y) pairs; returns the arithmetic mean as
    an (x, y) tuple.  Raises ZeroDivisionError on empty input.

    NOTE(review): the function body (and its def line) were elided in this
    dump; reconstructed from the visible docstring and return statement —
    confirm the parameter name against the original file.
    """
    x = y = count = 0
    for first, second in coords:
        x += first
        y += second
        count += 1
    return (x/count, y/count)
115 def filter_units(line, units="imperial"):
116 """Filter or convert units in a line of text between US/UK and metric."""
118 # filter lines with both pressures in the form of "X inches (Y hPa)" or
121 "(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
125 preamble, in_hg, i_fr, i_un, hpa, h_fr, trailer = dual_p.groups()
126 if units == "imperial": line = preamble + in_hg + trailer
127 elif units == "metric": line = preamble + hpa + trailer
128 # filter lines with both temperatures in the form of "X F (Y C)"
130 "(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
134 preamble, fahrenheit, f_fr, celsius, c_fr, trailer = dual_t.groups()
135 if units == "imperial": line = preamble + fahrenheit + trailer
136 elif units == "metric": line = preamble + celsius + trailer
137 # if metric is desired, convert distances in the form of "X mile(s)" to
139 if units == "metric":
140 imperial_d = re.match(
141 "(.* )(\d+)( mile\(s\))(.*)",
145 preamble, mi, m_u, trailer = imperial_d.groups()
146 line = preamble + str(int(round(int(mi)*1.609344))) \
147 + " kilometer(s)" + trailer
148 # filter speeds in the form of "X MPH (Y KT)" to just "X MPH"; if metric is
149 # desired, convert to "Z KPH"
150 imperial_s = re.match(
151 "(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
155 preamble, mph, m_u, kt, trailer = imperial_s.groups()
156 if units == "imperial": line = preamble + mph + m_u + trailer
157 elif units == "metric":
158 line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
160 imperial_s = re.match(
161 "(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
165 preamble, mph, m_u, kt, trailer = imperial_s.groups()
166 if units == "imperial": line = preamble + mph + m_u + trailer
167 elif units == "metric":
168 line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
170 # if imperial is desired, qualify given forcast temperatures like "X F"; if
171 # metric is desired, convert to "Y C"
172 imperial_t = re.match(
173 "(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
177 preamble, parameter, fahrenheit, sep, trailer = imperial_t.groups()
178 if units == "imperial":
179 line = preamble + parameter + fahrenheit + " F" + sep + trailer
180 elif units == "metric":
181 line = preamble + parameter \
182 + str(int(round((int(fahrenheit)-32)*5/9))) + " C" + sep \
184 # hand off the resulting line
# NOTE(review): get_uri fragment from an elided numbered dump; the def line
# and the try/except scaffolding around the py2/py3 urllib imports, the
# cache-hit return, and the retrieval error handling are missing.  Visible
# behavior: serve from an on-disk "datacache" entry when it is younger than
# cacheage, otherwise fetch the URI, optionally strip an HTML <pre> wrapper,
# and best-effort write the result back to the cache.
194 """Return a string containing the results of a URI GET."""
196 import urllib, urllib.error, urllib.request
197 URLError = urllib.error.URLError
198 urlopen = urllib.request.urlopen
200 import urllib2 as urllib
201 URLError = urllib.URLError
202 urlopen = urllib.urlopen
# cache files are named after the URI with "/" flattened to "_"
205 dcachedir = os.path.join( os.path.expanduser(cachedir), "datacache" )
206 if not os.path.exists(dcachedir):
207 try: os.makedirs(dcachedir)
208 except (IOError, OSError): pass
209 dcache_fn = os.path.join(
211 uri.split(":",1)[1].replace("/","_")
214 if cache_data and os.access(dcache_fn, os.R_OK) \
215 and now-cacheage < os.stat(dcache_fn).st_mtime <= now:
216 dcache_fd = open(dcache_fn)
217 data = dcache_fd.read()
221 data = urlopen(uri).read().decode("utf-8")
223 if ignore_fail: return ""
225 import os, sys, traceback
226 message = "%s error: failed to retrieve\n %s\n %s" % (
227 os.path.basename( sys.argv[0] ),
229 traceback.format_exception_only(
234 sys.stderr.write(message)
236 # Some data sources are HTML with the plain text wrapped in pre tags
238 data = data[data.find("<pre>")+5:data.find("</pre>")]
# cache write is best-effort; failures are deliberately ignored
242 dcache_fd = codecs.open(dcache_fn, "w", "utf-8")
243 dcache_fd.write(data)
245 except (IOError, OSError): pass
# NOTE(review): get_metar fragment from an elided numbered dump; the def
# line, the get_uri call, and parts of the default headers list are missing.
# Visible behavior: fetch the decoded METAR, return it raw when verbose,
# otherwise emit a titled summary of only the lines whose "Header:" prefix
# appears in the (lowercased, underscores-to-spaces) headers list, with
# optional unit filtering and indentation.
259 """Return a summarized METAR for the specified station."""
262 message = "%s error: METAR URI required for conditions\n" % \
263 os.path.basename( sys.argv[0] )
264 sys.stderr.write(message)
268 cache_data=cache_data,
272 if pyversion("3") and type(metar) is bytes: metar = metar.decode("utf-8")
273 if verbose: return metar
276 lines = metar.split("\n")
279 "relative_humidity," \
280 + "precipitation_last_hour," \
281 + "sky conditions," \
287 headerlist = headers.lower().replace("_"," ").split(",")
290 title = "Current conditions at %s"
291 place = lines[0].split(", ")
293 place = "%s, %s" % ( place[0].title(), place[1] )
294 else: place = "<UNKNOWN>"
295 output.append(title%place)
296 output.append("Last updated " + lines[1])
298 for header in headerlist:
300 if line.lower().startswith(header + ":"):
# trailing ":NN" remark fields are trimmed off matched lines
301 if re.match(r".*:\d+$", line): line = line[:line.rfind(":")]
302 if imperial: line = filter_units(line, units="imperial")
303 elif metric: line = filter_units(line, units="metric")
304 if quiet: output.append(line)
305 else: output.append(" " + line)
309 "(no conditions matched your header list, try with --verbose)"
311 return "\n".join(output)
# NOTE(review): get_alert fragment from an elided numbered dump; the def
# line, the get_uri call, and the muting/expiry control flow are partially
# missing.  Visible behavior: fetch and decode the alert, return raw when
# verbose, drop feeds lacking a NATIONAL WEATHER SERVICE marker, suppress
# entries whose "Expires:" stamp precedes the current time, and emit the
# remaining non-muted lines (indented unless quiet).
321 """Return alert notice for the specified URI."""
327 cache_data=cache_data,
331 if pyversion("3") and type(alert) is bytes: alert = alert.decode("utf-8")
333 if verbose: return alert
335 if alert.find("\nNATIONAL WEATHER SERVICE") == -1:
339 lines = alert.split("\n")
341 valid_time = time.strftime("%Y%m%d%H%M")
# lexical comparison works because the stamp is zero-padded YYYYMMDDHHMM
344 if line.startswith("Expires:") \
345 and "Expires:" + valid_time > line:
347 if muted and line.startswith("NATIONAL WEATHER SERVICE"):
354 if line and not muted:
355 if quiet: output.append(line)
356 else: output.append(" " + line)
357 return "\n".join(output)
# NOTE(review): get_options fragment from an elided numbered dump; most
# add_option() calls are missing their dest=/action= lines and several
# default-value strings are truncated.  Pattern throughout: read each
# default from the [default] config section when present, else hard-code
# it, then register the optparse option.
# NOTE(review): bool(config.get(...)) treats ANY non-empty string —
# including "False", "no", "0" — as True; flagging rather than fixing since
# this doc pass must not change behavior.
359 def get_options(config):
360 """Parse the options passed on the command line."""
362 # for optparse's builtin -h/--help option
364 "usage: %prog [options] [alias1|search1 [alias2|search2 [...]]]"
366 # for optparse's builtin --version option
367 verstring = "%prog " + weather_version
371 option_parser = optparse.OptionParser(usage=usage, version=verstring)
372 # separate options object from list of arguments and return both
374 # the -a/--alert option
375 if config.has_option("default", "alert"):
376 default_alert = bool(config.get("default", "alert"))
377 else: default_alert = False
378 option_parser.add_option("-a", "--alert",
381 default=default_alert,
382 help="include local alert notices")
384 # the --atypes option
385 if config.has_option("default", "atypes"):
386 default_atypes = config.get("default", "atypes")
389 "coastal_flood_statement," \
390 + "flash_flood_statement," \
391 + "flash_flood_warning," \
392 + "flash_flood_watch," \
393 + "flood_statement," \
395 + "severe_thunderstorm_warning," \
396 + "severe_weather_statement," \
397 + "special_weather_statement," \
398 + "urgent_weather_message"
399 option_parser.add_option("--atypes",
401 default=default_atypes,
402 help="list of alert notification types to display")
404 # the --build-sets option
405 option_parser.add_option("--build-sets",
409 help="(re)build location correlation sets")
411 # the --cacheage option
412 if config.has_option("default", "cacheage"):
413 default_cacheage = config.getint("default", "cacheage")
414 else: default_cacheage = 900
415 option_parser.add_option("--cacheage",
417 default=default_cacheage,
418 help="duration in seconds to refresh cached data")
420 # the --cachedir option
421 if config.has_option("default", "cachedir"):
422 default_cachedir = config.get("default", "cachedir")
423 else: default_cachedir = "~/.weather"
424 option_parser.add_option("--cachedir",
426 default=default_cachedir,
427 help="directory for storing cached searches and data")
429 # the -f/--forecast option
430 if config.has_option("default", "forecast"):
431 default_forecast = bool(config.get("default", "forecast"))
432 else: default_forecast = False
433 option_parser.add_option("-f", "--forecast",
436 default=default_forecast,
437 help="include a local forecast")
439 # the --headers option
440 if config.has_option("default", "headers"):
441 default_headers = config.get("default", "headers")
445 + "relative_humidity," \
450 + "sky_conditions," \
451 + "precipitation_last_hour"
452 option_parser.add_option("--headers",
454 default=default_headers,
455 help="list of conditions headers to display")
457 # the --imperial option
458 if config.has_option("default", "imperial"):
459 default_imperial = bool(config.get("default", "imperial"))
460 else: default_imperial = False
461 option_parser.add_option("--imperial",
464 default=default_imperial,
465 help="filter/convert conditions for US/UK units")
468 option_parser.add_option("--info",
472 help="output detailed information for your search")
474 # the -l/--list option
475 option_parser.add_option("-l", "--list",
479 help="list all configured aliases and cached searches")
481 # the --longlist option
482 option_parser.add_option("--longlist",
486 help="display details of all configured aliases")
488 # the -m/--metric option
489 if config.has_option("default", "metric"):
490 default_metric = bool(config.get("default", "metric"))
491 else: default_metric = False
492 option_parser.add_option("-m", "--metric",
495 default=default_metric,
496 help="filter/convert conditions for metric units")
498 # the -n/--no-conditions option
499 if config.has_option("default", "conditions"):
500 default_conditions = bool(config.get("default", "conditions"))
501 else: default_conditions = True
502 option_parser.add_option("-n", "--no-conditions",
504 action="store_false",
505 default=default_conditions,
506 help="disable output of current conditions")
508 # the --no-cache option
509 if config.has_option("default", "cache"):
510 default_cache = bool(config.get("default", "cache"))
511 else: default_cache = True
512 option_parser.add_option("--no-cache",
514 action="store_false",
516 help="disable all caching (searches and data)")
518 # the --no-cache-data option
519 if config.has_option("default", "cache_data"):
520 default_cache_data = bool(config.get("default", "cache_data"))
521 else: default_cache_data = True
522 option_parser.add_option("--no-cache-data",
524 action="store_false",
526 help="disable retrieved data caching")
528 # the --no-cache-search option
529 if config.has_option("default", "cache_search"):
530 default_cache_search = bool(config.get("default", "cache_search"))
531 else: default_cache_search = True
532 option_parser.add_option("--no-cache-search",
534 action="store_false",
536 help="disable search result caching")
538 # the -q/--quiet option
539 if config.has_option("default", "quiet"):
540 default_quiet = bool(config.get("default", "quiet"))
541 else: default_quiet = False
542 option_parser.add_option("-q", "--quiet",
545 default=default_quiet,
546 help="skip preambles and don't indent")
548 # the --setpath option
549 if config.has_option("default", "setpath"):
550 default_setpath = config.get("default", "setpath")
551 else: default_setpath = ".:~/.weather"
552 option_parser.add_option("--setpath",
554 default=default_setpath,
555 help="directory search path for correlation sets")
557 # the -v/--verbose option
558 if config.has_option("default", "verbose"):
559 default_verbose = bool(config.get("default", "verbose"))
560 else: default_verbose = False
561 option_parser.add_option("-v", "--verbose",
564 default=default_verbose,
565 help="show full decoded feeds")
# the remaining options are deprecated (see Selection.__init__) and hidden
# from --help output via optparse.SUPPRESS_HELP
568 if config.has_option("default", "city"):
569 default_city = config.get("default", "city")
570 else: default_city = ""
571 option_parser.add_option("-c", "--city",
573 default=default_city,
574 help=optparse.SUPPRESS_HELP)
575 if config.has_option("default", "id"):
576 default_id = config.get("default", "id")
577 else: default_id = ""
578 option_parser.add_option("-i", "--id",
581 help=optparse.SUPPRESS_HELP)
582 if config.has_option("default", "st"):
583 default_st = config.get("default", "st")
584 else: default_st = ""
585 option_parser.add_option("-s", "--st",
588 help=optparse.SUPPRESS_HELP)
590 options, arguments = option_parser.parse_args()
591 return options, arguments
# NOTE(review): get_config fragment from an elided numbered dump; the def
# line, the head of the rcfiles list, and the final return are missing.
# Visible behavior: read a cascade of rc files (system-wide then per-user),
# then duplicate any mixed-case section under its lowercased name so later
# lookups are case-insensitive.
594 """Parse the aliases and configuration."""
595 if pyversion("3"): import configparser
596 else: import ConfigParser as configparser
597 config = configparser.ConfigParser()
601 "/etc/weather/weatherrc",
602 os.path.expanduser("~/.weather/weatherrc"),
603 os.path.expanduser("~/.weatherrc"),
606 for rcfile in rcfiles:
607 if os.access(rcfile, os.R_OK): config.read(rcfile)
608 for section in config.sections():
609 if section != section.lower():
610 if config.has_section(section.lower()):
611 config.remove_section(section.lower())
612 config.add_section(section.lower())
613 for option,value in config.items(section):
614 config.set(section.lower(), option, value)
# NOTE(review): integrate_search_cache fragment from an elided numbered
# dump; the staleness-check scaffolding and the cache-clearing os.remove
# call are missing.  Visible behavior: read the "searches" cache file's
# creation stamp from its first line, discard the whole cache when any
# correlation data file is newer, otherwise merge cached sections that the
# config does not already define, and (presumably) return config — confirm.
617 def integrate_search_cache(config, cachedir, setpath):
618 """Add cached search results into the configuration."""
619 if pyversion("3"): import configparser
620 else: import ConfigParser as configparser
622 scache_fn = os.path.join( os.path.expanduser(cachedir), "searches" )
623 if not os.access(scache_fn, os.R_OK): return config
624 scache_fd = open(scache_fn)
625 created = float( scache_fd.readline().split(":")[1].strip().split()[0] )
628 datafiles = data_index(setpath)
630 data_freshness = sorted(
631 [ x[1] for x in datafiles.values() ],
634 else: data_freshness = now
635 if created < data_freshness <= now:
638 print( "[clearing outdated %s]" % scache_fn )
639 except (IOError, OSError):
# existing config sections win over cached search results
642 scache = configparser.ConfigParser()
643 scache.read(scache_fn)
644 for section in scache.sections():
645 if not config.has_section(section):
646 config.add_section(section)
647 for option,value in scache.items(section):
648 config.set(section, option, value)
def list_aliases(config, detail=False):
    """Return a formatted list of aliases defined in the config.

    With detail=True, emit full ini-style sections; otherwise emit one
    "name: description" line per section.

    NOTE(review): reconstructed from an elided numbered dump — only the
    else:/return scaffolding was missing; the literal strings below are
    exactly as visible in the fragment (whitespace inside them may have
    been collapsed by the dump), confirm against the original file.
    """
    if detail:
        output = "\n# configured alias details..."
        for section in sorted(config.sections()):
            output += "\n\n[%s]" % section
            for item in sorted(config.items(section)):
                output += "\n%s = %s" % item
    else:
        output = "configured aliases and cached searches..."
        for section in sorted(config.sections()):
            if config.has_option(section, "description"):
                description = config.get(section, "description")
            else: description = "(no description provided)"
            output += "\n %s: %s" % (section, description)
    return output
# NOTE(review): data_index fragment from an elided numbered dump; the
# docstring, the datafiles initialization, and the loop-break/return lines
# are missing.  Visible behavior: for each known data set name, probe every
# directory on the colon-separated path with "", ".gz" and ".txt" suffixes,
# recording the first existing candidate's path and mtime.
669 def data_index(path):
672 for filename in ("airports", "places", "stations", "zctas", "zones"):
673 for dirname in path.split(":"):
674 for extension in ("", ".gz", ".txt"):
675 candidate = os.path.expanduser(
676 os.path.join( dirname, "".join( (filename, extension) ) )
678 if os.path.exists(candidate):
679 datafiles[filename] = (
681 os.stat(candidate).st_mtime
684 if filename in datafiles:
# NOTE(review): guess() fragment from an elided numbered dump; the def line,
# many closing parens, the gzip imports, and large result-assembly spans are
# missing.  Visible structure: classify the expression by regex into one of
# airport/station/zone/ZCTA/coordinates/FIPS/name searches, load the
# relevant correlation data sets (gzip- or plain-text ini files), resolve to
# a station and zone, build a URI dict, print proximity info, and append the
# result to the on-disk "searches" cache.
# NOTE(review): eval() is applied to values read from the correlation data
# files (stations/zones/zctas/places) — this executes arbitrary code if
# those files are untrusted; flagging only, since this doc pass must not
# change behavior.
698 """Find URIs using airport, gecos, placename, station, ZCTA/ZIP, zone."""
699 import codecs, datetime, time, os, re, sys
700 if pyversion("3"): import configparser
701 else: import ConfigParser as configparser
702 datafiles = data_index(path)
703 if re.match("[A-Za-z]{3}$", expression): searchtype = "airport"
704 elif re.match("[A-Za-z0-9]{4}$", expression): searchtype = "station"
705 elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", expression): searchtype = "zone"
706 elif re.match("[0-9]{5}$", expression): searchtype = "ZCTA"
708 r"[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?, *[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?$",
711 searchtype = "coordinates"
712 elif re.match(r"(FIPS|fips)\d+$", expression): searchtype = "FIPS"
716 if cache_search: action = "caching"
717 else: action = "using"
726 (0.995, "excellent"),
729 if not quiet: print("Searching via %s..."%searchtype)
730 stations = configparser.ConfigParser()
731 dataname = "stations"
732 if dataname in datafiles:
733 datafile = datafiles[dataname][0]
734 if datafile.endswith(".gz"):
737 stations.read_string(
738 gzip.open(datafile).read().decode("utf-8") )
739 else: stations.readfp( gzip.open(datafile) )
741 stations.read(datafile)
743 message = "%s error: can't find \"%s\" data file\n" % (
744 os.path.basename( sys.argv[0] ),
747 sys.stderr.write(message)
749 zones = configparser.ConfigParser()
751 if dataname in datafiles:
752 datafile = datafiles[dataname][0]
753 if datafile.endswith(".gz"):
756 zones.read_string( gzip.open(datafile).read().decode("utf-8") )
757 else: zones.readfp( gzip.open(datafile) )
761 message = "%s error: can't find \"%s\" data file\n" % (
762 os.path.basename( sys.argv[0] ),
765 sys.stderr.write(message)
# per-searchtype resolution of (station, zone, search description)
773 if searchtype == "airport":
774 expression = expression.lower()
775 airports = configparser.ConfigParser()
776 dataname = "airports"
777 if dataname in datafiles:
778 datafile = datafiles[dataname][0]
779 if datafile.endswith(".gz"):
782 airports.read_string(
783 gzip.open(datafile).read().decode("utf-8") )
784 else: airports.readfp( gzip.open(datafile) )
786 airports.read(datafile)
788 message = "%s error: can't find \"%s\" data file\n" % (
789 os.path.basename( sys.argv[0] ),
792 sys.stderr.write(message)
794 if airports.has_section(expression) \
795 and airports.has_option(expression, "station"):
796 search = (expression, "IATA/FAA airport code %s" % expression)
797 station = ( airports.get(expression, "station"), 0 )
798 if stations.has_option(station[0], "zone"):
799 zone = eval( stations.get(station[0], "zone") )
801 if not ( info or quiet ) \
802 and stations.has_option( station[0], "description" ):
806 stations.get(station[0], "description")
810 message = "No IATA/FAA airport code \"%s\" in the %s file.\n" % (
812 datafiles["airports"][0]
814 sys.stderr.write(message)
816 elif searchtype == "station":
817 expression = expression.lower()
818 if stations.has_section(expression):
819 station = (expression, 0)
821 search = (expression, "ICAO station code %s" % expression)
822 if stations.has_option(expression, "zone"):
823 zone = eval( stations.get(expression, "zone") )
825 if not ( info or quiet ) \
826 and stations.has_option(expression, "description"):
830 stations.get(expression, "description")
834 message = "No ICAO weather station \"%s\" in the %s file.\n" % (
836 datafiles["stations"][0]
838 sys.stderr.write(message)
840 elif searchtype == "zone":
841 expression = expression.lower()
842 if zones.has_section(expression) \
843 and zones.has_option(expression, "station"):
844 zone = (expression, 0)
845 station = eval( zones.get(expression, "station") )
847 search = (expression, "NWS/NOAA weather zone %s" % expression)
848 if not ( info or quiet ) \
849 and zones.has_option(expression, "description"):
853 zones.get(expression, "description")
857 message = "No usable NWS weather zone \"%s\" in the %s file.\n" % (
859 datafiles["zones"][0]
861 sys.stderr.write(message)
863 elif searchtype == "ZCTA":
864 zctas = configparser.ConfigParser()
866 if dataname in datafiles:
867 datafile = datafiles[dataname][0]
868 if datafile.endswith(".gz"):
872 gzip.open(datafile).read().decode("utf-8") )
873 else: zctas.readfp( gzip.open(datafile) )
877 message = "%s error: can't find \"%s\" data file\n" % (
878 os.path.basename( sys.argv[0] ),
881 sys.stderr.write(message)
884 if zctas.has_section(expression) \
885 and zctas.has_option(expression, "station"):
886 station = eval( zctas.get(expression, "station") )
887 search = (expression, "Census ZCTA (ZIP code) %s" % expression)
888 if zctas.has_option(expression, "zone"):
889 zone = eval( zctas.get(expression, "zone") )
891 message = "No census ZCTA (ZIP code) \"%s\" in the %s file.\n" % (
893 datafiles["zctas"][0]
895 sys.stderr.write(message)
897 elif searchtype == "coordinates":
898 search = (expression, "Geographic coordinates %s" % expression)
# nearest station/zone within 0.1 radian of the given coordinates
900 for station in stations.sections():
901 if stations.has_option(station, "location"):
902 stationtable[station] = {
903 "location": eval( stations.get(station, "location") )
905 station = closest( gecos(expression), stationtable, "location", 0.1 )
907 message = "No ICAO weather station found near %s.\n" % expression
908 sys.stderr.write(message)
911 for zone in zones.sections():
912 if zones.has_option(zone, "centroid"):
914 "centroid": eval( zones.get(zone, "centroid") )
916 zone = closest( gecos(expression), zonetable, "centroid", 0.1 )
918 message = "No NWS weather zone near %s; forecasts unavailable.\n" \
920 sys.stderr.write(message)
921 elif searchtype in ("FIPS", "name"):
922 places = configparser.ConfigParser()
924 if dataname in datafiles:
925 datafile = datafiles[dataname][0]
926 if datafile.endswith(".gz"):
930 gzip.open(datafile).read().decode("utf-8") )
931 else: places.readfp( gzip.open(datafile) )
933 places.read(datafile)
935 message = "%s error: can't find \"%s\" data file\n" % (
936 os.path.basename( sys.argv[0] ),
939 sys.stderr.write(message)
942 place = expression.lower()
943 if places.has_section(place) and places.has_option(place, "station"):
944 station = eval( places.get(place, "station") )
945 search = (expression, "Census Place %s" % expression)
946 if places.has_option(place, "description"):
949 search[1] + ", %s" % places.get(place, "description")
951 if places.has_option(place, "zone"):
952 zone = eval( places.get(place, "zone") )
953 if not ( info or quiet ) \
954 and places.has_option(place, "description"):
958 places.get(place, "description")
# free-text search: collect candidate sections from all three data sets
962 for place in places.sections():
963 if places.has_option(place, "description") \
964 and places.has_option(place, "station") \
967 places.get(place, "description"),
970 possibilities.append(place)
971 for place in stations.sections():
972 if stations.has_option(place, "description") \
975 stations.get(place, "description"),
978 possibilities.append(place)
979 for place in zones.sections():
980 if zones.has_option(place, "description") \
981 and zones.has_option(place, "station") \
984 zones.get(place, "description"),
987 possibilities.append(place)
988 if len(possibilities) == 1:
989 place = possibilities[0]
990 if places.has_section(place):
991 station = eval( places.get(place, "station") )
992 description = places.get(place, "description")
993 if places.has_option(place, "zone"):
994 zone = eval( places.get(place, "zone" ) )
995 search = ( expression, "%s: %s" % (place, description) )
996 elif stations.has_section(place):
997 station = (place, 0.0)
998 description = stations.get(place, "description")
999 if stations.has_option(place, "zone"):
1000 zone = eval( stations.get(place, "zone" ) )
1001 search = ( expression, "ICAO station code %s" % place )
1002 elif zones.has_section(place):
1003 station = eval( zones.get(place, "station") )
1004 description = zones.get(place, "description")
1006 search = ( expression, "NWS/NOAA weather zone %s" % place )
1007 if not ( info or quiet ):
1008 print( "[%s result %s]" % (action, description) )
1009 if not possibilities and not station[0]:
1010 message = "No FIPS code/census area match in the %s file.\n" % (
1011 datafiles["places"][0]
1013 sys.stderr.write(message)
1016 uris["metar"] = stations.get( station[0], "metar" )
1018 for key,value in zones.items( zone[0] ):
1019 if key not in ("centroid", "description", "station"):
# ambiguous searches list up to max_results candidate matches
1022 count = len(possibilities)
1023 if count <= max_results:
1024 print( "Your search is ambiguous, returning %s matches:" % count )
1025 for place in sorted(possibilities):
1026 if places.has_section(place):
1030 places.get(place, "description")
1033 elif stations.has_section(place):
1037 stations.get(place, "description")
1040 elif zones.has_section(place):
1044 zones.get(place, "description")
1049 "Your search is too ambiguous, returning %s matches." % count
# proximity ranking: percentile thresholds over all known distances
1056 for section in dataset.sections():
1057 if dataset.has_option(section, "station"):
1059 eval( dataset.get(section, "station") )[1]
1061 if dataset.has_option(section, "zone"):
1062 zonelist.append( eval( dataset.get(section, "zone") )[1] )
1065 scount = len(stationlist)
1066 zcount = len(zonelist)
1069 for score in scores:
1071 sranks.append( stationlist[ int( (1-score[0]) * scount ) ] )
1073 zranks.append( zonelist[ int( (1-score[0]) * zcount ) ] )
1074 description = search[1]
1075 uris["description"] = description
1077 "%s\n%s" % ( description, "-" * len(description) )
1082 stations.get( station[0], "description" )
1085 km = radian_to_km*station[1]
1086 mi = radian_to_mi*station[1]
1087 if sranks and not description.startswith("ICAO station code "):
1088 for index in range(0, len(scores)):
1089 if station[1] >= sranks[index]:
1090 score = scores[index][1]
1093 " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1095 elif searchtype == "coordinates":
1096 print( " (%.3gkm, %.3gmi)" % (km, mi) )
1099 "%s: %s" % ( zone[0], zones.get( zone[0], "description" ) )
1101 km = radian_to_km*zone[1]
1102 mi = radian_to_mi*zone[1]
1103 if zranks and not description.startswith("NWS/NOAA weather zone "):
1104 for index in range(0, len(scores)):
1105 if zone[1] >= zranks[index]:
1106 score = scores[index][1]
1109 " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1111 elif searchtype == "coordinates" and zone[0]:
1112 print( " (%.3gkm, %.3gmi)" % (km, mi) )
# append the resolved URIs to the on-disk searches cache (best effort)
1115 nowstamp = "%s (%s)" % (
1117 datetime.datetime.isoformat(
1118 datetime.datetime.fromtimestamp(now),
1122 search_cache = ["\n"]
1123 search_cache.append( "[%s]\n" % search[0] )
1124 search_cache.append( "cached = %s\n" % nowstamp )
1125 for uriname in sorted(uris.keys()):
1126 search_cache.append( "%s = %s\n" % ( uriname, uris[uriname] ) )
1127 real_cachedir = os.path.expanduser(cachedir)
1128 if not os.path.exists(real_cachedir):
1129 try: os.makedirs(real_cachedir)
1130 except (IOError, OSError): pass
1131 scache_fn = os.path.join(real_cachedir, "searches")
1132 if not os.path.exists(scache_fn):
1134 [ x[1] for x in datafiles.values() ],
1137 thenstamp = "%s (%s)" % (
1139 datetime.datetime.isoformat(
1140 datetime.datetime.fromtimestamp(then),
1144 search_cache.insert(
1146 "# based on data files from: %s\n" % thenstamp
1149 scache_existing = configparser.ConfigParser()
1150 scache_existing.read(scache_fn)
1151 if not scache_existing.has_section(search[0]):
1152 scache_fd = codecs.open(scache_fn, "a", "utf-8")
1153 scache_fd.writelines(search_cache)
1155 except (IOError, OSError): pass
def closest(position, nodes, fieldname, angle=None):
    """Find the node nearest *position* by great-circle angular distance.

    *position* is a (lat, lon) pair in radians; *nodes* maps names to dicts
    which may carry a coordinate pair under *fieldname*.  *angle* caps the
    search radius in radians (default 2*pi, i.e. unbounded).  Returns a
    (name or None, angular distance of the match) tuple.

    NOTE(review): reconstructed from an elided numbered dump; the
    initialization lines and the exact-match branch were missing and have
    been restored — confirm against the original file.
    """
    if not angle: angle = 2*math.pi
    match = None
    for name in nodes:
        if fieldname in nodes[name]:
            node = nodes[name][fieldname]
            # cheap latitude rejection before the trig; the longitude test
            # also accepts candidates across the 2*pi wraparound
            if node and abs( position[0]-node[0] ) < angle:
                if abs( position[1]-node[1] ) < angle \
                    or abs( abs( position[1]-node[1] ) - 2*math.pi ) < angle:
                    if position == node:
                        # identical coordinates: unbeatable match
                        angle = 0
                        match = name
                    else:
                        # spherical law of cosines for the arc between them
                        candidate = math.acos(
                            math.sin( position[0] ) * math.sin( node[0] )
                            + math.cos( position[0] )
                            * math.cos( node[0] )
                            * math.cos( position[1] - node[1] )
                        )
                        if candidate < angle:
                            angle = candidate
                            match = name
    if match: match = str(match)
    return (match, angle)
def gecos(formatted):
    """Convert a coordinate pair string into a (lat, lon) tuple of radians.

    Each half of the comma-separated pair is degrees, optionally followed
    by -minutes and -seconds, optionally suffixed by a hemisphere letter
    (e/n/s/w, case-insensitive); s and w negate the value.

    NOTE(review): reconstructed from an elided numbered dump; only the
    docstring line and the ").groups()" closing of the re.match call were
    missing — confirm against the original file.
    """
    coordinates = formatted.split(",")
    for coordinate in range(0, 2):
        degrees, foo, minutes, bar, seconds, hemisphere = re.match(
            r"([\+-]?\d+\.?\d*)(-(\d+))?(-(\d+))?([ensw]?)$",
            coordinates[coordinate].strip().lower()
        ).groups()
        value = float(degrees)
        if minutes: value += float(minutes)/60
        if seconds: value += float(seconds)/3600
        if hemisphere and hemisphere in "sw": value *= -1
        coordinates[coordinate] = math.radians(value)
    return tuple(coordinates)
1201 import codecs, csv, datetime, hashlib, os, re, sys, tarfile, time, zipfile
1202 if pyversion("3"): import configparser
1203 else: import ConfigParser as configparser
1204 for filename in os.listdir("."):
1205 if re.match("[0-9]{4}_Gaz_counties_national.zip$", filename):
1206 gcounties_an = filename
1207 gcounties_fn = filename[:-4] + ".txt"
1208 elif re.match("[0-9]{4}_Gaz_cousubs_national.zip$", filename):
1209 gcousubs_an = filename
1210 gcousubs_fn = filename[:-4] + ".txt"
1211 elif re.match("[0-9]{4}_Gaz_place_national.zip$", filename):
1212 gplace_an = filename
1213 gplace_fn = filename[:-4] + ".txt"
1214 elif re.match("[0-9]{4}_Gaz_zcta_national.zip$", filename):
1216 gzcta_fn = filename[:-4] + ".txt"
1217 elif re.match("bp[0-9]{2}[a-z]{2}[0-9]{2}.dbx$", filename):
1218 cpfzcf_fn = filename
1219 nsdcccc_fn = "nsd_cccc.txt"
1220 ourairports_fn = "airports.csv"
1221 overrides_fn = "overrides.conf"
1222 overrideslog_fn = "overrides.log"
1226 airports_fn = "airports"
1227 places_fn = "places"
1228 stations_fn = "stations"
1233 # generated by %s on %s from these public domain sources:
1235 # https://www.census.gov/geographies/reference-files/time-series/geo/gazetteer-files.html
1241 # https://www.weather.gov/gis/ZoneCounty/
1244 # https://tgftp.nws.noaa.gov/data/
1247 # https://ourairports.com/data/
1250 # ...and these manually-generated or hand-compiled adjustments:
1256 os.path.basename( sys.argv[0] ),
1257 datetime.date.isoformat(
1258 datetime.datetime.utcfromtimestamp( int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) )
1260 hashlib.md5( open(gcounties_an, "rb").read() ).hexdigest(),
1261 datetime.date.isoformat(
1262 datetime.datetime.utcfromtimestamp( os.path.getmtime(gcounties_an) )
1265 hashlib.md5( open(gcousubs_an, "rb").read() ).hexdigest(),
1266 datetime.date.isoformat(
1267 datetime.datetime.utcfromtimestamp( os.path.getmtime(gcousubs_an) )
1270 hashlib.md5( open(gplace_an, "rb").read() ).hexdigest(),
1271 datetime.date.isoformat(
1272 datetime.datetime.utcfromtimestamp( os.path.getmtime(gplace_an) )
1275 hashlib.md5( open(gzcta_an, "rb").read() ).hexdigest(),
1276 datetime.date.isoformat(
1277 datetime.datetime.utcfromtimestamp( os.path.getmtime(gzcta_an) )
1280 hashlib.md5( open(cpfzcf_fn, "rb").read() ).hexdigest(),
1281 datetime.date.isoformat(
1282 datetime.datetime.utcfromtimestamp( os.path.getmtime(cpfzcf_fn) )
1285 hashlib.md5( open(nsdcccc_fn, "rb").read() ).hexdigest(),
1286 datetime.date.isoformat(
1287 datetime.datetime.utcfromtimestamp( os.path.getmtime(nsdcccc_fn) )
1290 hashlib.md5( open(ourairports_fn, "rb").read() ).hexdigest(),
1291 datetime.date.isoformat(
1292 datetime.datetime.utcfromtimestamp( os.path.getmtime(ourairports_fn) )
1295 hashlib.md5( open(overrides_fn, "rb").read() ).hexdigest(),
1296 datetime.date.isoformat(
1297 datetime.datetime.utcfromtimestamp( os.path.getmtime(overrides_fn) )
1300 hashlib.md5( open(slist_fn, "rb").read() ).hexdigest(),
1301 datetime.date.isoformat(
1302 datetime.datetime.utcfromtimestamp( os.path.getmtime(slist_fn) )
1305 hashlib.md5( open(zlist_fn, "rb").read() ).hexdigest(),
1306 datetime.date.isoformat(
1307 datetime.datetime.utcfromtimestamp( os.path.getmtime(zlist_fn) )
1316 message = "Reading %s:%s..." % (gcounties_an, gcounties_fn)
1317 sys.stdout.write(message)
1320 gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "r")
1321 columns = gcounties.readline().decode("utf-8").strip().split("\t")
1322 for line in gcounties:
1323 fields = line.decode("utf-8").strip().split("\t")
1324 f_geoid = fields[ columns.index("GEOID") ].strip()
1325 f_name = fields[ columns.index("NAME") ].strip()
1326 f_usps = fields[ columns.index("USPS") ].strip()
1327 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1328 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1329 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1330 fips = "fips%s" % f_geoid
1331 if fips not in places: places[fips] = {}
1332 places[fips]["centroid"] = gecos(
1333 "%s,%s" % (f_intptlat, f_intptlong)
1335 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1338 print("done (%s lines)." % count)
1339 message = "Reading %s:%s..." % (gcousubs_an, gcousubs_fn)
1340 sys.stdout.write(message)
1343 gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "r")
1344 columns = gcousubs.readline().decode("utf-8").strip().split("\t")
1345 for line in gcousubs:
1346 fields = line.decode("utf-8").strip().split("\t")
1347 f_geoid = fields[ columns.index("GEOID") ].strip()
1348 f_name = fields[ columns.index("NAME") ].strip()
1349 f_usps = fields[ columns.index("USPS") ].strip()
1350 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1351 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1352 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1353 fips = "fips%s" % f_geoid
1354 if fips not in places: places[fips] = {}
1355 places[fips]["centroid"] = gecos(
1356 "%s,%s" % (f_intptlat, f_intptlong)
1358 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1361 print("done (%s lines)." % count)
1362 message = "Reading %s:%s..." % (gplace_an, gplace_fn)
1363 sys.stdout.write(message)
1366 gplace = zipfile.ZipFile(gplace_an).open(gplace_fn, "r")
1367 columns = gplace.readline().decode("utf-8").strip().split("\t")
1369 fields = line.decode("utf-8").strip().split("\t")
1370 f_geoid = fields[ columns.index("GEOID") ].strip()
1371 f_name = fields[ columns.index("NAME") ].strip()
1372 f_usps = fields[ columns.index("USPS") ].strip()
1373 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1374 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1375 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1376 fips = "fips%s" % f_geoid
1377 if fips not in places: places[fips] = {}
1378 places[fips]["centroid"] = gecos(
1379 "%s,%s" % (f_intptlat, f_intptlong)
1381 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1384 print("done (%s lines)." % count)
1385 message = "Reading %s..." % slist_fn
1386 sys.stdout.write(message)
1389 slist = codecs.open(slist_fn, "rU", "utf-8")
1391 icao = line.split("#")[0].strip()
1394 "metar": "https://tgftp.nws.noaa.gov/data/observations/"\
1395 + "metar/decoded/%s.TXT" % icao.upper()
1399 print("done (%s lines)." % count)
1400 message = "Reading %s..." % nsdcccc_fn
1401 sys.stdout.write(message)
1404 nsdcccc = codecs.open(nsdcccc_fn, "rU", "utf-8")
1405 for line in nsdcccc:
1407 fields = line.split(";")
1408 icao = fields[0].strip().lower()
1409 if icao in stations:
1411 name = " ".join( fields[3].strip().title().split() )
1412 if name: description.append(name)
1413 st = fields[4].strip()
1414 if st: description.append(st)
1415 country = " ".join( fields[5].strip().title().split() )
1416 if country: description.append(country)
1418 stations[icao]["description"] = ", ".join(description)
1419 lat, lon = fields[7:9]
1421 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1422 elif "location" not in stations[icao]:
1423 lat, lon = fields[5:7]
1425 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1428 print("done (%s lines)." % count)
1429 message = "Reading %s..." % ourairports_fn
1430 sys.stdout.write(message)
1433 ourairports = open(ourairports_fn, "rU")
1434 for row in csv.reader(ourairports):
1435 icao = row[12].lower()
1436 if icao in stations:
1437 iata = row[13].lower()
1438 if len(iata) == 3: airports[iata] = { "station": icao }
1439 if "description" not in stations[icao]:
1442 if name: description.append(name)
1443 municipality = row[10]
1444 if municipality: description.append(municipality)
1449 c,r = region.split("-", 1)
1450 if c == country: region = r
1451 description.append(region)
1453 description.append(country)
1455 stations[icao]["description"] = ", ".join(description)
1456 if "location" not in stations[icao]:
1461 stations[icao]["location"] = gecos(
1462 "%s,%s" % (lat, lon)
1466 print("done (%s lines)." % count)
1467 message = "Reading %s..." % zlist_fn
1468 sys.stdout.write(message)
1471 zlist = codecs.open(zlist_fn, "rU", "utf-8")
1473 line = line.split("#")[0].strip()
1478 print("done (%s lines)." % count)
1479 message = "Reading %s..." % cpfzcf_fn
1480 sys.stdout.write(message)
1484 cpfzcf = codecs.open(cpfzcf_fn, "rU", "utf-8")
1486 fields = line.strip().split("|")
1487 if len(fields) == 11 \
1488 and fields[0] and fields[1] and fields[9] and fields[10]:
1489 zone = "z".join( fields[:2] ).lower()
1493 zones[zone]["coastal_flood_statement"] = (
1494 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1495 "flood/coastal/%s/%s.txt" % (state.lower(), zone))
1496 zones[zone]["flash_flood_statement"] = (
1497 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1498 "flash_flood/statement/%s/%s.txt"
1499 % (state.lower(), zone))
1500 zones[zone]["flash_flood_warning"] = (
1501 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1502 "flash_flood/warning/%s/%s.txt"
1503 % (state.lower(), zone))
1504 zones[zone]["flash_flood_watch"] = (
1505 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1506 "flash_flood/watch/%s/%s.txt" % (state.lower(), zone))
1507 zones[zone]["flood_statement"] = (
1508 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1509 "flood/statement/%s/%s.txt" % (state.lower(), zone))
1510 zones[zone]["flood_warning"] = (
1511 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1512 "flood/warning/%s/%s.txt" % (state.lower(), zone))
1513 zones[zone]["severe_thunderstorm_warning"] = (
1514 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1515 "thunderstorm/%s/%s.txt" % (state.lower(), zone))
1516 zones[zone]["severe_weather_statement"] = (
1517 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1518 "severe_weather_stmt/%s/%s.txt"
1519 % (state.lower(), zone))
1520 zones[zone]["short_term_forecast"] = (
1521 "https://tgftp.nws.noaa.gov/data/forecasts/nowcast/"
1522 "%s/%s.txt" % (state.lower(), zone))
1523 zones[zone]["special_weather_statement"] = (
1524 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1525 "special_weather_stmt/%s/%s.txt"
1526 % (state.lower(), zone))
1527 zones[zone]["state_forecast"] = (
1528 "https://tgftp.nws.noaa.gov/data/forecasts/state/"
1529 "%s/%s.txt" % (state.lower(), zone))
1530 zones[zone]["urgent_weather_message"] = (
1531 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1532 "non_precip/%s/%s.txt" % (state.lower(), zone))
1533 zones[zone]["zone_forecast"] = (
1534 "https://tgftp.nws.noaa.gov/data/forecasts/zone/"
1535 "%s/%s.txt" % (state.lower(), zone))
1536 description = fields[3].strip()
1537 fips = "fips%s"%fields[6]
1540 if description.endswith(county):
1541 description += " County"
1543 description += ", %s County" % county
1544 description += ", %s, US" % state
1545 zones[zone]["description"] = description
1546 zones[zone]["centroid"] = gecos( ",".join( fields[9:11] ) )
1547 if fips in places and not zones[zone]["centroid"]:
1548 zones[zone]["centroid"] = places[fips]["centroid"]
1551 print("done (%s lines)." % count)
1552 message = "Reading %s:%s..." % (gzcta_an, gzcta_fn)
1553 sys.stdout.write(message)
1556 gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "r")
1557 columns = gzcta.readline().decode("utf-8").strip().split("\t")
1559 fields = line.decode("utf-8").strip().split("\t")
1560 f_geoid = fields[ columns.index("GEOID") ].strip()
1561 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1562 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1563 if f_geoid and f_intptlat and f_intptlong:
1564 if f_geoid not in zctas: zctas[f_geoid] = {}
1565 zctas[f_geoid]["centroid"] = gecos(
1566 "%s,%s" % (f_intptlat, f_intptlong)
1570 print("done (%s lines)." % count)
1571 message = "Reading %s..." % overrides_fn
1572 sys.stdout.write(message)
1578 overrides = configparser.ConfigParser()
1579 overrides.readfp( codecs.open(overrides_fn, "r", "utf8") )
1581 for section in overrides.sections():
1584 if section.startswith("-"):
1585 section = section[1:]
1587 else: delete = False
1588 if re.match("[A-Za-z]{3}$", section):
1590 if section in airports:
1591 del( airports[section] )
1592 logact = "removed airport %s" % section
1595 logact = "tried to remove nonexistent airport %s" % section
1597 if section in airports:
1598 logact = "changed airport %s" % section
1601 airports[section] = {}
1602 logact = "added airport %s" % section
1604 for key,value in overrides.items(section):
1605 if key in airports[section]: chgopt += 1
1607 if key in ("centroid", "location"):
1608 airports[section][key] = eval(value)
1610 airports[section][key] = value
1611 if addopt and chgopt:
1612 logact += " (+%s/!%s options)" % (addopt, chgopt)
1613 elif addopt: logact += " (+%s options)" % addopt
1614 elif chgopt: logact += " (!%s options)" % chgopt
1615 elif re.match("[A-Za-z0-9]{4}$", section):
1617 if section in stations:
1618 del( stations[section] )
1619 logact = "removed station %s" % section
1622 logact = "tried to remove nonexistent station %s" % section
1624 if section in stations:
1625 logact = "changed station %s" % section
1628 stations[section] = {}
1629 logact = "added station %s" % section
1631 for key,value in overrides.items(section):
1632 if key in stations[section]: chgopt += 1
1634 if key in ("centroid", "location"):
1635 stations[section][key] = eval(value)
1637 stations[section][key] = value
1638 if addopt and chgopt:
1639 logact += " (+%s/!%s options)" % (addopt, chgopt)
1640 elif addopt: logact += " (+%s options)" % addopt
1641 elif chgopt: logact += " (!%s options)" % chgopt
1642 elif re.match("[0-9]{5}$", section):
1644 if section in zctas:
1645 del( zctas[section] )
1646 logact = "removed zcta %s" % section
1649 logact = "tried to remove nonexistent zcta %s" % section
1651 if section in zctas:
1652 logact = "changed zcta %s" % section
1656 logact = "added zcta %s" % section
1658 for key,value in overrides.items(section):
1659 if key in zctas[section]: chgopt += 1
1661 if key in ("centroid", "location"):
1662 zctas[section][key] = eval(value)
1664 zctas[section][key] = value
1665 if addopt and chgopt:
1666 logact += " (+%s/!%s options)" % (addopt, chgopt)
1667 elif addopt: logact += " (+%s options)" % addopt
1668 elif chgopt: logact += " (!%s options)" % chgopt
1669 elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", section):
1671 if section in zones:
1672 del( zones[section] )
1673 logact = "removed zone %s" % section
1676 logact = "tried to remove nonexistent zone %s" % section
1678 if section in zones:
1679 logact = "changed zone %s" % section
1683 logact = "added zone %s" % section
1685 for key,value in overrides.items(section):
1686 if key in zones[section]: chgopt += 1
1688 if key in ("centroid", "location"):
1689 zones[section][key] = eval(value)
1691 zones[section][key] = value
1692 if addopt and chgopt:
1693 logact += " (+%s/!%s options)" % (addopt, chgopt)
1694 elif addopt: logact += " (+%s options)" % addopt
1695 elif chgopt: logact += " (!%s options)" % chgopt
1696 elif re.match("fips[0-9]+$", section):
1698 if section in places:
1699 del( places[section] )
1700 logact = "removed place %s" % section
1703 logact = "tried to remove nonexistent place %s" % section
1705 if section in places:
1706 logact = "changed place %s" % section
1709 places[section] = {}
1710 logact = "added place %s" % section
1712 for key,value in overrides.items(section):
1713 if key in places[section]: chgopt += 1
1715 if key in ("centroid", "location"):
1716 places[section][key] = eval(value)
1718 places[section][key] = value
1719 if addopt and chgopt:
1720 logact += " (+%s/!%s options)" % (addopt, chgopt)
1721 elif addopt: logact += " (+%s options)" % addopt
1722 elif chgopt: logact += " (!%s options)" % chgopt
1724 overrideslog.append("%s\n" % logact)
1726 if os.path.exists(overrideslog_fn):
1727 os.rename(overrideslog_fn, "%s_old"%overrideslog_fn)
1728 overrideslog_fd = codecs.open(overrideslog_fn, "w", "utf8")
1730 overrideslog_fd.write(
1731 '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
1732 '# use, copy, modify, and distribute this software is granted under terms\n'
1733 '# provided in the LICENSE file distributed with this software.\n\n'
1734 % time.gmtime().tm_year)
1735 overrideslog_fd.writelines(overrideslog)
1736 overrideslog_fd.close()
1737 print("done (%s overridden sections: +%s/-%s/!%s)." % (
1743 estimate = 2*len(places) + len(stations) + 2*len(zctas) + len(zones)
1745 "Correlating places, stations, ZCTAs and zones (upper bound is %s):" % \
1749 milestones = list( range(51) )
1751 sys.stdout.write(message)
1754 centroid = places[fips]["centroid"]
1756 station = closest(centroid, stations, "location", 0.1)
1758 places[fips]["station"] = station
1761 level = int(50*count/estimate)
1762 if level in milestones:
1763 for remaining in milestones[:milestones.index(level)+1]:
1766 sys.stdout.write(message)
1769 message = "%s%%" % (remaining*2,)
1770 sys.stdout.write(message)
1772 milestones.remove(remaining)
1774 zone = closest(centroid, zones, "centroid", 0.1)
1776 places[fips]["zone"] = zone
1779 level = int(50*count/estimate)
1780 if level in milestones:
1781 for remaining in milestones[:milestones.index(level)+1]:
1784 sys.stdout.write(message)
1787 message = "%s%%" % (remaining*2,)
1788 sys.stdout.write(message)
1790 milestones.remove(remaining)
1791 for station in stations:
1792 if "location" in stations[station]:
1793 location = stations[station]["location"]
1795 zone = closest(location, zones, "centroid", 0.1)
1797 stations[station]["zone"] = zone
1800 level = int(50*count/estimate)
1801 if level in milestones:
1802 for remaining in milestones[:milestones.index(level)+1]:
1805 sys.stdout.write(message)
1808 message = "%s%%" % (remaining*2,)
1809 sys.stdout.write(message)
1811 milestones.remove(remaining)
1812 for zcta in zctas.keys():
1813 centroid = zctas[zcta]["centroid"]
1815 station = closest(centroid, stations, "location", 0.1)
1817 zctas[zcta]["station"] = station
1820 level = int(50*count/estimate)
1821 if level in milestones:
1822 for remaining in milestones[ : milestones.index(level)+1 ]:
1825 sys.stdout.write(message)
1828 message = "%s%%" % (remaining*2,)
1829 sys.stdout.write(message)
1831 milestones.remove(remaining)
1833 zone = closest(centroid, zones, "centroid", 0.1)
1835 zctas[zcta]["zone"] = zone
1838 level = int(50*count/estimate)
1839 if level in milestones:
1840 for remaining in milestones[:milestones.index(level)+1]:
1843 sys.stdout.write(message)
1846 message = "%s%%" % (remaining*2,)
1847 sys.stdout.write(message)
1849 milestones.remove(remaining)
1850 for zone in zones.keys():
1851 if "centroid" in zones[zone]:
1852 centroid = zones[zone]["centroid"]
1854 station = closest(centroid, stations, "location", 0.1)
1856 zones[zone]["station"] = station
1859 level = int(50*count/estimate)
1860 if level in milestones:
1861 for remaining in milestones[:milestones.index(level)+1]:
1864 sys.stdout.write(message)
1867 message = "%s%%" % (remaining*2,)
1868 sys.stdout.write(message)
1870 milestones.remove(remaining)
1871 for remaining in milestones:
1874 sys.stdout.write(message)
1877 message = "%s%%" % (remaining*2,)
1878 sys.stdout.write(message)
1880 print("\n done (%s correlations)." % count)
1881 message = "Writing %s..." % airports_fn
1882 sys.stdout.write(message)
1885 if os.path.exists(airports_fn):
1886 os.rename(airports_fn, "%s_old"%airports_fn)
1887 airports_fd = codecs.open(airports_fn, "w", "utf8")
1888 airports_fd.write(header)
1889 for airport in sorted( airports.keys() ):
1890 airports_fd.write("\n\n[%s]" % airport)
1891 for key, value in sorted( airports[airport].items() ):
1892 if type(value) is float: value = "%.7f"%value
1893 elif type(value) is tuple:
1895 for element in value:
1896 if type(element) is float: elements.append("%.7f"%element)
1897 else: elements.append( repr(element) )
1898 value = "(%s)"%", ".join(elements)
1899 airports_fd.write( "\n%s = %s" % (key, value) )
1901 airports_fd.write("\n")
1903 print("done (%s sections)." % count)
1904 message = "Writing %s..." % places_fn
1905 sys.stdout.write(message)
1908 if os.path.exists(places_fn):
1909 os.rename(places_fn, "%s_old"%places_fn)
1910 places_fd = codecs.open(places_fn, "w", "utf8")
1911 places_fd.write(header)
1912 for fips in sorted( places.keys() ):
1913 places_fd.write("\n\n[%s]" % fips)
1914 for key, value in sorted( places[fips].items() ):
1915 if type(value) is float: value = "%.7f"%value
1916 elif type(value) is tuple:
1918 for element in value:
1919 if type(element) is float: elements.append("%.7f"%element)
1920 else: elements.append( repr(element) )
1921 value = "(%s)"%", ".join(elements)
1922 places_fd.write( "\n%s = %s" % (key, value) )
1924 places_fd.write("\n")
1926 print("done (%s sections)." % count)
1927 message = "Writing %s..." % stations_fn
1928 sys.stdout.write(message)
1931 if os.path.exists(stations_fn):
1932 os.rename(stations_fn, "%s_old"%stations_fn)
1933 stations_fd = codecs.open(stations_fn, "w", "utf-8")
1934 stations_fd.write(header)
1935 for station in sorted( stations.keys() ):
1936 stations_fd.write("\n\n[%s]" % station)
1937 for key, value in sorted( stations[station].items() ):
1938 if type(value) is float: value = "%.7f"%value
1939 elif type(value) is tuple:
1941 for element in value:
1942 if type(element) is float: elements.append("%.7f"%element)
1943 else: elements.append( repr(element) )
1944 value = "(%s)"%", ".join(elements)
1945 if type(value) is bytes:
1946 value = value.decode("utf-8")
1947 stations_fd.write( "\n%s = %s" % (key, value) )
1949 stations_fd.write("\n")
1951 print("done (%s sections)." % count)
1952 message = "Writing %s..." % zctas_fn
1953 sys.stdout.write(message)
1956 if os.path.exists(zctas_fn):
1957 os.rename(zctas_fn, "%s_old"%zctas_fn)
1958 zctas_fd = codecs.open(zctas_fn, "w", "utf8")
1959 zctas_fd.write(header)
1960 for zcta in sorted( zctas.keys() ):
1961 zctas_fd.write("\n\n[%s]" % zcta)
1962 for key, value in sorted( zctas[zcta].items() ):
1963 if type(value) is float: value = "%.7f"%value
1964 elif type(value) is tuple:
1966 for element in value:
1967 if type(element) is float: elements.append("%.7f"%element)
1968 else: elements.append( repr(element) )
1969 value = "(%s)"%", ".join(elements)
1970 zctas_fd.write( "\n%s = %s" % (key, value) )
1972 zctas_fd.write("\n")
1974 print("done (%s sections)." % count)
1975 message = "Writing %s..." % zones_fn
1976 sys.stdout.write(message)
1979 if os.path.exists(zones_fn):
1980 os.rename(zones_fn, "%s_old"%zones_fn)
1981 zones_fd = codecs.open(zones_fn, "w", "utf8")
1982 zones_fd.write(header)
1983 for zone in sorted( zones.keys() ):
1984 zones_fd.write("\n\n[%s]" % zone)
1985 for key, value in sorted( zones[zone].items() ):
1986 if type(value) is float: value = "%.7f"%value
1987 elif type(value) is tuple:
1989 for element in value:
1990 if type(element) is float: elements.append("%.7f"%element)
1991 else: elements.append( repr(element) )
1992 value = "(%s)"%", ".join(elements)
1993 zones_fd.write( "\n%s = %s" % (key, value) )
1995 zones_fd.write("\n")
1997 print("done (%s sections)." % count)
1998 message = "Starting QA check..."
1999 sys.stdout.write(message)
2001 airports = configparser.ConfigParser()
2002 airports.read(airports_fn)
2003 places = configparser.ConfigParser()
2004 places.read(places_fn)
2005 stations = configparser.ConfigParser()
2006 stations.read(stations_fn)
2007 zctas = configparser.ConfigParser()
2008 zctas.read(zctas_fn)
2009 zones = configparser.ConfigParser()
2010 zones.read(zones_fn)
2012 places_nocentroid = 0
2013 places_nodescription = 0
2014 for place in sorted( places.sections() ):
2015 if not places.has_option(place, "centroid"):
2016 qalog.append("%s: no centroid\n" % place)
2017 places_nocentroid += 1
2018 if not places.has_option(place, "description"):
2019 qalog.append("%s: no description\n" % place)
2020 places_nodescription += 1
2021 stations_nodescription = 0
2022 stations_nolocation = 0
2023 stations_nometar = 0
2024 for station in sorted( stations.sections() ):
2025 if not stations.has_option(station, "description"):
2026 qalog.append("%s: no description\n" % station)
2027 stations_nodescription += 1
2028 if not stations.has_option(station, "location"):
2029 qalog.append("%s: no location\n" % station)
2030 stations_nolocation += 1
2031 if not stations.has_option(station, "metar"):
2032 qalog.append("%s: no metar\n" % station)
2033 stations_nometar += 1
2034 airports_badstation = 0
2035 airports_nostation = 0
2036 for airport in sorted( airports.sections() ):
2037 if not airports.has_option(airport, "station"):
2038 qalog.append("%s: no station\n" % airport)
2039 airports_nostation += 1
2041 station = airports.get(airport, "station")
2042 if station not in stations.sections():
2043 qalog.append( "%s: bad station %s\n" % (airport, station) )
2044 airports_badstation += 1
2045 zctas_nocentroid = 0
2046 for zcta in sorted( zctas.sections() ):
2047 if not zctas.has_option(zcta, "centroid"):
2048 qalog.append("%s: no centroid\n" % zcta)
2049 zctas_nocentroid += 1
2050 zones_nocentroid = 0
2051 zones_nodescription = 0
2052 zones_noforecast = 0
2053 zones_overlapping = 0
2055 for zone in zones.sections():
2056 if zones.has_option(zone, "centroid"):
2058 "centroid": eval( zones.get(zone, "centroid") )
2060 for zone in sorted( zones.sections() ):
2061 if zones.has_option(zone, "centroid"):
2062 zonetable_local = zonetable.copy()
2063 del( zonetable_local[zone] )
2064 centroid = eval( zones.get(zone, "centroid") )
2066 nearest = closest(centroid, zonetable_local, "centroid", 0.1)
2067 if nearest[1]*radian_to_km < 1:
2068 qalog.append( "%s: within one km of %s\n" % (
2072 zones_overlapping += 1
2074 qalog.append("%s: no centroid\n" % zone)
2075 zones_nocentroid += 1
2076 if not zones.has_option(zone, "description"):
2077 qalog.append("%s: no description\n" % zone)
2078 zones_nodescription += 1
2079 if not zones.has_option(zone, "zone_forecast"):
2080 qalog.append("%s: no forecast\n" % zone)
2081 zones_noforecast += 1
2082 if os.path.exists(qalog_fn):
2083 os.rename(qalog_fn, "%s_old"%qalog_fn)
2084 qalog_fd = codecs.open(qalog_fn, "w", "utf8")
2087 '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
2088 '# use, copy, modify, and distribute this software is granted under terms\n'
2089 '# provided in the LICENSE file distributed with this software.\n\n'
2090 % time.gmtime().tm_year)
2091 qalog_fd.writelines(qalog)
2094 print("issues found (see %s for details):"%qalog_fn)
2095 if airports_badstation:
2096 print(" %s airports with invalid station"%airports_badstation)
2097 if airports_nostation:
2098 print(" %s airports with no station"%airports_nostation)
2099 if places_nocentroid:
2100 print(" %s places with no centroid"%places_nocentroid)
2101 if places_nodescription:
2102 print(" %s places with no description"%places_nodescription)
2103 if stations_nodescription:
2104 print(" %s stations with no description"%stations_nodescription)
2105 if stations_nolocation:
2106 print(" %s stations with no location"%stations_nolocation)
2107 if stations_nometar:
2108 print(" %s stations with no METAR"%stations_nometar)
2109 if zctas_nocentroid:
2110 print(" %s ZCTAs with no centroid"%zctas_nocentroid)
2111 if zones_nocentroid:
2112 print(" %s zones with no centroid"%zones_nocentroid)
2113 if zones_nodescription:
2114 print(" %s zones with no description"%zones_nodescription)
2115 if zones_noforecast:
2116 print(" %s zones with no forecast"%zones_noforecast)
2117 if zones_overlapping:
2118 print(" %s zones within one km of another"%zones_overlapping)
2119 else: print("no issues found.")
2120 print("Indexing complete!")