1 """Contains various object definitions needed by the weather utility."""
3 weather_copyright = """\
4 # Copyright (c) 2006-2020 Jeremy Stanley <fungi@yuggoth.org>. Permission to
5 # use, copy, modify, and distribute this software is granted under terms
6 # provided in the LICENSE file distributed with this software.
# Version string reported via optparse's builtin --version (see get_options).
weather_version = "2.4"
# Conversion factors from a great-circle angle in radians to surface
# distance (i.e. the Earth's mean radius in kilometers and miles); used by
# guess() when reporting station/zone proximity.
radian_to_km = 6372.795484
radian_to_mi = 3959.871528
def pyversion(ref=None):
    """Determine the Python version and optionally compare to a reference."""
    # NOTE(review): several interior lines of this function (including the
    # platform import and the comparison/return scaffolding) are elided in
    # this excerpt; only the lines shown are original.
    ver = platform.python_version()
    # The two comprehensions below reduce version strings to their numeric
    # major/minor components so they can be compared as integer lists
    # rather than lexically.
        int(x) for x in ver.split(".")[:2]
    # [elided in excerpt]
        int(x) for x in ref.split(".")[:2]
    # [elided in excerpt]
27 """An object to contain selection data."""
29 """Store the config, options and arguments."""
30 self.config = get_config()
31 self.options, self.arguments = get_options(self.config)
32 if self.get_bool("cache") and self.get_bool("cache_search") \
33 and not self.get_bool("longlist"):
34 integrate_search_cache(
39 if not self.arguments:
40 if "id" in self.options.__dict__ \
41 and self.options.__dict__["id"]:
42 self.arguments.append( self.options.__dict__["id"] )
43 del( self.options.__dict__["id"] )
45 message = "WARNING: the --id option is deprecated and will eventually be removed\n"
46 sys.stderr.write(message)
47 elif "city" in self.options.__dict__ \
48 and self.options.__dict__["city"] \
49 and "st" in self.options.__dict__ \
50 and self.options.__dict__["st"]:
51 self.arguments.append(
53 self.options.__dict__["city"],
54 self.options.__dict__["st"]
57 del( self.options.__dict__["city"] )
58 del( self.options.__dict__["st"] )
60 message = "WARNING: the --city/--st options are deprecated and will eventually be removed\n"
61 sys.stderr.write(message)
def get(self, option, argument=None):
    """Retrieve data from the config or options."""
    # NOTE(review): interior lines (closing parens, the search call's
    # opening line, exit paths) are elided in this excerpt.
    # Aliases may no longer carry city/id/st options; drop such sections
    # with a warning so they get re-resolved below.
    if self.config.has_section(argument) and (
        self.config.has_option(argument, "city") \
        or self.config.has_option(argument, "id") \
        or self.config.has_option(argument, "st")
        # [closing paren elided in excerpt]
        self.config.remove_section(argument)
        message = "WARNING: the city/id/st options are now unsupported in aliases\n"
        sys.stderr.write(message)
    # Unknown argument: resolve it via a search (the keyword arguments
    # below apparently belong to a call whose opening line is elided —
    # presumably guess(); confirm upstream) and cache the result as a new
    # config section.
    if not self.config.has_section(argument):
        path=self.get("setpath"),
        info=self.get("info"),
        self.get("cache") and self.get("cache_search")
        cachedir=self.get("cachedir"),
        quiet=self.get_bool("quiet")
        self.config.add_section(argument)
        for item in guessed.items():
            self.config.set(argument, *item)
    # Config sections take precedence over command-line options.
    if self.config.has_option(argument, option):
        return self.config.get(argument, option)
    if option in self.options.__dict__:
        return self.options.__dict__[option]
    # Fall through: nothing defined anywhere — report on stderr.
    message = "%s error: no URI defined for %s\n" % (
        os.path.basename( sys.argv[0] ),
    # [elided in excerpt]
    sys.stderr.write(message)
def get_bool(self, option, argument=None):
    """Look up *option* (optionally scoped to *argument*) and coerce the
    result to a boolean."""
    value = self.get(option, argument)
    return bool(value)
def getint(self, option, argument=None):
    """Get data and coerce to an integer if necessary."""
    value = self.get(option, argument)
    # Truthy values (non-empty strings, nonzero numbers) are converted
    # with int(); may raise ValueError for non-numeric strings.
    if value: return int(value)
    # NOTE(review): the falsy-value branch is elided in this excerpt;
    # presumably it returns a numeric default — confirm upstream.
110 """Average a list of coordinates."""
117 return (x/count, y/count)
def filter_units(line, units="imperial"):
    """Filter or convert units in a line of text between US/UK and metric."""
    # NOTE(review): the re.match() call scaffolding (pattern closes, flags,
    # match-object guards) is elided throughout this excerpt; only the
    # lines shown are original.
    # filter lines with both pressures in the form of "X inches (Y hPa)" or
    # [second comment line elided in excerpt]
        "(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
    # [elided in excerpt]
        preamble, in_hg, i_fr, i_un, hpa, h_fr, trailer = dual_p.groups()
        if units == "imperial": line = preamble + in_hg + trailer
        elif units == "metric": line = preamble + hpa + trailer
    # filter lines with both temperatures in the form of "X F (Y C)"
        "(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
    # [elided in excerpt]
        preamble, fahrenheit, f_fr, celsius, c_fr, trailer = dual_t.groups()
        if units == "imperial": line = preamble + fahrenheit + trailer
        elif units == "metric": line = preamble + celsius + trailer
    # if metric is desired, convert distances in the form of "X mile(s)" to
    # kilometers (1 mile = 1.609344 km, rounded to whole kilometers)
    if units == "metric":
        imperial_d = re.match(
            "(.* )(\d+)( mile\(s\))(.*)",
        # [elided in excerpt]
            preamble, mi, m_u, trailer = imperial_d.groups()
            line = preamble + str(int(round(int(mi)*1.609344))) \
                + " kilometer(s)" + trailer
    # filter speeds in the form of "X MPH (Y KT)" to just "X MPH"; if metric is
    # desired, convert to "Z KPH"
    imperial_s = re.match(
        "(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
    # [elided in excerpt]
        preamble, mph, m_u, kt, trailer = imperial_s.groups()
        if units == "imperial": line = preamble + mph + m_u + trailer
        elif units == "metric":
            line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
    # NOTE(review): the MPH/KT stanza appears a second time below at
    # distinct original line numbers — possibly intentional (to catch a
    # second speed occurrence in the same line); verify against upstream.
    imperial_s = re.match(
        "(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
    # [elided in excerpt]
        preamble, mph, m_u, kt, trailer = imperial_s.groups()
        if units == "imperial": line = preamble + mph + m_u + trailer
        elif units == "metric":
            line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
    # if imperial is desired, qualify given forecast temperatures like "X F"; if
    # metric is desired, convert to "Y C"
    imperial_t = re.match(
        "(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
    # [elided in excerpt]
        preamble, parameter, fahrenheit, sep, trailer = imperial_t.groups()
        if units == "imperial":
            line = preamble + parameter + fahrenheit + " F" + sep + trailer
        elif units == "metric":
            line = preamble + parameter \
                + str(int(round((int(fahrenheit)-32)*5/9))) + " C" + sep \
    # hand off the resulting line
    # [elided in excerpt: return]
198 """Return a string containing the results of a URI GET."""
200 import urllib, urllib.error, urllib.request
201 URLError = urllib.error.URLError
202 urlopen = urllib.request.urlopen
204 import urllib2 as urllib
205 URLError = urllib.URLError
206 urlopen = urllib.urlopen
209 dcachedir = os.path.join( os.path.expanduser(cachedir), "datacache" )
210 if not os.path.exists(dcachedir):
211 try: os.makedirs(dcachedir)
212 except (IOError, OSError): pass
213 dcache_fn = os.path.join(
215 uri.split(":",1)[1].replace("/","_")
218 if cache_data and os.access(dcache_fn, os.R_OK) \
219 and now-cacheage < os.stat(dcache_fn).st_mtime <= now:
220 dcache_fd = open(dcache_fn)
221 data = dcache_fd.read()
225 data = urlopen(uri).read().decode("utf-8")
227 if ignore_fail: return ""
229 import os, sys, traceback
230 message = "%s error: failed to retrieve\n %s\n %s" % (
231 os.path.basename( sys.argv[0] ),
233 traceback.format_exception_only(
238 sys.stderr.write(message)
240 # Some data sources are HTML with the plain text wrapped in pre tags
242 data = data[data.find("<pre>")+5:data.find("</pre>")]
246 dcache_fd = codecs.open(dcache_fn, "w", "utf-8")
247 dcache_fd.write(data)
249 except (IOError, OSError): pass
263 """Return a summarized METAR for the specified station."""
266 message = "%s error: METAR URI required for conditions\n" % \
267 os.path.basename( sys.argv[0] )
268 sys.stderr.write(message)
272 cache_data=cache_data,
276 if pyversion("3") and type(metar) is bytes: metar = metar.decode("utf-8")
277 if verbose: return metar
280 lines = metar.split("\n")
283 "relative_humidity," \
284 + "precipitation_last_hour," \
285 + "sky conditions," \
291 headerlist = headers.lower().replace("_"," ").split(",")
294 title = "Current conditions at %s"
295 place = lines[0].split(", ")
297 place = "%s, %s" % ( place[0].title(), place[1] )
298 else: place = "<UNKNOWN>"
299 output.append(title%place)
300 output.append("Last updated " + lines[1])
302 for header in headerlist:
304 if line.lower().startswith(header + ":"):
305 if re.match(r".*:\d+$", line): line = line[:line.rfind(":")]
306 if imperial: line = filter_units(line, units="imperial")
307 elif metric: line = filter_units(line, units="metric")
308 if quiet: output.append(line)
309 else: output.append(" " + line)
313 "(no conditions matched your header list, try with --verbose)"
315 return "\n".join(output)
325 """Return alert notice for the specified URI."""
328 message = "%s error: Alert URI required for alerts\n" % \
329 os.path.basename( sys.argv[0] )
330 sys.stderr.write(message)
335 cache_data=cache_data,
339 if pyversion("3") and type(alert) is bytes: alert = alert.decode("utf-8")
341 if verbose: return alert
343 if alert.find("\nNATIONAL WEATHER SERVICE") == -1:
347 lines = alert.split("\n")
349 valid_time = time.strftime("%Y%m%d%H%M")
352 if line.startswith("Expires:") \
353 and "Expires:" + valid_time > line:
355 if muted and line.startswith("NATIONAL WEATHER SERVICE"):
362 if line and not muted:
363 if quiet: output.append(line)
364 else: output.append(" " + line)
365 return "\n".join(output)
def get_options(config):
    """Parse the options passed on the command line."""
    # NOTE(review): throughout this function the dest=/action=/type= lines
    # of each add_option() call (and some else-branches) are elided in this
    # excerpt; only the lines shown are original.
    # NOTE(review): bool(config.get(...)) is True for ANY non-empty string,
    # including "false" or "0", so rc-file booleans effectively mean "set
    # at all" — verify this is the intended behavior.
    # for optparse's builtin -h/--help option
        "usage: %prog [options] [alias1|search1 [alias2|search2 [...]]]"
    # for optparse's builtin --version option
    verstring = "%prog " + weather_version
    option_parser = optparse.OptionParser(usage=usage, version=verstring)
    # separate options object from list of arguments and return both
    # the -a/--alert option
    if config.has_option("default", "alert"):
        default_alert = bool(config.get("default", "alert"))
    else: default_alert = False
    option_parser.add_option("-a", "--alert",
        default=default_alert,
        help="include local alert notices")
    # the --atypes option
    if config.has_option("default", "atypes"):
        default_atypes = config.get("default", "atypes")
    # [elided in excerpt: else-branch head of the default list]
        "coastal_flood_statement," \
        + "flash_flood_statement," \
        + "flash_flood_warning," \
        + "flash_flood_watch," \
        + "flood_statement," \
        + "marine_weather_statement," \
        + "river_statement," \
        + "severe_thunderstorm_warning," \
        + "severe_weather_statement," \
        + "short_term_forecast," \
        + "special_marine_warning," \
        + "special_weather_statement," \
        + "tornado_warning," \
        + "urgent_weather_message"
    option_parser.add_option("--atypes",
        default=default_atypes,
        help="list of alert notification types to display")
    # the --build-sets option
    option_parser.add_option("--build-sets",
        help="(re)build location correlation sets")
    # the --cacheage option
    if config.has_option("default", "cacheage"):
        default_cacheage = config.getint("default", "cacheage")
    else: default_cacheage = 900
    option_parser.add_option("--cacheage",
        default=default_cacheage,
        help="duration in seconds to refresh cached data")
    # the --cachedir option
    if config.has_option("default", "cachedir"):
        default_cachedir = config.get("default", "cachedir")
    else: default_cachedir = "~/.weather"
    option_parser.add_option("--cachedir",
        default=default_cachedir,
        help="directory for storing cached searches and data")
    # the -f/--forecast option
    if config.has_option("default", "forecast"):
        default_forecast = bool(config.get("default", "forecast"))
    else: default_forecast = False
    option_parser.add_option("-f", "--forecast",
        default=default_forecast,
        help="include a local forecast")
    # the --headers option
    if config.has_option("default", "headers"):
        default_headers = config.get("default", "headers")
    # [elided in excerpt: else-branch head of the default list]
        + "relative_humidity," \
        + "sky_conditions," \
        + "precipitation_last_hour"
    option_parser.add_option("--headers",
        default=default_headers,
        help="list of conditions headers to display")
    # the --imperial option
    if config.has_option("default", "imperial"):
        default_imperial = bool(config.get("default", "imperial"))
    else: default_imperial = False
    option_parser.add_option("--imperial",
        default=default_imperial,
        help="filter/convert conditions for US/UK units")
    # the --info option
    option_parser.add_option("--info",
        help="output detailed information for your search")
    # the -l/--list option
    option_parser.add_option("-l", "--list",
        help="list all configured aliases and cached searches")
    # the --longlist option
    option_parser.add_option("--longlist",
        help="display details of all configured aliases")
    # the -m/--metric option
    if config.has_option("default", "metric"):
        default_metric = bool(config.get("default", "metric"))
    else: default_metric = False
    option_parser.add_option("-m", "--metric",
        default=default_metric,
        help="filter/convert conditions for metric units")
    # the -n/--no-conditions option
    if config.has_option("default", "conditions"):
        default_conditions = bool(config.get("default", "conditions"))
    else: default_conditions = True
    option_parser.add_option("-n", "--no-conditions",
        action="store_false",
        default=default_conditions,
        help="disable output of current conditions")
    # the --no-cache option
    if config.has_option("default", "cache"):
        default_cache = bool(config.get("default", "cache"))
    else: default_cache = True
    option_parser.add_option("--no-cache",
        action="store_false",
        help="disable all caching (searches and data)")
    # the --no-cache-data option
    if config.has_option("default", "cache_data"):
        default_cache_data = bool(config.get("default", "cache_data"))
    else: default_cache_data = True
    option_parser.add_option("--no-cache-data",
        action="store_false",
        help="disable retrieved data caching")
    # the --no-cache-search option
    if config.has_option("default", "cache_search"):
        default_cache_search = bool(config.get("default", "cache_search"))
    else: default_cache_search = True
    option_parser.add_option("--no-cache-search",
        action="store_false",
        help="disable search result caching")
    # the -q/--quiet option
    if config.has_option("default", "quiet"):
        default_quiet = bool(config.get("default", "quiet"))
    else: default_quiet = False
    option_parser.add_option("-q", "--quiet",
        default=default_quiet,
        help="skip preambles and don't indent")
    # the --setpath option
    if config.has_option("default", "setpath"):
        default_setpath = config.get("default", "setpath")
    else: default_setpath = ".:~/.weather"
    option_parser.add_option("--setpath",
        default=default_setpath,
        help="directory search path for correlation sets")
    # the -v/--verbose option
    if config.has_option("default", "verbose"):
        default_verbose = bool(config.get("default", "verbose"))
    else: default_verbose = False
    option_parser.add_option("-v", "--verbose",
        default=default_verbose,
        help="show full decoded feeds")
    # Deprecated, help-suppressed options kept for backward compatibility
    # (see the constructor above, which folds them into the arguments).
    if config.has_option("default", "city"):
        default_city = config.get("default", "city")
    else: default_city = ""
    option_parser.add_option("-c", "--city",
        default=default_city,
        help=optparse.SUPPRESS_HELP)
    if config.has_option("default", "id"):
        default_id = config.get("default", "id")
    else: default_id = ""
    option_parser.add_option("-i", "--id",
        help=optparse.SUPPRESS_HELP)
    if config.has_option("default", "st"):
        default_st = config.get("default", "st")
    else: default_st = ""
    option_parser.add_option("-s", "--st",
        help=optparse.SUPPRESS_HELP)
    options, arguments = option_parser.parse_args()
    return options, arguments
607 """Parse the aliases and configuration."""
608 if pyversion("3"): import configparser
609 else: import ConfigParser as configparser
610 config = configparser.ConfigParser()
614 "/etc/weather/weatherrc",
615 os.path.expanduser("~/.weather/weatherrc"),
616 os.path.expanduser("~/.weatherrc"),
619 for rcfile in rcfiles:
620 if os.access(rcfile, os.R_OK): config.read(rcfile)
621 for section in config.sections():
622 if section != section.lower():
623 if config.has_section(section.lower()):
624 config.remove_section(section.lower())
625 config.add_section(section.lower())
626 for option,value in config.items(section):
627 config.set(section.lower(), option, value)
def integrate_search_cache(config, cachedir, setpath):
    """Add cached search results into the configuration."""
    if pyversion("3"): import configparser
    else: import ConfigParser as configparser
    # [elided in excerpt: time import / current timestamp]
    scache_fn = os.path.join( os.path.expanduser(cachedir), "searches" )
    if not os.access(scache_fn, os.R_OK): return config
    scache_fd = open(scache_fn)
    # The first line of the cache appears to record its creation time after
    # a ":" separator; the epoch is parsed out of it — confirm the exact
    # header format against the writer in guess().
    created = float( scache_fd.readline().split(":")[1].strip().split()[0] )
    # [elided in excerpt]
    datafiles = data_index(setpath)
    # Determine the freshest data-file mtime; if any data file is newer
    # than the cache, the cache is considered outdated and cleared.
    # [elided in excerpt: guard]
        data_freshness = sorted(
            [ x[1] for x in datafiles.values() ],
        # [elided in excerpt: sort arguments / selection]
    else: data_freshness = now
    if created < data_freshness <= now:
        # [elided in excerpt: try / removal of the stale file]
            print( "[clearing outdated %s]" % scache_fn )
        except (IOError, OSError):
        # [elided in excerpt]
    scache = configparser.ConfigParser()
    scache.read(scache_fn)
    # Merge cached sections into the config without clobbering sections
    # the config already defines.
    for section in scache.sections():
        if not config.has_section(section):
            config.add_section(section)
            for option,value in scache.items(section):
                config.set(section, option, value)
    # [elided in excerpt: return config]
def list_aliases(config, detail=False):
    """Return a formatted list of aliases defined in the config."""
    # Long form: dump every option of every section, ini-style (the branch
    # header is elided in this excerpt).
        output = "\n# configured alias details..."
        for section in sorted(config.sections()):
            output += "\n\n[%s]" % section
            for item in sorted(config.items(section)):
                output += "\n%s = %s" % item
    # Short form: one "name: description" line per section.
    # [elided in excerpt: else-branch header]
        output = "configured aliases and cached searches..."
        for section in sorted(config.sections()):
            if config.has_option(section, "description"):
                description = config.get(section, "description")
            else: description = "(no description provided)"
            output += "\n %s: %s" % (section, description)
    # [elided in excerpt: return output]
def data_index(path):
    # NOTE(review): docstring and the result-dict initialization are elided
    # in this excerpt.
    # For each known data set, probe every directory on the colon-separated
    # search path, trying the bare name and the .gz/.txt variants.
    for filename in ("airports", "places", "stations", "zctas", "zones"):
        for dirname in path.split(":"):
            for extension in ("", ".gz", ".txt"):
                candidate = os.path.expanduser(
                    os.path.join( dirname, "".join( (filename, extension) ) )
                # [elided in excerpt: closing paren]
                if os.path.exists(candidate):
                    # Record the path with its mtime; the mtime feeds the
                    # cache-freshness checks in integrate_search_cache().
                    datafiles[filename] = (
                    # [elided in excerpt]
                        os.stat(candidate).st_mtime
                    # [elided in excerpt]
            # First hit on the search path wins.
            if filename in datafiles:
            # [elided in excerpt: break]
    # [elided in excerpt: return datafiles]
711 """Find URIs using airport, gecos, placename, station, ZCTA/ZIP, zone."""
712 import codecs, datetime, time, os, re, sys
713 if pyversion("3"): import configparser
714 else: import ConfigParser as configparser
715 datafiles = data_index(path)
716 if re.match("[A-Za-z]{3}$", expression): searchtype = "airport"
717 elif re.match("[A-Za-z0-9]{4}$", expression): searchtype = "station"
718 elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", expression): searchtype = "zone"
719 elif re.match("[0-9]{5}$", expression): searchtype = "ZCTA"
721 r"[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?, *[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?$",
724 searchtype = "coordinates"
725 elif re.match(r"(FIPS|fips)\d+$", expression): searchtype = "FIPS"
729 if cache_search: action = "caching"
730 else: action = "using"
739 (0.995, "excellent"),
742 if not quiet: print("Searching via %s..."%searchtype)
743 stations = configparser.ConfigParser()
744 dataname = "stations"
745 if dataname in datafiles:
746 datafile = datafiles[dataname][0]
747 if datafile.endswith(".gz"):
750 stations.read_string(
751 gzip.open(datafile).read().decode("utf-8") )
752 else: stations.readfp( gzip.open(datafile) )
754 stations.read(datafile)
756 message = "%s error: can't find \"%s\" data file\n" % (
757 os.path.basename( sys.argv[0] ),
760 sys.stderr.write(message)
762 zones = configparser.ConfigParser()
764 if dataname in datafiles:
765 datafile = datafiles[dataname][0]
766 if datafile.endswith(".gz"):
769 zones.read_string( gzip.open(datafile).read().decode("utf-8") )
770 else: zones.readfp( gzip.open(datafile) )
774 message = "%s error: can't find \"%s\" data file\n" % (
775 os.path.basename( sys.argv[0] ),
778 sys.stderr.write(message)
786 if searchtype == "airport":
787 expression = expression.lower()
788 airports = configparser.ConfigParser()
789 dataname = "airports"
790 if dataname in datafiles:
791 datafile = datafiles[dataname][0]
792 if datafile.endswith(".gz"):
795 airports.read_string(
796 gzip.open(datafile).read().decode("utf-8") )
797 else: airports.readfp( gzip.open(datafile) )
799 airports.read(datafile)
801 message = "%s error: can't find \"%s\" data file\n" % (
802 os.path.basename( sys.argv[0] ),
805 sys.stderr.write(message)
807 if airports.has_section(expression) \
808 and airports.has_option(expression, "station"):
809 search = (expression, "IATA/FAA airport code %s" % expression)
810 station = ( airports.get(expression, "station"), 0 )
811 if stations.has_option(station[0], "zone"):
812 zone = eval( stations.get(station[0], "zone") )
814 if not ( info or quiet ) \
815 and stations.has_option( station[0], "description" ):
819 stations.get(station[0], "description")
823 message = "No IATA/FAA airport code \"%s\" in the %s file.\n" % (
825 datafiles["airports"][0]
827 sys.stderr.write(message)
829 elif searchtype == "station":
830 expression = expression.lower()
831 if stations.has_section(expression):
832 station = (expression, 0)
834 search = (expression, "ICAO station code %s" % expression)
835 if stations.has_option(expression, "zone"):
836 zone = eval( stations.get(expression, "zone") )
838 if not ( info or quiet ) \
839 and stations.has_option(expression, "description"):
843 stations.get(expression, "description")
847 message = "No ICAO weather station \"%s\" in the %s file.\n" % (
849 datafiles["stations"][0]
851 sys.stderr.write(message)
853 elif searchtype == "zone":
854 expression = expression.lower()
855 if zones.has_section(expression) \
856 and zones.has_option(expression, "station"):
857 zone = (expression, 0)
858 station = eval( zones.get(expression, "station") )
860 search = (expression, "NWS/NOAA weather zone %s" % expression)
861 if not ( info or quiet ) \
862 and zones.has_option(expression, "description"):
866 zones.get(expression, "description")
870 message = "No usable NWS weather zone \"%s\" in the %s file.\n" % (
872 datafiles["zones"][0]
874 sys.stderr.write(message)
876 elif searchtype == "ZCTA":
877 zctas = configparser.ConfigParser()
879 if dataname in datafiles:
880 datafile = datafiles[dataname][0]
881 if datafile.endswith(".gz"):
885 gzip.open(datafile).read().decode("utf-8") )
886 else: zctas.readfp( gzip.open(datafile) )
890 message = "%s error: can't find \"%s\" data file\n" % (
891 os.path.basename( sys.argv[0] ),
894 sys.stderr.write(message)
897 if zctas.has_section(expression) \
898 and zctas.has_option(expression, "station"):
899 station = eval( zctas.get(expression, "station") )
900 search = (expression, "Census ZCTA (ZIP code) %s" % expression)
901 if zctas.has_option(expression, "zone"):
902 zone = eval( zctas.get(expression, "zone") )
904 message = "No census ZCTA (ZIP code) \"%s\" in the %s file.\n" % (
906 datafiles["zctas"][0]
908 sys.stderr.write(message)
910 elif searchtype == "coordinates":
911 search = (expression, "Geographic coordinates %s" % expression)
913 for station in stations.sections():
914 if stations.has_option(station, "location"):
915 stationtable[station] = {
916 "location": eval( stations.get(station, "location") )
918 station = closest( gecos(expression), stationtable, "location", 0.1 )
920 message = "No ICAO weather station found near %s.\n" % expression
921 sys.stderr.write(message)
924 for zone in zones.sections():
925 if zones.has_option(zone, "centroid"):
927 "centroid": eval( zones.get(zone, "centroid") )
929 zone = closest( gecos(expression), zonetable, "centroid", 0.1 )
931 message = "No NWS weather zone near %s; forecasts unavailable.\n" \
933 sys.stderr.write(message)
934 elif searchtype in ("FIPS", "name"):
935 places = configparser.ConfigParser()
937 if dataname in datafiles:
938 datafile = datafiles[dataname][0]
939 if datafile.endswith(".gz"):
943 gzip.open(datafile).read().decode("utf-8") )
944 else: places.readfp( gzip.open(datafile) )
946 places.read(datafile)
948 message = "%s error: can't find \"%s\" data file\n" % (
949 os.path.basename( sys.argv[0] ),
952 sys.stderr.write(message)
955 place = expression.lower()
956 if places.has_section(place) and places.has_option(place, "station"):
957 station = eval( places.get(place, "station") )
958 search = (expression, "Census Place %s" % expression)
959 if places.has_option(place, "description"):
962 search[1] + ", %s" % places.get(place, "description")
964 if places.has_option(place, "zone"):
965 zone = eval( places.get(place, "zone") )
966 if not ( info or quiet ) \
967 and places.has_option(place, "description"):
971 places.get(place, "description")
975 for place in places.sections():
976 if places.has_option(place, "description") \
977 and places.has_option(place, "station") \
980 places.get(place, "description"),
983 possibilities.append(place)
984 for place in stations.sections():
985 if stations.has_option(place, "description") \
988 stations.get(place, "description"),
991 possibilities.append(place)
992 for place in zones.sections():
993 if zones.has_option(place, "description") \
994 and zones.has_option(place, "station") \
997 zones.get(place, "description"),
1000 possibilities.append(place)
1001 if len(possibilities) == 1:
1002 place = possibilities[0]
1003 if places.has_section(place):
1004 station = eval( places.get(place, "station") )
1005 description = places.get(place, "description")
1006 if places.has_option(place, "zone"):
1007 zone = eval( places.get(place, "zone" ) )
1008 search = ( expression, "%s: %s" % (place, description) )
1009 elif stations.has_section(place):
1010 station = (place, 0.0)
1011 description = stations.get(place, "description")
1012 if stations.has_option(place, "zone"):
1013 zone = eval( stations.get(place, "zone" ) )
1014 search = ( expression, "ICAO station code %s" % place )
1015 elif zones.has_section(place):
1016 station = eval( zones.get(place, "station") )
1017 description = zones.get(place, "description")
1019 search = ( expression, "NWS/NOAA weather zone %s" % place )
1020 if not ( info or quiet ):
1021 print( "[%s result %s]" % (action, description) )
1022 if not possibilities and not station[0]:
1023 message = "No FIPS code/census area match in the %s file.\n" % (
1024 datafiles["places"][0]
1026 sys.stderr.write(message)
1029 uris["metar"] = stations.get( station[0], "metar" )
1031 for key,value in zones.items( zone[0] ):
1032 if key not in ("centroid", "description", "station"):
1035 count = len(possibilities)
1036 if count <= max_results:
1037 print( "Your search is ambiguous, returning %s matches:" % count )
1038 for place in sorted(possibilities):
1039 if places.has_section(place):
1043 places.get(place, "description")
1046 elif stations.has_section(place):
1050 stations.get(place, "description")
1053 elif zones.has_section(place):
1057 zones.get(place, "description")
1062 "Your search is too ambiguous, returning %s matches." % count
1069 for section in dataset.sections():
1070 if dataset.has_option(section, "station"):
1072 eval( dataset.get(section, "station") )[1]
1074 if dataset.has_option(section, "zone"):
1075 zonelist.append( eval( dataset.get(section, "zone") )[1] )
1078 scount = len(stationlist)
1079 zcount = len(zonelist)
1082 for score in scores:
1084 sranks.append( stationlist[ int( (1-score[0]) * scount ) ] )
1086 zranks.append( zonelist[ int( (1-score[0]) * zcount ) ] )
1087 description = search[1]
1088 uris["description"] = description
1090 "%s\n%s" % ( description, "-" * len(description) )
1095 stations.get( station[0], "description" )
1098 km = radian_to_km*station[1]
1099 mi = radian_to_mi*station[1]
1100 if sranks and not description.startswith("ICAO station code "):
1101 for index in range(0, len(scores)):
1102 if station[1] >= sranks[index]:
1103 score = scores[index][1]
1106 " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1108 elif searchtype == "coordinates":
1109 print( " (%.3gkm, %.3gmi)" % (km, mi) )
1112 "%s: %s" % ( zone[0], zones.get( zone[0], "description" ) )
1114 km = radian_to_km*zone[1]
1115 mi = radian_to_mi*zone[1]
1116 if zranks and not description.startswith("NWS/NOAA weather zone "):
1117 for index in range(0, len(scores)):
1118 if zone[1] >= zranks[index]:
1119 score = scores[index][1]
1122 " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1124 elif searchtype == "coordinates" and zone[0]:
1125 print( " (%.3gkm, %.3gmi)" % (km, mi) )
1128 nowstamp = "%s (%s)" % (
1130 datetime.datetime.isoformat(
1131 datetime.datetime.fromtimestamp(now),
1135 search_cache = ["\n"]
1136 search_cache.append( "[%s]\n" % search[0] )
1137 search_cache.append( "cached = %s\n" % nowstamp )
1138 for uriname in sorted(uris.keys()):
1139 search_cache.append( "%s = %s\n" % ( uriname, uris[uriname] ) )
1140 real_cachedir = os.path.expanduser(cachedir)
1141 if not os.path.exists(real_cachedir):
1142 try: os.makedirs(real_cachedir)
1143 except (IOError, OSError): pass
1144 scache_fn = os.path.join(real_cachedir, "searches")
1145 if not os.path.exists(scache_fn):
1147 [ x[1] for x in datafiles.values() ],
1150 thenstamp = "%s (%s)" % (
1152 datetime.datetime.isoformat(
1153 datetime.datetime.fromtimestamp(then),
1157 search_cache.insert(
1159 "# based on data files from: %s\n" % thenstamp
1162 scache_existing = configparser.ConfigParser()
1163 scache_existing.read(scache_fn)
1164 if not scache_existing.has_section(search[0]):
1165 scache_fd = codecs.open(scache_fn, "a", "utf-8")
1166 scache_fd.writelines(search_cache)
1168 except (IOError, OSError): pass
def closest(position, nodes, fieldname, angle=None):
    # NOTE(review): the docstring, the loop header over `nodes`, and the
    # best-match bookkeeping lines are elided in this excerpt.
    # Default search radius: the whole sphere.
    if not angle: angle = 2*math.pi
    # [elided in excerpt: for name in nodes-style loop]
        if fieldname in nodes[name]:
            node = nodes[name][fieldname]
            # Cheap rejection on latitude difference first, then longitude
            # (allowing for wrap-around at 2*pi), before the expensive acos.
            if node and abs( position[0]-node[0] ) < angle:
                if abs( position[1]-node[1] ) < angle \
                    or abs( abs( position[1]-node[1] ) - 2*math.pi ) < angle:
                    if position == node:
                    # [elided in excerpt: exact-match shortcut]
                    # Great-circle central angle via the spherical law of
                    # cosines; position/node are (lat, lon) in radians.
                        candidate = math.acos(
                            math.sin( position[0] ) * math.sin( node[0] ) \
                            + math.cos( position[0] ) \
                            * math.cos( node[0] ) \
                            * math.cos( position[1] - node[1] )
                        # [elided in excerpt: closing paren]
                        if candidate < angle:
                        # [elided in excerpt: record new best match]
    if match: match = str(match)
    # Returns the best-matching key (or a falsy match) together with its
    # angular distance.
    return (match, angle)
def gecos(formatted):
    """Convert a comma-separated latitude/longitude string into radians.

    Each of the first two fields may be decimal degrees or a
    degrees[-minutes[-seconds]] group, optionally suffixed with a
    hemisphere letter (e/n/s/w, case-insensitive); south and west values
    are negated.  Returns a tuple with the first two fields converted.
    """
    fields = formatted.split(",")
    for index in (0, 1):
        # Anchor the whole field so trailing garbage fails the match.
        degrees, _, minutes, _, seconds, hemisphere = re.match(
            r"([\+-]?\d+\.?\d*)(-(\d+))?(-(\d+))?([ensw]?)$",
            fields[index].strip().lower()
        ).groups()
        magnitude = float(degrees)
        if minutes:
            magnitude += float(minutes) / 60
        if seconds:
            magnitude += float(seconds) / 3600
        # An empty hemisphere suffix leaves the sign untouched.
        if hemisphere in ("s", "w"):
            magnitude = -magnitude
        fields[index] = math.radians(magnitude)
    return tuple(fields)
1214 import codecs, csv, datetime, hashlib, os, re, sys, tarfile, time, zipfile
1215 if pyversion("3"): import configparser
1216 else: import ConfigParser as configparser
1217 for filename in os.listdir("."):
1218 if re.match("[0-9]{4}_Gaz_counties_national.zip$", filename):
1219 gcounties_an = filename
1220 gcounties_fn = filename[:-4] + ".txt"
1221 elif re.match("[0-9]{4}_Gaz_cousubs_national.zip$", filename):
1222 gcousubs_an = filename
1223 gcousubs_fn = filename[:-4] + ".txt"
1224 elif re.match("[0-9]{4}_Gaz_place_national.zip$", filename):
1225 gplace_an = filename
1226 gplace_fn = filename[:-4] + ".txt"
1227 elif re.match("[0-9]{4}_Gaz_zcta_national.zip$", filename):
1229 gzcta_fn = filename[:-4] + ".txt"
1230 elif re.match("bp[0-9]{2}[a-z]{2}[0-9]{2}.dbx$", filename):
1231 cpfzcf_fn = filename
1232 nsdcccc_fn = "nsd_cccc.txt"
1233 ourairports_fn = "airports.csv"
1234 overrides_fn = "overrides.conf"
1235 overrideslog_fn = "overrides.log"
1239 airports_fn = "airports"
1240 places_fn = "places"
1241 stations_fn = "stations"
# Fragment of the provenance header written at the top of every generated
# file: a template listing the public-domain sources, %-formatted with the
# md5 digest and mtime-derived date of each input file.  NOTE(review): the
# opening of the template string and many interleaved lines (e.g. the
# format-argument tuple's delimiters) are elided from this excerpt — the
# lines below are string content and expression fragments, not standalone
# statements.
1246 # generated by %s on %s from these public domain sources:
1248 # https://www.census.gov/geographies/reference-files/time-series/geo/gazetteer-files.html
1254 # https://www.weather.gov/gis/ZoneCounty/
1257 # https://tgftp.nws.noaa.gov/data/
1260 # https://ourairports.com/data/
1263 # ...and these manually-generated or hand-compiled adjustments:
# Format arguments: generator name and (SOURCE_DATE_EPOCH-respecting,
# hence reproducible-build-friendly) generation date...
1269 os.path.basename( sys.argv[0] ),
1270 datetime.date.isoformat(
1271 datetime.datetime.utcfromtimestamp( int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) )
# ...followed by a digest + modification date per input file.
1273 hashlib.md5( open(gcounties_an, "rb").read() ).hexdigest(),
1274 datetime.date.isoformat(
1275 datetime.datetime.utcfromtimestamp( os.path.getmtime(gcounties_an) )
1278 hashlib.md5( open(gcousubs_an, "rb").read() ).hexdigest(),
1279 datetime.date.isoformat(
1280 datetime.datetime.utcfromtimestamp( os.path.getmtime(gcousubs_an) )
1283 hashlib.md5( open(gplace_an, "rb").read() ).hexdigest(),
1284 datetime.date.isoformat(
1285 datetime.datetime.utcfromtimestamp( os.path.getmtime(gplace_an) )
1288 hashlib.md5( open(gzcta_an, "rb").read() ).hexdigest(),
1289 datetime.date.isoformat(
1290 datetime.datetime.utcfromtimestamp( os.path.getmtime(gzcta_an) )
1293 hashlib.md5( open(cpfzcf_fn, "rb").read() ).hexdigest(),
1294 datetime.date.isoformat(
1295 datetime.datetime.utcfromtimestamp( os.path.getmtime(cpfzcf_fn) )
1298 hashlib.md5( open(nsdcccc_fn, "rb").read() ).hexdigest(),
1299 datetime.date.isoformat(
1300 datetime.datetime.utcfromtimestamp( os.path.getmtime(nsdcccc_fn) )
1303 hashlib.md5( open(ourairports_fn, "rb").read() ).hexdigest(),
1304 datetime.date.isoformat(
1305 datetime.datetime.utcfromtimestamp( os.path.getmtime(ourairports_fn) )
1308 hashlib.md5( open(overrides_fn, "rb").read() ).hexdigest(),
1309 datetime.date.isoformat(
1310 datetime.datetime.utcfromtimestamp( os.path.getmtime(overrides_fn) )
1313 hashlib.md5( open(slist_fn, "rb").read() ).hexdigest(),
1314 datetime.date.isoformat(
1315 datetime.datetime.utcfromtimestamp( os.path.getmtime(slist_fn) )
1318 hashlib.md5( open(zlist_fn, "rb").read() ).hexdigest(),
1319 datetime.date.isoformat(
1320 datetime.datetime.utcfromtimestamp( os.path.getmtime(zlist_fn) )
# Ingest the three Census gazetteer text files (counties, county
# subdivisions, places) straight out of their zip archives.  Each record
# with a GEOID, name, state and interior-point coordinates becomes a
# `places["fips<GEOID>"]` entry holding a radian-coordinate centroid and a
# "Name, ST" description.  NOTE(review): counter initialization/increment
# lines and the closing of each reader are elided from this excerpt.
1329 message = "Reading %s:%s..." % (gcounties_an, gcounties_fn)
1330 sys.stdout.write(message)
# Counties: first line of the tab-separated file is the column header.
1333 gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "r")
1334 columns = gcounties.readline().decode("utf-8").strip().split("\t")
1335 for line in gcounties:
1336 fields = line.decode("utf-8").strip().split("\t")
1337 f_geoid = fields[ columns.index("GEOID") ].strip()
1338 f_name = fields[ columns.index("NAME") ].strip()
1339 f_usps = fields[ columns.index("USPS") ].strip()
1340 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1341 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1342 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1343 fips = "fips%s" % f_geoid
1344 if fips not in places: places[fips] = {}
1345 places[fips]["centroid"] = gecos(
1346 "%s,%s" % (f_intptlat, f_intptlong)
1348 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1351 print("done (%s lines)." % count)
# County subdivisions: identical record layout and handling.
1352 message = "Reading %s:%s..." % (gcousubs_an, gcousubs_fn)
1353 sys.stdout.write(message)
1356 gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "r")
1357 columns = gcousubs.readline().decode("utf-8").strip().split("\t")
1358 for line in gcousubs:
1359 fields = line.decode("utf-8").strip().split("\t")
1360 f_geoid = fields[ columns.index("GEOID") ].strip()
1361 f_name = fields[ columns.index("NAME") ].strip()
1362 f_usps = fields[ columns.index("USPS") ].strip()
1363 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1364 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1365 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1366 fips = "fips%s" % f_geoid
1367 if fips not in places: places[fips] = {}
1368 places[fips]["centroid"] = gecos(
1369 "%s,%s" % (f_intptlat, f_intptlong)
1371 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1374 print("done (%s lines)." % count)
# Places: same layout again.  NOTE(review): the `for line in gplace:` loop
# header (original line 1381) is elided from this excerpt.
1375 message = "Reading %s:%s..." % (gplace_an, gplace_fn)
1376 sys.stdout.write(message)
1379 gplace = zipfile.ZipFile(gplace_an).open(gplace_fn, "r")
1380 columns = gplace.readline().decode("utf-8").strip().split("\t")
1382 fields = line.decode("utf-8").strip().split("\t")
1383 f_geoid = fields[ columns.index("GEOID") ].strip()
1384 f_name = fields[ columns.index("NAME") ].strip()
1385 f_usps = fields[ columns.index("USPS") ].strip()
1386 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1387 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1388 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1389 fips = "fips%s" % f_geoid
1390 if fips not in places: places[fips] = {}
1391 places[fips]["centroid"] = gecos(
1392 "%s,%s" % (f_intptlat, f_intptlong)
1394 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1397 print("done (%s lines)." % count)
# Read the station list: one ICAO identifier per line, `#` starts a
# comment.  Each station gets a METAR decoded-observation URL on the NWS
# tgftp server.  NOTE(review): the loop header and the dict assignment
# surrounding the "metar" entry are partially elided from this excerpt.
1398 message = "Reading %s..." % slist_fn
1399 sys.stdout.write(message)
1402 slist = codecs.open(slist_fn, "rU", "utf-8")
1404 icao = line.split("#")[0].strip()
1407 "metar": "https://tgftp.nws.noaa.gov/data/observations/"\
1408 + "metar/decoded/%s.TXT" % icao.upper()
1412 print("done (%s lines)." % count)
# Enrich known stations from the NSD `nsd_cccc.txt` file
# (semicolon-delimited).  Builds a "Name, ST, Country" description and,
# when missing, a location parsed from the latitude/longitude fields.
1413 message = "Reading %s..." % nsdcccc_fn
1414 sys.stdout.write(message)
1417 nsdcccc = codecs.open(nsdcccc_fn, "rU", "utf-8")
1418 for line in nsdcccc:
1420 fields = line.split(";")
1421 icao = fields[0].strip().lower()
# Only update stations already seeded from the station list.
1422 if icao in stations:
1424 name = " ".join( fields[3].strip().title().split() )
1425 if name: description.append(name)
1426 st = fields[4].strip()
1427 if st: description.append(st)
1428 country = " ".join( fields[5].strip().title().split() )
1429 if country: description.append(country)
1431 stations[icao]["description"] = ", ".join(description)
# Prefer fields 7-8 for coordinates; fall back to fields 5-6 when no
# location was derived.  NOTE(review): the guarding conditions around
# these assignments are partially elided from this excerpt.
1432 lat, lon = fields[7:9]
1434 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1435 elif "location" not in stations[icao]:
1436 lat, lon = fields[5:7]
1438 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1441 print("done (%s lines)." % count)
# Cross-reference the OurAirports CSV: map 3-letter IATA codes to known
# ICAO stations, and backfill missing station descriptions/locations from
# the airport rows.  NOTE(review): several lines (row-field extraction for
# name/country/region/lat/lon) are elided from this excerpt.
1442 message = "Reading %s..." % ourairports_fn
1443 sys.stdout.write(message)
1446 ourairports = open(ourairports_fn, "rU")
1447 for row in csv.reader(ourairports):
# Column 12 is the ICAO (gps_code) identifier, column 13 the IATA code.
1448 icao = row[12].lower()
1449 if icao in stations:
1450 iata = row[13].lower()
1451 if len(iata) == 3: airports[iata] = { "station": icao }
1452 if "description" not in stations[icao]:
1455 if name: description.append(name)
1456 municipality = row[10]
1457 if municipality: description.append(municipality)
# Strip a leading "CC-" country prefix from ISO region codes.
1462 c,r = region.split("-", 1)
1463 if c == country: region = r
1464 description.append(region)
1466 description.append(country)
1468 stations[icao]["description"] = ", ".join(description)
1469 if "location" not in stations[icao]:
1474 stations[icao]["location"] = gecos(
1475 "%s,%s" % (lat, lon)
1479 print("done (%s lines)." % count)
# Read the zone list (one zone id per line, `#` starts a comment) to seed
# the `zones` table.  NOTE(review): the loop header and the per-zone
# assignment lines are elided from this excerpt.
1480 message = "Reading %s..." % zlist_fn
1481 sys.stdout.write(message)
1484 zlist = codecs.open(zlist_fn, "rU", "utf-8")
1486 line = line.split("#")[0].strip()
1491 print("done (%s lines)." % count)
# Parse the NWS zone correlation file (pipe-delimited).  For each valid
# record, populate the zone's watch/warning/forecast product URLs on the
# NWS tgftp server, a human-readable description, and a centroid (falling
# back to the matching county's gazetteer centroid when the record's own
# coordinates yield none).
1492 message = "Reading %s..." % cpfzcf_fn
1493 sys.stdout.write(message)
1497 cpfzcf = codecs.open(cpfzcf_fn, "rU", "utf-8")
1499 fields = line.strip().split("|")
# Require state, zone number, and centroid lat/lon fields.
1500 if len(fields) == 11 \
1501 and fields[0] and fields[1] and fields[9] and fields[10]:
# Zone key is e.g. "xxz###" (state + "z" + zone number), lowercased.
1502 zone = "z".join( fields[:2] ).lower()
# NOTE(review): the zone-table/state initialization (original lines
# 1503-1505) is elided from this excerpt.
1506 zones[zone]["coastal_flood_statement"] = (
1507 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1508 "flood/coastal/%s/%s.txt" % (state.lower(), zone))
1509 zones[zone]["flash_flood_statement"] = (
1510 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1511 "flash_flood/statement/%s/%s.txt"
1512 % (state.lower(), zone))
1513 zones[zone]["flash_flood_warning"] = (
1514 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1515 "flash_flood/warning/%s/%s.txt"
1516 % (state.lower(), zone))
1517 zones[zone]["flash_flood_watch"] = (
1518 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1519 "flash_flood/watch/%s/%s.txt" % (state.lower(), zone))
1520 zones[zone]["flood_statement"] = (
1521 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1522 "flood/statement/%s/%s.txt" % (state.lower(), zone))
1523 zones[zone]["flood_warning"] = (
1524 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1525 "flood/warning/%s/%s.txt" % (state.lower(), zone))
1526 zones[zone]["severe_thunderstorm_warning"] = (
1527 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1528 "thunderstorm/%s/%s.txt" % (state.lower(), zone))
1529 zones[zone]["severe_weather_statement"] = (
1530 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1531 "severe_weather_stmt/%s/%s.txt"
1532 % (state.lower(), zone))
1533 zones[zone]["short_term_forecast"] = (
1534 "https://tgftp.nws.noaa.gov/data/forecasts/nowcast/"
1535 "%s/%s.txt" % (state.lower(), zone))
1536 zones[zone]["special_weather_statement"] = (
1537 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1538 "special_weather_stmt/%s/%s.txt"
1539 % (state.lower(), zone))
1540 zones[zone]["state_forecast"] = (
1541 "https://tgftp.nws.noaa.gov/data/forecasts/state/"
1542 "%s/%s.txt" % (state.lower(), zone))
1543 zones[zone]["urgent_weather_message"] = (
1544 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1545 "non_precip/%s/%s.txt" % (state.lower(), zone))
1546 zones[zone]["zone_forecast"] = (
1547 "https://tgftp.nws.noaa.gov/data/forecasts/zone/"
1548 "%s/%s.txt" % (state.lower(), zone))
# Build "Description[, County County], ST, US" from the record and its
# FIPS county code.  NOTE(review): the county extraction (original
# lines 1551-1555) is partially elided from this excerpt.
1549 description = fields[3].strip()
1550 fips = "fips%s"%fields[6]
1553 if description.endswith(county):
1554 description += " County"
1556 description += ", %s County" % county
1557 description += ", %s, US" % state
1558 zones[zone]["description"] = description
1559 zones[zone]["centroid"] = gecos( ",".join( fields[9:11] ) )
# Fall back to the county centroid from the gazetteer data.
1560 if fips in places and not zones[zone]["centroid"]:
1561 zones[zone]["centroid"] = places[fips]["centroid"]
1564 print("done (%s lines)." % count)
# Ingest the ZCTA (ZIP Code Tabulation Area) gazetteer file: each GEOID
# with interior-point coordinates becomes a `zctas` entry with a radian
# centroid.  NOTE(review): the `for line in gzcta:` loop header (original
# line 1571) is elided from this excerpt.
1565 message = "Reading %s:%s..." % (gzcta_an, gzcta_fn)
1566 sys.stdout.write(message)
1569 gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "r")
1570 columns = gzcta.readline().decode("utf-8").strip().split("\t")
1572 fields = line.decode("utf-8").strip().split("\t")
1573 f_geoid = fields[ columns.index("GEOID") ].strip()
1574 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1575 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1576 if f_geoid and f_intptlat and f_intptlong:
1577 if f_geoid not in zctas: zctas[f_geoid] = {}
1578 zctas[f_geoid]["centroid"] = gecos(
1579 "%s,%s" % (f_intptlat, f_intptlong)
1583 print("done (%s lines)." % count)
# Apply hand-maintained overrides from overrides.conf.  Section names are
# dispatched on shape: 3 letters = airport, 4 alphanumerics = station,
# 5 digits = ZCTA, AAz### = zone, fips### = place.  A leading "-" on the
# section name requests deletion.  Every action is appended to an
# overrides log that is rotated and rewritten at the end.
# NOTE(review): counter initialization/increment lines and some else/add
# branches are elided from this excerpt.
1584 message = "Reading %s..." % overrides_fn
1585 sys.stdout.write(message)
# readfp() is the Python-2-era API; it was deprecated in favor of
# read_file() on Python 3.2+.
1591 overrides = configparser.ConfigParser()
1592 overrides.readfp( codecs.open(overrides_fn, "r", "utf8") )
1594 for section in overrides.sections():
# Leading "-" flags the section for removal rather than add/change.
1597 if section.startswith("-"):
1598 section = section[1:]
1600 else: delete = False
# Airports: exactly three letters.
1601 if re.match("[A-Za-z]{3}$", section):
1603 if section in airports:
1604 del( airports[section] )
1605 logact = "removed airport %s" % section
1608 logact = "tried to remove nonexistent airport %s" % section
1610 if section in airports:
1611 logact = "changed airport %s" % section
1614 airports[section] = {}
1615 logact = "added airport %s" % section
1617 for key,value in overrides.items(section):
1618 if key in airports[section]: chgopt += 1
# Coordinate-valued options are stored as Python tuple literals in the
# overrides file, hence the eval() on this trusted local input.
1620 if key in ("centroid", "location"):
1621 airports[section][key] = eval(value)
1623 airports[section][key] = value
1624 if addopt and chgopt:
1625 logact += " (+%s/!%s options)" % (addopt, chgopt)
1626 elif addopt: logact += " (+%s options)" % addopt
1627 elif chgopt: logact += " (!%s options)" % chgopt
# Stations: exactly four alphanumerics (ICAO identifiers).
1628 elif re.match("[A-Za-z0-9]{4}$", section):
1630 if section in stations:
1631 del( stations[section] )
1632 logact = "removed station %s" % section
1635 logact = "tried to remove nonexistent station %s" % section
1637 if section in stations:
1638 logact = "changed station %s" % section
1641 stations[section] = {}
1642 logact = "added station %s" % section
1644 for key,value in overrides.items(section):
1645 if key in stations[section]: chgopt += 1
1647 if key in ("centroid", "location"):
1648 stations[section][key] = eval(value)
1650 stations[section][key] = value
1651 if addopt and chgopt:
1652 logact += " (+%s/!%s options)" % (addopt, chgopt)
1653 elif addopt: logact += " (+%s options)" % addopt
1654 elif chgopt: logact += " (!%s options)" % chgopt
# ZCTAs: exactly five digits.
1655 elif re.match("[0-9]{5}$", section):
1657 if section in zctas:
1658 del( zctas[section] )
1659 logact = "removed zcta %s" % section
1662 logact = "tried to remove nonexistent zcta %s" % section
1664 if section in zctas:
1665 logact = "changed zcta %s" % section
1669 logact = "added zcta %s" % section
1671 for key,value in overrides.items(section):
1672 if key in zctas[section]: chgopt += 1
1674 if key in ("centroid", "location"):
1675 zctas[section][key] = eval(value)
1677 zctas[section][key] = value
1678 if addopt and chgopt:
1679 logact += " (+%s/!%s options)" % (addopt, chgopt)
1680 elif addopt: logact += " (+%s options)" % addopt
1681 elif chgopt: logact += " (!%s options)" % chgopt
# Zones: two letters + "z" + three digits.
1682 elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", section):
1684 if section in zones:
1685 del( zones[section] )
1686 logact = "removed zone %s" % section
1689 logact = "tried to remove nonexistent zone %s" % section
1691 if section in zones:
1692 logact = "changed zone %s" % section
1696 logact = "added zone %s" % section
1698 for key,value in overrides.items(section):
1699 if key in zones[section]: chgopt += 1
1701 if key in ("centroid", "location"):
1702 zones[section][key] = eval(value)
1704 zones[section][key] = value
1705 if addopt and chgopt:
1706 logact += " (+%s/!%s options)" % (addopt, chgopt)
1707 elif addopt: logact += " (+%s options)" % addopt
1708 elif chgopt: logact += " (!%s options)" % chgopt
# Places: "fips" followed by digits.
1709 elif re.match("fips[0-9]+$", section):
1711 if section in places:
1712 del( places[section] )
1713 logact = "removed place %s" % section
1716 logact = "tried to remove nonexistent place %s" % section
1718 if section in places:
1719 logact = "changed place %s" % section
1722 places[section] = {}
1723 logact = "added place %s" % section
1725 for key,value in overrides.items(section):
1726 if key in places[section]: chgopt += 1
1728 if key in ("centroid", "location"):
1729 places[section][key] = eval(value)
1731 places[section][key] = value
1732 if addopt and chgopt:
1733 logact += " (+%s/!%s options)" % (addopt, chgopt)
1734 elif addopt: logact += " (+%s options)" % addopt
1735 elif chgopt: logact += " (!%s options)" % chgopt
1737 overrideslog.append("%s\n" % logact)
# Rotate any previous overrides log, then write a fresh one with the
# standard copyright header followed by the accumulated actions.
1739 if os.path.exists(overrideslog_fn):
1740 os.rename(overrideslog_fn, "%s_old"%overrideslog_fn)
1741 overrideslog_fd = codecs.open(overrideslog_fn, "w", "utf8")
1743 overrideslog_fd.write(
1744 '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
1745 '# use, copy, modify, and distribute this software is granted under terms\n'
1746 '# provided in the LICENSE file distributed with this software.\n\n'
1747 % time.gmtime().tm_year)
1748 overrideslog_fd.writelines(overrideslog)
1749 overrideslog_fd.close()
1750 print("done (%s overridden sections: +%s/-%s/!%s)." % (
# Correlation pass: for every place, ZCTA and zone centroid (and every
# station location), find the closest station and/or zone within 0.1
# radians via closest(), while printing a 0-100% progress meter in 2%
# steps against an upper-bound estimate of total correlations.
# NOTE(review): the loop headers over places/zctas, counter increments,
# and the meter-separator writes between milestone percentages are elided
# from this excerpt.
1756 estimate = 2*len(places) + len(stations) + 2*len(zctas) + len(zones)
1758 "Correlating places, stations, ZCTAs and zones (upper bound is %s):" % \
# Milestones 0..50 each represent 2% of estimated progress.
1762 milestones = list( range(51) )
1764 sys.stdout.write(message)
# Places: nearest station and nearest zone for each centroid.
1767 centroid = places[fips]["centroid"]
1769 station = closest(centroid, stations, "location", 0.1)
1771 places[fips]["station"] = station
1774 level = int(50*count/estimate)
1775 if level in milestones:
1776 for remaining in milestones[:milestones.index(level)+1]:
1779 sys.stdout.write(message)
1782 message = "%s%%" % (remaining*2,)
1783 sys.stdout.write(message)
1785 milestones.remove(remaining)
1787 zone = closest(centroid, zones, "centroid", 0.1)
1789 places[fips]["zone"] = zone
1792 level = int(50*count/estimate)
1793 if level in milestones:
1794 for remaining in milestones[:milestones.index(level)+1]:
1797 sys.stdout.write(message)
1800 message = "%s%%" % (remaining*2,)
1801 sys.stdout.write(message)
1803 milestones.remove(remaining)
# Stations: nearest zone for each located station.
1804 for station in stations:
1805 if "location" in stations[station]:
1806 location = stations[station]["location"]
1808 zone = closest(location, zones, "centroid", 0.1)
1810 stations[station]["zone"] = zone
1813 level = int(50*count/estimate)
1814 if level in milestones:
1815 for remaining in milestones[:milestones.index(level)+1]:
1818 sys.stdout.write(message)
1821 message = "%s%%" % (remaining*2,)
1822 sys.stdout.write(message)
1824 milestones.remove(remaining)
# ZCTAs: nearest station and nearest zone for each centroid.
1825 for zcta in zctas.keys():
1826 centroid = zctas[zcta]["centroid"]
1828 station = closest(centroid, stations, "location", 0.1)
1830 zctas[zcta]["station"] = station
1833 level = int(50*count/estimate)
1834 if level in milestones:
1835 for remaining in milestones[ : milestones.index(level)+1 ]:
1838 sys.stdout.write(message)
1841 message = "%s%%" % (remaining*2,)
1842 sys.stdout.write(message)
1844 milestones.remove(remaining)
1846 zone = closest(centroid, zones, "centroid", 0.1)
1848 zctas[zcta]["zone"] = zone
1851 level = int(50*count/estimate)
1852 if level in milestones:
1853 for remaining in milestones[:milestones.index(level)+1]:
1856 sys.stdout.write(message)
1859 message = "%s%%" % (remaining*2,)
1860 sys.stdout.write(message)
1862 milestones.remove(remaining)
# Zones: nearest station for each zone that has a centroid.
1863 for zone in zones.keys():
1864 if "centroid" in zones[zone]:
1865 centroid = zones[zone]["centroid"]
1867 station = closest(centroid, stations, "location", 0.1)
1869 zones[zone]["station"] = station
1872 level = int(50*count/estimate)
1873 if level in milestones:
1874 for remaining in milestones[:milestones.index(level)+1]:
1877 sys.stdout.write(message)
1880 message = "%s%%" % (remaining*2,)
1881 sys.stdout.write(message)
1883 milestones.remove(remaining)
# Flush any milestones never reached (the estimate is an upper bound).
1884 for remaining in milestones:
1887 sys.stdout.write(message)
1890 message = "%s%%" % (remaining*2,)
1891 sys.stdout.write(message)
1893 print("\n done (%s correlations)." % count)
# Write the five generated config files (airports, places, stations,
# zctas, zones).  Each writer rotates any existing file to "<name>_old",
# writes the provenance header, then one INI section per key with values
# normalized: floats to 7 decimal places, tuples rendered element-wise.
# NOTE(review): `elements = []` initializations and section counters are
# elided from this excerpt.
1894 message = "Writing %s..." % airports_fn
1895 sys.stdout.write(message)
1898 if os.path.exists(airports_fn):
1899 os.rename(airports_fn, "%s_old"%airports_fn)
1900 airports_fd = codecs.open(airports_fn, "w", "utf8")
1901 airports_fd.write(header)
1902 for airport in sorted( airports.keys() ):
1903 airports_fd.write("\n\n[%s]" % airport)
1904 for key, value in sorted( airports[airport].items() ):
1905 if type(value) is float: value = "%.7f"%value
1906 elif type(value) is tuple:
1908 for element in value:
1909 if type(element) is float: elements.append("%.7f"%element)
1910 else: elements.append( repr(element) )
1911 value = "(%s)"%", ".join(elements)
1912 airports_fd.write( "\n%s = %s" % (key, value) )
1914 airports_fd.write("\n")
1916 print("done (%s sections)." % count)
1917 message = "Writing %s..." % places_fn
1918 sys.stdout.write(message)
1921 if os.path.exists(places_fn):
1922 os.rename(places_fn, "%s_old"%places_fn)
1923 places_fd = codecs.open(places_fn, "w", "utf8")
1924 places_fd.write(header)
1925 for fips in sorted( places.keys() ):
1926 places_fd.write("\n\n[%s]" % fips)
1927 for key, value in sorted( places[fips].items() ):
1928 if type(value) is float: value = "%.7f"%value
1929 elif type(value) is tuple:
1931 for element in value:
1932 if type(element) is float: elements.append("%.7f"%element)
1933 else: elements.append( repr(element) )
1934 value = "(%s)"%", ".join(elements)
1935 places_fd.write( "\n%s = %s" % (key, value) )
1937 places_fd.write("\n")
1939 print("done (%s sections)." % count)
1940 message = "Writing %s..." % stations_fn
1941 sys.stdout.write(message)
1944 if os.path.exists(stations_fn):
1945 os.rename(stations_fn, "%s_old"%stations_fn)
1946 stations_fd = codecs.open(stations_fn, "w", "utf-8")
1947 stations_fd.write(header)
1948 for station in sorted( stations.keys() ):
1949 stations_fd.write("\n\n[%s]" % station)
1950 for key, value in sorted( stations[station].items() ):
1951 if type(value) is float: value = "%.7f"%value
1952 elif type(value) is tuple:
1954 for element in value:
1955 if type(element) is float: elements.append("%.7f"%element)
1956 else: elements.append( repr(element) )
1957 value = "(%s)"%", ".join(elements)
# Stations may carry bytes values (read from binary sources); decode
# before writing to the UTF-8 text file.
1958 if type(value) is bytes:
1959 value = value.decode("utf-8")
1960 stations_fd.write( "\n%s = %s" % (key, value) )
1962 stations_fd.write("\n")
1964 print("done (%s sections)." % count)
1965 message = "Writing %s..." % zctas_fn
1966 sys.stdout.write(message)
1969 if os.path.exists(zctas_fn):
1970 os.rename(zctas_fn, "%s_old"%zctas_fn)
1971 zctas_fd = codecs.open(zctas_fn, "w", "utf8")
1972 zctas_fd.write(header)
1973 for zcta in sorted( zctas.keys() ):
1974 zctas_fd.write("\n\n[%s]" % zcta)
1975 for key, value in sorted( zctas[zcta].items() ):
1976 if type(value) is float: value = "%.7f"%value
1977 elif type(value) is tuple:
1979 for element in value:
1980 if type(element) is float: elements.append("%.7f"%element)
1981 else: elements.append( repr(element) )
1982 value = "(%s)"%", ".join(elements)
1983 zctas_fd.write( "\n%s = %s" % (key, value) )
1985 zctas_fd.write("\n")
1987 print("done (%s sections)." % count)
1988 message = "Writing %s..." % zones_fn
1989 sys.stdout.write(message)
1992 if os.path.exists(zones_fn):
1993 os.rename(zones_fn, "%s_old"%zones_fn)
1994 zones_fd = codecs.open(zones_fn, "w", "utf8")
1995 zones_fd.write(header)
1996 for zone in sorted( zones.keys() ):
1997 zones_fd.write("\n\n[%s]" % zone)
1998 for key, value in sorted( zones[zone].items() ):
1999 if type(value) is float: value = "%.7f"%value
2000 elif type(value) is tuple:
2002 for element in value:
2003 if type(element) is float: elements.append("%.7f"%element)
2004 else: elements.append( repr(element) )
2005 value = "(%s)"%", ".join(elements)
2006 zones_fd.write( "\n%s = %s" % (key, value) )
2008 zones_fd.write("\n")
2010 print("done (%s sections)." % count)
# QA pass: re-read the freshly written config files through configparser
# and audit them for missing options (centroid/description/location/
# metar/station/forecast), dangling airport->station references, and
# pairs of zones whose centroids lie within one km of each other (using
# closest() and the radian_to_km conversion).  Findings go to a QA log
# (rotated like the overrides log) and a per-category summary is printed.
# NOTE(review): `qalog = []` initialization, the else/add branches, and
# the qalog_fd header-write open are elided from this excerpt.
2011 message = "Starting QA check..."
2012 sys.stdout.write(message)
2014 airports = configparser.ConfigParser()
2015 airports.read(airports_fn)
2016 places = configparser.ConfigParser()
2017 places.read(places_fn)
2018 stations = configparser.ConfigParser()
2019 stations.read(stations_fn)
2020 zctas = configparser.ConfigParser()
2021 zctas.read(zctas_fn)
2022 zones = configparser.ConfigParser()
2023 zones.read(zones_fn)
2025 places_nocentroid = 0
2026 places_nodescription = 0
2027 for place in sorted( places.sections() ):
2028 if not places.has_option(place, "centroid"):
2029 qalog.append("%s: no centroid\n" % place)
2030 places_nocentroid += 1
2031 if not places.has_option(place, "description"):
2032 qalog.append("%s: no description\n" % place)
2033 places_nodescription += 1
2034 stations_nodescription = 0
2035 stations_nolocation = 0
2036 stations_nometar = 0
2037 for station in sorted( stations.sections() ):
2038 if not stations.has_option(station, "description"):
2039 qalog.append("%s: no description\n" % station)
2040 stations_nodescription += 1
2041 if not stations.has_option(station, "location"):
2042 qalog.append("%s: no location\n" % station)
2043 stations_nolocation += 1
2044 if not stations.has_option(station, "metar"):
2045 qalog.append("%s: no metar\n" % station)
2046 stations_nometar += 1
2047 airports_badstation = 0
2048 airports_nostation = 0
2049 for airport in sorted( airports.sections() ):
2050 if not airports.has_option(airport, "station"):
2051 qalog.append("%s: no station\n" % airport)
2052 airports_nostation += 1
# Verify the referenced station actually exists in the stations file.
2054 station = airports.get(airport, "station")
2055 if station not in stations.sections():
2056 qalog.append( "%s: bad station %s\n" % (airport, station) )
2057 airports_badstation += 1
2058 zctas_nocentroid = 0
2059 for zcta in sorted( zctas.sections() ):
2060 if not zctas.has_option(zcta, "centroid"):
2061 qalog.append("%s: no centroid\n" % zcta)
2062 zctas_nocentroid += 1
2063 zones_nocentroid = 0
2064 zones_nodescription = 0
2065 zones_noforecast = 0
2066 zones_overlapping = 0
# Build an in-memory centroid table so each zone can be compared
# against all the others (minus itself) via closest().
2068 for zone in zones.sections():
2069 if zones.has_option(zone, "centroid"):
# Centroids were serialized as Python tuple literals; eval() restores
# them (trusted, locally generated input).
2071 "centroid": eval( zones.get(zone, "centroid") )
2073 for zone in sorted( zones.sections() ):
2074 if zones.has_option(zone, "centroid"):
2075 zonetable_local = zonetable.copy()
2076 del( zonetable_local[zone] )
2077 centroid = eval( zones.get(zone, "centroid") )
2079 nearest = closest(centroid, zonetable_local, "centroid", 0.1)
# closest() returns (key, distance-in-radians); flag near-duplicates.
2080 if nearest[1]*radian_to_km < 1:
2081 qalog.append( "%s: within one km of %s\n" % (
2085 zones_overlapping += 1
2087 qalog.append("%s: no centroid\n" % zone)
2088 zones_nocentroid += 1
2089 if not zones.has_option(zone, "description"):
2090 qalog.append("%s: no description\n" % zone)
2091 zones_nodescription += 1
2092 if not zones.has_option(zone, "zone_forecast"):
2093 qalog.append("%s: no forecast\n" % zone)
2094 zones_noforecast += 1
# Rotate and write the QA log with the standard copyright header.
2095 if os.path.exists(qalog_fn):
2096 os.rename(qalog_fn, "%s_old"%qalog_fn)
2097 qalog_fd = codecs.open(qalog_fn, "w", "utf8")
2100 '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
2101 '# use, copy, modify, and distribute this software is granted under terms\n'
2102 '# provided in the LICENSE file distributed with this software.\n\n'
2103 % time.gmtime().tm_year)
2104 qalog_fd.writelines(qalog)
# Print a category-by-category summary of any issues found.
2107 print("issues found (see %s for details):"%qalog_fn)
2108 if airports_badstation:
2109 print(" %s airports with invalid station"%airports_badstation)
2110 if airports_nostation:
2111 print(" %s airports with no station"%airports_nostation)
2112 if places_nocentroid:
2113 print(" %s places with no centroid"%places_nocentroid)
2114 if places_nodescription:
2115 print(" %s places with no description"%places_nodescription)
2116 if stations_nodescription:
2117 print(" %s stations with no description"%stations_nodescription)
2118 if stations_nolocation:
2119 print(" %s stations with no location"%stations_nolocation)
2120 if stations_nometar:
2121 print(" %s stations with no METAR"%stations_nometar)
2122 if zctas_nocentroid:
2123 print(" %s ZCTAs with no centroid"%zctas_nocentroid)
2124 if zones_nocentroid:
2125 print(" %s zones with no centroid"%zones_nocentroid)
2126 if zones_nodescription:
2127 print(" %s zones with no description"%zones_nodescription)
2128 if zones_noforecast:
2129 print(" %s zones with no forecast"%zones_noforecast)
2130 if zones_overlapping:
2131 print(" %s zones within one km of another"%zones_overlapping)
2132 else: print("no issues found.")
2133 print("Indexing complete!")