1 """Contains various object definitions needed by the weather utility."""
3 weather_copyright = """\
4 # Copyright (c) 2006-2020 Jeremy Stanley <fungi@yuggoth.org>. Permission to
5 # use, copy, modify, and distribute this software is granted under terms
6 # provided in the LICENSE file distributed with this software.
# Version string reported by the command line --version option.
weather_version = "2.4.1"
# Great-circle conversion factors: multiply an angular separation in
# radians by these (Earth mean radius figures) to get kilometers or
# statute miles respectively (used for station/zone proximity output).
radian_to_km = 6372.795484
radian_to_mi = 3959.871528
def pyversion(ref=None):
    """Determine the Python version and optionally compare to a reference."""
    # NOTE(review): several interior lines are elided in this view; the
    # fragments below read the interpreter version and split out the
    # (major, minor) components of both it and *ref* for comparison.
    ver = platform.python_version()
    # presumably each generator feeds a list built for an ordered
    # comparison against the reference version -- confirm against the
    # complete source.
    int(x) for x in ver.split(".")[:2]
    int(x) for x in ref.split(".")[:2]
    """An object to contain selection data."""
    # NOTE(review): the class statement and the __init__ def line are not
    # visible in this view; the lines below are the constructor body.
    """Store the config, options and arguments."""
    self.config = get_config()
    self.options, self.arguments = get_options(self.config)
    # Merge previously cached search results into the config, unless
    # caching is disabled or a long listing was requested.
    if self.get_bool("cache") and self.get_bool("cache_search") \
        and not self.get_bool("longlist"):
        integrate_search_cache(
    # Fold the deprecated --id and --city/--st options into the ordinary
    # positional-argument path, warning that they will go away.
    if not self.arguments:
        if "id" in self.options.__dict__ \
            and self.options.__dict__["id"]:
            self.arguments.append( self.options.__dict__["id"] )
            del( self.options.__dict__["id"] )
            message = "WARNING: the --id option is deprecated and will eventually be removed\n"
            sys.stderr.write(message)
        elif "city" in self.options.__dict__ \
            and self.options.__dict__["city"] \
            and "st" in self.options.__dict__ \
            and self.options.__dict__["st"]:
            # city and st are combined into a single search argument;
            # the joining expression itself is elided in this view.
            self.arguments.append(
                self.options.__dict__["city"],
                self.options.__dict__["st"]
            del( self.options.__dict__["city"] )
            del( self.options.__dict__["st"] )
            message = "WARNING: the --city/--st options are deprecated and will eventually be removed\n"
            sys.stderr.write(message)
def get(self, option, argument=None):
    """Retrieve data from the config or options."""
    # Aliases may no longer carry city/id/st options; drop such sections
    # entirely and warn rather than honoring stale data.
    if self.config.has_section(argument) and (
        self.config.has_option(argument, "city") \
        or self.config.has_option(argument, "id") \
        or self.config.has_option(argument, "st")
        self.config.remove_section(argument)
        message = "WARNING: the city/id/st options are now unsupported in aliases\n"
        sys.stderr.write(message)
    # Unknown argument: run a search to guess its URIs, then memoize the
    # result as a new config section so later lookups are cheap.
    if not self.config.has_section(argument):
        # NOTE(review): these keyword arguments belong to an elided call
        # (presumably guess(...)) whose result is bound to `guessed` --
        # confirm against the complete source.
        path=self.get("setpath"),
        info=self.get("info"),
        self.get("cache") and self.get("cache_search")
        cachedir=self.get("cachedir"),
        quiet=self.get_bool("quiet")
        self.config.add_section(argument)
        for item in guessed.items():
            self.config.set(argument, *item)
    # Config data takes precedence over command line options.
    if self.config.has_option(argument, option):
        return self.config.get(argument, option)
    if option in self.options.__dict__:
        return self.options.__dict__[option]
    # Nothing found anywhere: warn (returns None implicitly).
    message = "WARNING: no URI defined for %s\n" % option
    sys.stderr.write(message)
def get_bool(self, option, argument=None):
    """Get data and coerce to a boolean if necessary."""
    # Mimic configparser's getboolean() method by treating
    # false/no/off/0 as False and true/yes/on/1 as True values,
    value = self.get(option, argument)
    if isinstance(value, bool):
        # NOTE(review): the return statements for the bool/str branches
        # are elided in this view.
    if isinstance(value, str):
        vlower = value.lower()
        if vlower in ('false', 'no', 'off', '0'):
        elif vlower in ('true', 'yes', 'on', '1'):
    # Anything else is a configuration error worth surfacing loudly.
    raise ValueError("Not a boolean: %s" % value)
def getint(self, option, argument=None):
    """Get data and coerce to an integer if necessary."""
    value = self.get(option, argument)
    # Falsy values (None, empty string) skip the coercion; the line(s)
    # following this one, if any, are not visible in this view -- confirm
    # what is returned in that case.
    if value: return int(value)
    """Average a list of coordinates."""
    # NOTE(review): the def line and accumulation loop are elided here;
    # x, y and count are presumably summed from the coordinate list --
    # confirm against the complete source.
    return (x/count, y/count)
def filter_units(line, units="imperial"):
    """Filter or convert units in a line of text between US/UK and metric."""
    # NOTE(review): the re.match(...) calls below are each missing their
    # closing lines (and any flags) in this view; the regex literals are
    # not raw strings -- worth confirming/normalizing in the full source.
    # filter lines with both pressures in the form of "X inches (Y hPa)" or
        "(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
        preamble, in_hg, i_fr, i_un, hpa, h_fr, trailer = dual_p.groups()
        if units == "imperial": line = preamble + in_hg + trailer
        elif units == "metric": line = preamble + hpa + trailer
    # filter lines with both temperatures in the form of "X F (Y C)"
        "(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
        preamble, fahrenheit, f_fr, celsius, c_fr, trailer = dual_t.groups()
        if units == "imperial": line = preamble + fahrenheit + trailer
        elif units == "metric": line = preamble + celsius + trailer
    # if metric is desired, convert distances in the form of "X mile(s)" to
    if units == "metric":
        imperial_d = re.match(
            "(.* )(\d+)( mile\(s\))(.*)",
        preamble, mi, m_u, trailer = imperial_d.groups()
        # 1.609344 km per statute mile, rounded to whole kilometers
        line = preamble + str(int(round(int(mi)*1.609344))) \
            + " kilometer(s)" + trailer
    # filter speeds in the form of "X MPH (Y KT)" to just "X MPH"; if metric is
    # desired, convert to "Z KPH"
    imperial_s = re.match(
        "(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
        preamble, mph, m_u, kt, trailer = imperial_s.groups()
        if units == "imperial": line = preamble + mph + m_u + trailer
        elif units == "metric":
            line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
    # NOTE(review): this second identical MPH/KT pass apparently handles a
    # second occurrence on the same line -- confirm.
    imperial_s = re.match(
        "(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
        preamble, mph, m_u, kt, trailer = imperial_s.groups()
        if units == "imperial": line = preamble + mph + m_u + trailer
        elif units == "metric":
            line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
    # if imperial is desired, qualify given forcast temperatures like "X F"; if
    # metric is desired, convert to "Y C"
    imperial_t = re.match(
        "(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
        preamble, parameter, fahrenheit, sep, trailer = imperial_t.groups()
        if units == "imperial":
            line = preamble + parameter + fahrenheit + " F" + sep + trailer
        elif units == "metric":
            line = preamble + parameter \
                + str(int(round((int(fahrenheit)-32)*5/9))) + " C" + sep \
    # hand off the resulting line
    """Return a string containing the results of a URI GET."""
    # NOTE(review): the def line and several interior lines are elided.
    # Python 3 / Python 2 compatibility shim for the urllib API:
    import urllib, urllib.error, urllib.request
    URLError = urllib.error.URLError
    urlopen = urllib.request.urlopen
    import urllib2 as urllib
    URLError = urllib.URLError
    urlopen = urllib.urlopen
    # Per-URI data cache lives under <cachedir>/datacache.
    dcachedir = os.path.join( os.path.expanduser(cachedir), "datacache" )
    if not os.path.exists(dcachedir):
        # best-effort: an uncreatable cache dir just disables caching
        try: os.makedirs(dcachedir)
        except (IOError, OSError): pass
    # Cache filename is derived from the URI with path separators mangled.
    dcache_fn = os.path.join(
        uri.split(":",1)[1].replace("/","_")
    # Serve from cache only while the file's mtime is within cacheage
    # seconds of now (and not in the future).
    if cache_data and os.access(dcache_fn, os.R_OK) \
        and now-cacheage < os.stat(dcache_fn).st_mtime <= now:
        dcache_fd = open(dcache_fn)
        data = dcache_fd.read()
    data = urlopen(uri).read().decode("utf-8")
    # On retrieval failure, either stay silent or report, per ignore_fail.
    if ignore_fail: return ""
    sys.stderr.write("%s error: failed to retrieve\n %s\n\n" % (
        os.path.basename( sys.argv[0] ), uri))
    # Some data sources are HTML with the plain text wrapped in pre tags
    data = data[data.find("<pre>")+5:data.find("</pre>")]
    # Write-back to the data cache, again best-effort only.
    dcache_fd = codecs.open(dcache_fn, "w", "utf-8")
    dcache_fd.write(data)
    except (IOError, OSError): pass
    """Return a summarized METAR for the specified station."""
    # NOTE(review): the def line and several interior lines are elided.
    message = "%s error: METAR URI required for conditions\n" % \
        os.path.basename( sys.argv[0] )
    sys.stderr.write(message)
    # part of an elided retrieval call:
        cache_data=cache_data,
    if pyversion("3") and type(metar) is bytes: metar = metar.decode("utf-8")
    # --verbose returns the raw decoded feed untouched.
    if verbose: return metar
    lines = metar.split("\n")
    # Default list of condition headers to report (fragment of an elided
    # string concatenation):
        "relative_humidity," \
        + "precipitation_last_hour," \
        + "sky conditions," \
    # Header matching is done case-insensitively with underscores mapped
    # to spaces.
    headerlist = headers.lower().replace("_"," ").split(",")
    title = "Current conditions at %s"
    # First feed line carries the station placename, second the timestamp.
    place = lines[0].split(", ")
        place = "%s, %s" % ( place[0].title(), place[1] )
    else: place = "<UNKNOWN>"
    output.append(title%place)
    output.append("Last updated " + lines[1])
    for header in headerlist:
            if line.lower().startswith(header + ":"):
                # strip a trailing ":NN" remark qualifier if present
                if re.match(r".*:\d+$", line): line = line[:line.rfind(":")]
                if imperial: line = filter_units(line, units="imperial")
                elif metric: line = filter_units(line, units="metric")
                # quiet mode skips indentation of detail lines
                if quiet: output.append(line)
                else: output.append(" " + line)
    "(no conditions matched your header list, try with --verbose)"
    return "\n".join(output)
    """Return alert notice for the specified URI."""
    # NOTE(review): the def line and several interior lines are elided.
    # part of an elided retrieval call:
        cache_data=cache_data,
    if pyversion("3") and type(alert) is bytes: alert = alert.decode("utf-8")
    if verbose: return alert
    # A feed without the NWS banner is not a current alert product.
    if alert.find("\nNATIONAL WEATHER SERVICE") == -1:
    lines = alert.split("\n")
    # Timestamp for comparing against embedded "Expires:YYYYmmddHHMM"
    # lines so stale notices can be muted.
    valid_time = time.strftime("%Y%m%d%H%M")
        if line.startswith("Expires:") \
            and "Expires:" + valid_time > line:
        if muted and line.startswith("NATIONAL WEATHER SERVICE"):
        if line and not muted:
            if quiet: output.append(line)
            else: output.append(" " + line)
    return "\n".join(output)
def get_options(config):
    """Parse the options passed on the command line."""
    # NOTE(review): many dest=/action= lines of the add_option calls are
    # elided in this view; each option's default falls back to the
    # [default] section of the config when present.
    # for optparse's builtin -h/--help option
    usage = \
        "usage: %prog [options] [alias1|search1 [alias2|search2 [...]]]"
    # for optparse's builtin --version option
    verstring = "%prog " + weather_version
    option_parser = optparse.OptionParser(usage=usage, version=verstring)
    # separate options object from list of arguments and return both
    # the -a/--alert option
    if config.has_option("default", "alert"):
        default_alert = config.getboolean("default", "alert")
    else: default_alert = False
    option_parser.add_option("-a", "--alert",
        default=default_alert,
        help="include local alert notices")
    # the --atypes option
    if config.has_option("default", "atypes"):
        default_atypes = config.get("default", "atypes")
        "coastal_flood_statement," \
        + "flash_flood_statement," \
        + "flash_flood_warning," \
        + "flash_flood_watch," \
        + "flood_statement," \
        + "severe_thunderstorm_warning," \
        + "severe_weather_statement," \
        + "special_weather_statement," \
        + "urgent_weather_message"
    option_parser.add_option("--atypes",
        default=default_atypes,
        help="list of alert notification types to display")
    # the --build-sets option
    option_parser.add_option("--build-sets",
        help="(re)build location correlation sets")
    # the --cacheage option
    if config.has_option("default", "cacheage"):
        default_cacheage = config.getint("default", "cacheage")
    else: default_cacheage = 900
    option_parser.add_option("--cacheage",
        default=default_cacheage,
        help="duration in seconds to refresh cached data")
    # the --cachedir option
    if config.has_option("default", "cachedir"):
        default_cachedir = config.get("default", "cachedir")
    else: default_cachedir = "~/.weather"
    option_parser.add_option("--cachedir",
        default=default_cachedir,
        help="directory for storing cached searches and data")
    # the -f/--forecast option
    if config.has_option("default", "forecast"):
        default_forecast = config.getboolean("default", "forecast")
    else: default_forecast = False
    option_parser.add_option("-f", "--forecast",
        default=default_forecast,
        help="include a local forecast")
    # the --headers option
    if config.has_option("default", "headers"):
        default_headers = config.get("default", "headers")
        + "relative_humidity," \
        + "sky_conditions," \
        + "precipitation_last_hour"
    option_parser.add_option("--headers",
        default=default_headers,
        help="list of conditions headers to display")
    # the --imperial option
    if config.has_option("default", "imperial"):
        default_imperial = config.getboolean("default", "imperial")
    else: default_imperial = False
    option_parser.add_option("--imperial",
        default=default_imperial,
        help="filter/convert conditions for US/UK units")
    option_parser.add_option("--info",
        help="output detailed information for your search")
    # the -l/--list option
    option_parser.add_option("-l", "--list",
        help="list all configured aliases and cached searches")
    # the --longlist option
    option_parser.add_option("--longlist",
        help="display details of all configured aliases")
    # the -m/--metric option
    if config.has_option("default", "metric"):
        default_metric = config.getboolean("default", "metric")
    else: default_metric = False
    option_parser.add_option("-m", "--metric",
        default=default_metric,
        help="filter/convert conditions for metric units")
    # the -n/--no-conditions option
    if config.has_option("default", "conditions"):
        default_conditions = config.getboolean("default", "conditions")
    else: default_conditions = True
    option_parser.add_option("-n", "--no-conditions",
        action="store_false",
        default=default_conditions,
        help="disable output of current conditions")
    # the --no-cache option
    if config.has_option("default", "cache"):
        default_cache = config.getboolean("default", "cache")
    else: default_cache = True
    option_parser.add_option("--no-cache",
        action="store_false",
        help="disable all caching (searches and data)")
    # the --no-cache-data option
    if config.has_option("default", "cache_data"):
        default_cache_data = config.getboolean("default", "cache_data")
    else: default_cache_data = True
    option_parser.add_option("--no-cache-data",
        action="store_false",
        help="disable retrieved data caching")
    # the --no-cache-search option
    if config.has_option("default", "cache_search"):
        default_cache_search = config.getboolean("default", "cache_search")
    else: default_cache_search = True
    option_parser.add_option("--no-cache-search",
        action="store_false",
        help="disable search result caching")
    # the -q/--quiet option
    if config.has_option("default", "quiet"):
        default_quiet = config.getboolean("default", "quiet")
    else: default_quiet = False
    option_parser.add_option("-q", "--quiet",
        default=default_quiet,
        help="skip preambles and don't indent")
    # the --setpath option
    if config.has_option("default", "setpath"):
        default_setpath = config.get("default", "setpath")
    else: default_setpath = ".:~/.weather"
    option_parser.add_option("--setpath",
        default=default_setpath,
        help="directory search path for correlation sets")
    # the -v/--verbose option
    if config.has_option("default", "verbose"):
        default_verbose = config.getboolean("default", "verbose")
    else: default_verbose = False
    option_parser.add_option("-v", "--verbose",
        default=default_verbose,
        help="show full decoded feeds")
    # deprecated options kept for compatibility, hidden from --help:
    if config.has_option("default", "city"):
        default_city = config.get("default", "city")
    else: default_city = ""
    option_parser.add_option("-c", "--city",
        default=default_city,
        help=optparse.SUPPRESS_HELP)
    if config.has_option("default", "id"):
        default_id = config.get("default", "id")
    else: default_id = ""
    option_parser.add_option("-i", "--id",
        help=optparse.SUPPRESS_HELP)
    if config.has_option("default", "st"):
        default_st = config.get("default", "st")
    else: default_st = ""
    option_parser.add_option("-s", "--st",
        help=optparse.SUPPRESS_HELP)
    options, arguments = option_parser.parse_args()
    return options, arguments
    """Parse the aliases and configuration."""
    # NOTE(review): the def line and the rcfiles list opening are elided.
    if pyversion("3"): import configparser
    else: import ConfigParser as configparser
    config = configparser.ConfigParser()
    # Candidate rc files, read in order so later files override earlier:
        "/etc/weather/weatherrc",
        os.path.expanduser("~/.weather/weatherrc"),
        os.path.expanduser("~/.weatherrc"),
    for rcfile in rcfiles:
        if os.access(rcfile, os.R_OK): config.read(rcfile)
    # Normalize section names to lowercase, preferring the mixed-case
    # section's options when both spellings exist.
    for section in config.sections():
        if section != section.lower():
            if config.has_section(section.lower()):
                config.remove_section(section.lower())
            config.add_section(section.lower())
            for option,value in config.items(section):
                config.set(section.lower(), option, value)
def integrate_search_cache(config, cachedir, setpath):
    """Add cached search results into the configuration."""
    # NOTE(review): several interior lines are elided in this view.
    if pyversion("3"): import configparser
    else: import ConfigParser as configparser
    scache_fn = os.path.join( os.path.expanduser(cachedir), "searches" )
    if not os.access(scache_fn, os.R_OK): return config
    scache_fd = open(scache_fn)
    # First line of the cache records its creation time as "...: <epoch> ..."
    created = float( scache_fd.readline().split(":")[1].strip().split()[0] )
    datafiles = data_index(setpath)
        # Freshest data file mtime; a cache older than the data is stale.
        data_freshness = sorted(
            [ x[1] for x in datafiles.values() ],
    else: data_freshness = now
    if created < data_freshness <= now:
            print( "[clearing outdated %s]" % scache_fn )
        except (IOError, OSError):
    # Merge cached sections into the live config without clobbering
    # sections the user already defined.
    scache = configparser.ConfigParser()
    scache.read(scache_fn)
    for section in scache.sections():
        if not config.has_section(section):
            config.add_section(section)
            for option,value in scache.items(section):
                config.set(section, option, value)
def list_aliases(config, detail=False):
    """Return a formatted list of aliases defined in the config."""
    # NOTE(review): the branch condition and return lines are elided;
    # detail=True emits full [section] option dumps, otherwise a short
    # one-line-per-alias summary.
        output = "\n# configured alias details..."
        for section in sorted(config.sections()):
            output += "\n\n[%s]" % section
            for item in sorted(config.items(section)):
                output += "\n%s = %s" % item
        output = "configured aliases and cached searches..."
        for section in sorted(config.sections()):
            if config.has_option(section, "description"):
                description = config.get(section, "description")
            else: description = "(no description provided)"
            output += "\n %s: %s" % (section, description)
def data_index(path):
    """Index available data files along the given colon-separated search
    path; presumably maps each data set name to a (path, mtime) tuple --
    confirm against the complete source (interior lines are elided)."""
    for filename in ("airports", "places", "stations", "zctas", "zones"):
        for dirname in path.split(":"):
            # plain, gzipped, or .txt variants are all acceptable
            for extension in ("", ".gz", ".txt"):
                candidate = os.path.expanduser(
                    os.path.join( dirname, "".join( (filename, extension) ) )
                if os.path.exists(candidate):
                    datafiles[filename] = (
                        os.stat(candidate).st_mtime
            # first hit along the path wins for this data set
            if filename in datafiles:
    """Find URIs using airport, gecos, placename, station, ZCTA/ZIP, zone."""
    # NOTE(review): the def line and a large number of interior lines are
    # elided in this view; structure below is reconstructed best-effort.
    import codecs, datetime, time, os, re, sys
    if pyversion("3"): import configparser
    else: import ConfigParser as configparser
    datafiles = data_index(path)
    # Classify the search expression by shape: 3 letters = airport code,
    # 4 alphanumerics = ICAO station, XXZNNN = NWS zone, 5 digits = ZCTA,
    # coordinate pair = coordinates, FIPSnnn = FIPS, else a name search.
    if re.match("[A-Za-z]{3}$", expression): searchtype = "airport"
    elif re.match("[A-Za-z0-9]{4}$", expression): searchtype = "station"
    elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", expression): searchtype = "zone"
    elif re.match("[0-9]{5}$", expression): searchtype = "ZCTA"
        r"[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?, *[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?$",
        searchtype = "coordinates"
    elif re.match(r"(FIPS|fips)\d+$", expression): searchtype = "FIPS"
    if cache_search: action = "caching"
    else: action = "using"
    # fragment of an elided proximity-score table:
        (0.995, "excellent"),
    if not quiet: print("Searching via %s..."%searchtype)
    # Load the stations data set (gzipped or plain).
    stations = configparser.ConfigParser()
    dataname = "stations"
    if dataname in datafiles:
        datafile = datafiles[dataname][0]
        if datafile.endswith(".gz"):
            stations.read_string(
                gzip.open(datafile).read().decode("utf-8") )
            else: stations.readfp( gzip.open(datafile) )
        stations.read(datafile)
        message = "%s error: can't find \"%s\" data file\n" % (
            os.path.basename( sys.argv[0] ),
        sys.stderr.write(message)
    # Load the zones data set.
    zones = configparser.ConfigParser()
    if dataname in datafiles:
        datafile = datafiles[dataname][0]
        if datafile.endswith(".gz"):
            zones.read_string( gzip.open(datafile).read().decode("utf-8") )
            else: zones.readfp( gzip.open(datafile) )
        message = "%s error: can't find \"%s\" data file\n" % (
            os.path.basename( sys.argv[0] ),
        sys.stderr.write(message)
    # NOTE(review): the eval(...) calls throughout below parse tuples
    # stored in the local data files; safe only because those files are
    # generated locally -- do not point them at untrusted input.
    if searchtype == "airport":
        expression = expression.lower()
        airports = configparser.ConfigParser()
        dataname = "airports"
        if dataname in datafiles:
            datafile = datafiles[dataname][0]
            if datafile.endswith(".gz"):
                airports.read_string(
                    gzip.open(datafile).read().decode("utf-8") )
                else: airports.readfp( gzip.open(datafile) )
            airports.read(datafile)
            message = "%s error: can't find \"%s\" data file\n" % (
                os.path.basename( sys.argv[0] ),
            sys.stderr.write(message)
        if airports.has_section(expression) \
            and airports.has_option(expression, "station"):
            search = (expression, "IATA/FAA airport code %s" % expression)
            station = ( airports.get(expression, "station"), 0 )
            if stations.has_option(station[0], "zone"):
                zone = eval( stations.get(station[0], "zone") )
            if not ( info or quiet ) \
                and stations.has_option( station[0], "description" ):
                stations.get(station[0], "description")
            message = "No IATA/FAA airport code \"%s\" in the %s file.\n" % (
                datafiles["airports"][0]
            sys.stderr.write(message)
    elif searchtype == "station":
        expression = expression.lower()
        if stations.has_section(expression):
            station = (expression, 0)
            search = (expression, "ICAO station code %s" % expression)
            if stations.has_option(expression, "zone"):
                zone = eval( stations.get(expression, "zone") )
            if not ( info or quiet ) \
                and stations.has_option(expression, "description"):
                stations.get(expression, "description")
            message = "No ICAO weather station \"%s\" in the %s file.\n" % (
                datafiles["stations"][0]
            sys.stderr.write(message)
    elif searchtype == "zone":
        expression = expression.lower()
        if zones.has_section(expression) \
            and zones.has_option(expression, "station"):
            zone = (expression, 0)
            station = eval( zones.get(expression, "station") )
            search = (expression, "NWS/NOAA weather zone %s" % expression)
            if not ( info or quiet ) \
                and zones.has_option(expression, "description"):
                zones.get(expression, "description")
            message = "No usable NWS weather zone \"%s\" in the %s file.\n" % (
                datafiles["zones"][0]
            sys.stderr.write(message)
    elif searchtype == "ZCTA":
        zctas = configparser.ConfigParser()
        if dataname in datafiles:
            datafile = datafiles[dataname][0]
            if datafile.endswith(".gz"):
                    gzip.open(datafile).read().decode("utf-8") )
                else: zctas.readfp( gzip.open(datafile) )
            message = "%s error: can't find \"%s\" data file\n" % (
                os.path.basename( sys.argv[0] ),
            sys.stderr.write(message)
        if zctas.has_section(expression) \
            and zctas.has_option(expression, "station"):
            station = eval( zctas.get(expression, "station") )
            search = (expression, "Census ZCTA (ZIP code) %s" % expression)
            if zctas.has_option(expression, "zone"):
                zone = eval( zctas.get(expression, "zone") )
            message = "No census ZCTA (ZIP code) \"%s\" in the %s file.\n" % (
                datafiles["zctas"][0]
            sys.stderr.write(message)
    elif searchtype == "coordinates":
        search = (expression, "Geographic coordinates %s" % expression)
        # Build a location table and pick the nearest station within
        # 0.1 radians of the given coordinates.
        for station in stations.sections():
            if stations.has_option(station, "location"):
                stationtable[station] = {
                    "location": eval( stations.get(station, "location") )
        station = closest( gecos(expression), stationtable, "location", 0.1 )
            message = "No ICAO weather station found near %s.\n" % expression
            sys.stderr.write(message)
        # Same approach for the nearest zone centroid.
        for zone in zones.sections():
            if zones.has_option(zone, "centroid"):
                "centroid": eval( zones.get(zone, "centroid") )
        zone = closest( gecos(expression), zonetable, "centroid", 0.1 )
            message = "No NWS weather zone near %s; forecasts unavailable.\n" \
            sys.stderr.write(message)
    elif searchtype in ("FIPS", "name"):
        places = configparser.ConfigParser()
        if dataname in datafiles:
            datafile = datafiles[dataname][0]
            if datafile.endswith(".gz"):
                    gzip.open(datafile).read().decode("utf-8") )
                else: places.readfp( gzip.open(datafile) )
            places.read(datafile)
            message = "%s error: can't find \"%s\" data file\n" % (
                os.path.basename( sys.argv[0] ),
            sys.stderr.write(message)
        place = expression.lower()
        # Exact place-section hit takes precedence over fuzzy matching.
        if places.has_section(place) and places.has_option(place, "station"):
            station = eval( places.get(place, "station") )
            search = (expression, "Census Place %s" % expression)
            if places.has_option(place, "description"):
                search[1] + ", %s" % places.get(place, "description")
            if places.has_option(place, "zone"):
                zone = eval( places.get(place, "zone") )
            if not ( info or quiet ) \
                and places.has_option(place, "description"):
                places.get(place, "description")
            # Otherwise collect candidate matches across all three data
            # sets (places, stations, zones) by description.
            for place in places.sections():
                if places.has_option(place, "description") \
                    and places.has_option(place, "station") \
                    places.get(place, "description"),
                    possibilities.append(place)
            for place in stations.sections():
                if stations.has_option(place, "description") \
                    stations.get(place, "description"),
                    possibilities.append(place)
            for place in zones.sections():
                if zones.has_option(place, "description") \
                    and zones.has_option(place, "station") \
                    zones.get(place, "description"),
                    possibilities.append(place)
            # A single candidate is adopted outright.
            if len(possibilities) == 1:
                place = possibilities[0]
                if places.has_section(place):
                    station = eval( places.get(place, "station") )
                    description = places.get(place, "description")
                    if places.has_option(place, "zone"):
                        zone = eval( places.get(place, "zone" ) )
                    search = ( expression, "%s: %s" % (place, description) )
                elif stations.has_section(place):
                    station = (place, 0.0)
                    description = stations.get(place, "description")
                    if stations.has_option(place, "zone"):
                        zone = eval( stations.get(place, "zone" ) )
                    search = ( expression, "ICAO station code %s" % place )
                elif zones.has_section(place):
                    station = eval( zones.get(place, "station") )
                    description = zones.get(place, "description")
                    search = ( expression, "NWS/NOAA weather zone %s" % place )
                if not ( info or quiet ):
                    print( "[%s result %s]" % (action, description) )
            if not possibilities and not station[0]:
                message = "No FIPS code/census area match in the %s file.\n" % (
                    datafiles["places"][0]
                sys.stderr.write(message)
    # Resolve the station's METAR URI plus any zone-specific URIs.
    uris["metar"] = stations.get( station[0], "metar" )
        for key,value in zones.items( zone[0] ):
            if key not in ("centroid", "description", "station"):
    # Ambiguous searches report their candidates (up to max_results).
    count = len(possibilities)
    if count <= max_results:
        print( "Your search is ambiguous, returning %s matches:" % count )
        for place in sorted(possibilities):
            if places.has_section(place):
                places.get(place, "description")
            elif stations.has_section(place):
                stations.get(place, "description")
            elif zones.has_section(place):
                zones.get(place, "description")
        "Your search is too ambiguous, returning %s matches." % count
    # Proximity-quality ranking: rank this match's angular distance
    # against the distribution of all station/zone distances.
    for section in dataset.sections():
        if dataset.has_option(section, "station"):
            eval( dataset.get(section, "station") )[1]
        if dataset.has_option(section, "zone"):
            zonelist.append( eval( dataset.get(section, "zone") )[1] )
    scount = len(stationlist)
    zcount = len(zonelist)
    for score in scores:
        sranks.append( stationlist[ int( (1-score[0]) * scount ) ] )
        zranks.append( zonelist[ int( (1-score[0]) * zcount ) ] )
    description = search[1]
    uris["description"] = description
    # Info output: underlined description, then station/zone detail with
    # distances derived from the stored angular separations.
        "%s\n%s" % ( description, "-" * len(description) )
        stations.get( station[0], "description" )
    km = radian_to_km*station[1]
    mi = radian_to_mi*station[1]
    if sranks and not description.startswith("ICAO station code "):
        for index in range(0, len(scores)):
            if station[1] >= sranks[index]:
                score = scores[index][1]
                " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
    elif searchtype == "coordinates":
        print( " (%.3gkm, %.3gmi)" % (km, mi) )
        "%s: %s" % ( zone[0], zones.get( zone[0], "description" ) )
    km = radian_to_km*zone[1]
    mi = radian_to_mi*zone[1]
    if zranks and not description.startswith("NWS/NOAA weather zone "):
        for index in range(0, len(scores)):
            if zone[1] >= zranks[index]:
                score = scores[index][1]
                " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
    elif searchtype == "coordinates" and zone[0]:
        print( " (%.3gkm, %.3gmi)" % (km, mi) )
    # Append the result to the search cache, stamped with the current
    # time and (for a fresh cache file) the data files' timestamp.
    nowstamp = "%s (%s)" % (
        datetime.datetime.isoformat(
            datetime.datetime.fromtimestamp(now),
    search_cache = ["\n"]
    search_cache.append( "[%s]\n" % search[0] )
    search_cache.append( "cached = %s\n" % nowstamp )
    for uriname in sorted(uris.keys()):
        search_cache.append( "%s = %s\n" % ( uriname, uris[uriname] ) )
    real_cachedir = os.path.expanduser(cachedir)
    if not os.path.exists(real_cachedir):
        # best-effort; failure just disables caching
        try: os.makedirs(real_cachedir)
        except (IOError, OSError): pass
    scache_fn = os.path.join(real_cachedir, "searches")
    if not os.path.exists(scache_fn):
        [ x[1] for x in datafiles.values() ],
        thenstamp = "%s (%s)" % (
            datetime.datetime.isoformat(
                datetime.datetime.fromtimestamp(then),
        search_cache.insert(
            "# based on data files from: %s\n" % thenstamp
    # Never duplicate a section already present in the cache file.
    scache_existing = configparser.ConfigParser()
    scache_existing.read(scache_fn)
    if not scache_existing.has_section(search[0]):
        scache_fd = codecs.open(scache_fn, "a", "utf-8")
        scache_fd.writelines(search_cache)
        except (IOError, OSError): pass
def closest(position, nodes, fieldname, angle=None):
    """Find the node nearest *position* (radian lat/lon pair), looking at
    each node's *fieldname* coordinate and returning a (name, angle)
    tuple; only nodes within *angle* radians qualify.  Interior lines
    (accumulator setup, loop header, best-match updates) are elided in
    this view -- confirm against the complete source."""
    # default search radius is the whole sphere
    if not angle: angle = 2*math.pi
        if fieldname in nodes[name]:
            node = nodes[name][fieldname]
            # cheap bounding-box rejection before the spherical math,
            # allowing for longitude wraparound at +/- pi
            if node and abs( position[0]-node[0] ) < angle:
                if abs( position[1]-node[1] ) < angle \
                    or abs( abs( position[1]-node[1] ) - 2*math.pi ) < angle:
                    if position == node:
                    # spherical law of cosines for angular separation
                    candidate = math.acos(
                        math.sin( position[0] ) * math.sin( node[0] ) \
                            + math.cos( position[0] ) \
                            * math.cos( node[0] ) \
                            * math.cos( position[1] - node[1] )
                    if candidate < angle:
    if match: match = str(match)
    return (match, angle)
def gecos(formatted):
    """Convert a coordinate pair string into a tuple of radians.

    The input is two comma-separated coordinates, each either signed
    decimal degrees or GECOS-style "degrees[-minutes[-seconds]]" with an
    optional trailing hemisphere letter (N/S/E/W, case-insensitive).
    Southern and western hemispheres yield negative values.
    """
    coordinates = formatted.split(",")
    for coordinate in range(0, 2):
        # The source text left this re.match(...) call unterminated; the
        # six-name unpacking requires closing it with .groups().  Groups
        # 2 and 4 (foo/bar) are the optional "-MM"/"-SS" wrappers whose
        # inner groups carry the actual minute/second digits.
        degrees, foo, minutes, bar, seconds, hemisphere = re.match(
            r"([\+-]?\d+\.?\d*)(-(\d+))?(-(\d+))?([ensw]?)$",
            coordinates[coordinate].strip().lower()
        ).groups()
        value = float(degrees)
        if minutes: value += float(minutes)/60
        if seconds: value += float(seconds)/3600
        # south and west are the negative hemispheres
        if hemisphere and hemisphere in "sw": value *= -1
        coordinates[coordinate] = math.radians(value)
    return tuple(coordinates)
1205 import codecs, csv, datetime, hashlib, os, re, sys, tarfile, time, zipfile
1206 if pyversion("3"): import configparser
1207 else: import ConfigParser as configparser
1208 for filename in os.listdir("."):
1209 if re.match("[0-9]{4}_Gaz_counties_national.zip$", filename):
1210 gcounties_an = filename
1211 gcounties_fn = filename[:-4] + ".txt"
1212 elif re.match("[0-9]{4}_Gaz_cousubs_national.zip$", filename):
1213 gcousubs_an = filename
1214 gcousubs_fn = filename[:-4] + ".txt"
1215 elif re.match("[0-9]{4}_Gaz_place_national.zip$", filename):
1216 gplace_an = filename
1217 gplace_fn = filename[:-4] + ".txt"
1218 elif re.match("[0-9]{4}_Gaz_zcta_national.zip$", filename):
1220 gzcta_fn = filename[:-4] + ".txt"
1221 elif re.match("bp[0-9]{2}[a-z]{2}[0-9]{2}.dbx$", filename):
1222 cpfzcf_fn = filename
1223 nsdcccc_fn = "nsd_cccc.txt"
1224 ourairports_fn = "airports.csv"
1225 overrides_fn = "overrides.conf"
1226 overrideslog_fn = "overrides.log"
1230 airports_fn = "airports"
1231 places_fn = "places"
1232 stations_fn = "stations"
1237 # generated by %s on %s from these public domain sources:
1239 # https://www.census.gov/geographies/reference-files/time-series/geo/gazetteer-files.html
1245 # https://www.weather.gov/gis/ZoneCounty/
1248 # https://tgftp.nws.noaa.gov/data/
1251 # https://ourairports.com/data/
1254 # ...and these manually-generated or hand-compiled adjustments:
1260 os.path.basename( sys.argv[0] ),
1261 datetime.date.isoformat(
1262 datetime.datetime.utcfromtimestamp( int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) )
1264 hashlib.md5( open(gcounties_an, "rb").read() ).hexdigest(),
1265 datetime.date.isoformat(
1266 datetime.datetime.utcfromtimestamp( os.path.getmtime(gcounties_an) )
1269 hashlib.md5( open(gcousubs_an, "rb").read() ).hexdigest(),
1270 datetime.date.isoformat(
1271 datetime.datetime.utcfromtimestamp( os.path.getmtime(gcousubs_an) )
1274 hashlib.md5( open(gplace_an, "rb").read() ).hexdigest(),
1275 datetime.date.isoformat(
1276 datetime.datetime.utcfromtimestamp( os.path.getmtime(gplace_an) )
1279 hashlib.md5( open(gzcta_an, "rb").read() ).hexdigest(),
1280 datetime.date.isoformat(
1281 datetime.datetime.utcfromtimestamp( os.path.getmtime(gzcta_an) )
1284 hashlib.md5( open(cpfzcf_fn, "rb").read() ).hexdigest(),
1285 datetime.date.isoformat(
1286 datetime.datetime.utcfromtimestamp( os.path.getmtime(cpfzcf_fn) )
1289 hashlib.md5( open(nsdcccc_fn, "rb").read() ).hexdigest(),
1290 datetime.date.isoformat(
1291 datetime.datetime.utcfromtimestamp( os.path.getmtime(nsdcccc_fn) )
1294 hashlib.md5( open(ourairports_fn, "rb").read() ).hexdigest(),
1295 datetime.date.isoformat(
1296 datetime.datetime.utcfromtimestamp( os.path.getmtime(ourairports_fn) )
1299 hashlib.md5( open(overrides_fn, "rb").read() ).hexdigest(),
1300 datetime.date.isoformat(
1301 datetime.datetime.utcfromtimestamp( os.path.getmtime(overrides_fn) )
1304 hashlib.md5( open(slist_fn, "rb").read() ).hexdigest(),
1305 datetime.date.isoformat(
1306 datetime.datetime.utcfromtimestamp( os.path.getmtime(slist_fn) )
1309 hashlib.md5( open(zlist_fn, "rb").read() ).hexdigest(),
1310 datetime.date.isoformat(
1311 datetime.datetime.utcfromtimestamp( os.path.getmtime(zlist_fn) )
1320 message = "Reading %s:%s..." % (gcounties_an, gcounties_fn)
1321 sys.stdout.write(message)
1324 gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "r")
1325 columns = gcounties.readline().decode("utf-8").strip().split("\t")
1326 for line in gcounties:
1327 fields = line.decode("utf-8").strip().split("\t")
1328 f_geoid = fields[ columns.index("GEOID") ].strip()
1329 f_name = fields[ columns.index("NAME") ].strip()
1330 f_usps = fields[ columns.index("USPS") ].strip()
1331 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1332 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1333 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1334 fips = "fips%s" % f_geoid
1335 if fips not in places: places[fips] = {}
1336 places[fips]["centroid"] = gecos(
1337 "%s,%s" % (f_intptlat, f_intptlong)
1339 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1342 print("done (%s lines)." % count)
1343 message = "Reading %s:%s..." % (gcousubs_an, gcousubs_fn)
1344 sys.stdout.write(message)
1347 gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "r")
1348 columns = gcousubs.readline().decode("utf-8").strip().split("\t")
1349 for line in gcousubs:
1350 fields = line.decode("utf-8").strip().split("\t")
1351 f_geoid = fields[ columns.index("GEOID") ].strip()
1352 f_name = fields[ columns.index("NAME") ].strip()
1353 f_usps = fields[ columns.index("USPS") ].strip()
1354 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1355 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1356 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1357 fips = "fips%s" % f_geoid
1358 if fips not in places: places[fips] = {}
1359 places[fips]["centroid"] = gecos(
1360 "%s,%s" % (f_intptlat, f_intptlong)
1362 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1365 print("done (%s lines)." % count)
1366 message = "Reading %s:%s..." % (gplace_an, gplace_fn)
1367 sys.stdout.write(message)
1370 gplace = zipfile.ZipFile(gplace_an).open(gplace_fn, "r")
1371 columns = gplace.readline().decode("utf-8").strip().split("\t")
1373 fields = line.decode("utf-8").strip().split("\t")
1374 f_geoid = fields[ columns.index("GEOID") ].strip()
1375 f_name = fields[ columns.index("NAME") ].strip()
1376 f_usps = fields[ columns.index("USPS") ].strip()
1377 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1378 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1379 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1380 fips = "fips%s" % f_geoid
1381 if fips not in places: places[fips] = {}
1382 places[fips]["centroid"] = gecos(
1383 "%s,%s" % (f_intptlat, f_intptlong)
1385 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1388 print("done (%s lines)." % count)
1389 message = "Reading %s..." % slist_fn
1390 sys.stdout.write(message)
1393 slist = codecs.open(slist_fn, "rU", "utf-8")
1395 icao = line.split("#")[0].strip()
1398 "metar": "https://tgftp.nws.noaa.gov/data/observations/"\
1399 + "metar/decoded/%s.TXT" % icao.upper()
1403 print("done (%s lines)." % count)
1404 message = "Reading %s..." % nsdcccc_fn
1405 sys.stdout.write(message)
1408 nsdcccc = codecs.open(nsdcccc_fn, "rU", "utf-8")
1409 for line in nsdcccc:
1411 fields = line.split(";")
1412 icao = fields[0].strip().lower()
1413 if icao in stations:
1415 name = " ".join( fields[3].strip().title().split() )
1416 if name: description.append(name)
1417 st = fields[4].strip()
1418 if st: description.append(st)
1419 country = " ".join( fields[5].strip().title().split() )
1420 if country: description.append(country)
1422 stations[icao]["description"] = ", ".join(description)
1423 lat, lon = fields[7:9]
1425 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1426 elif "location" not in stations[icao]:
1427 lat, lon = fields[5:7]
1429 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1432 print("done (%s lines)." % count)
1433 message = "Reading %s..." % ourairports_fn
1434 sys.stdout.write(message)
1437 ourairports = open(ourairports_fn, "rU")
1438 for row in csv.reader(ourairports):
1439 icao = row[12].lower()
1440 if icao in stations:
1441 iata = row[13].lower()
1442 if len(iata) == 3: airports[iata] = { "station": icao }
1443 if "description" not in stations[icao]:
1446 if name: description.append(name)
1447 municipality = row[10]
1448 if municipality: description.append(municipality)
1453 c,r = region.split("-", 1)
1454 if c == country: region = r
1455 description.append(region)
1457 description.append(country)
1459 stations[icao]["description"] = ", ".join(description)
1460 if "location" not in stations[icao]:
1465 stations[icao]["location"] = gecos(
1466 "%s,%s" % (lat, lon)
1470 print("done (%s lines)." % count)
1471 message = "Reading %s..." % zlist_fn
1472 sys.stdout.write(message)
1475 zlist = codecs.open(zlist_fn, "rU", "utf-8")
1477 line = line.split("#")[0].strip()
1482 print("done (%s lines)." % count)
1483 message = "Reading %s..." % cpfzcf_fn
1484 sys.stdout.write(message)
1488 cpfzcf = codecs.open(cpfzcf_fn, "rU", "utf-8")
1490 fields = line.strip().split("|")
1491 if len(fields) == 11 \
1492 and fields[0] and fields[1] and fields[9] and fields[10]:
1493 zone = "z".join( fields[:2] ).lower()
1497 zones[zone]["coastal_flood_statement"] = (
1498 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1499 "flood/coastal/%s/%s.txt" % (state.lower(), zone))
1500 zones[zone]["flash_flood_statement"] = (
1501 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1502 "flash_flood/statement/%s/%s.txt"
1503 % (state.lower(), zone))
1504 zones[zone]["flash_flood_warning"] = (
1505 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1506 "flash_flood/warning/%s/%s.txt"
1507 % (state.lower(), zone))
1508 zones[zone]["flash_flood_watch"] = (
1509 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1510 "flash_flood/watch/%s/%s.txt" % (state.lower(), zone))
1511 zones[zone]["flood_statement"] = (
1512 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1513 "flood/statement/%s/%s.txt" % (state.lower(), zone))
1514 zones[zone]["flood_warning"] = (
1515 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1516 "flood/warning/%s/%s.txt" % (state.lower(), zone))
1517 zones[zone]["severe_thunderstorm_warning"] = (
1518 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1519 "thunderstorm/%s/%s.txt" % (state.lower(), zone))
1520 zones[zone]["severe_weather_statement"] = (
1521 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1522 "severe_weather_stmt/%s/%s.txt"
1523 % (state.lower(), zone))
1524 zones[zone]["short_term_forecast"] = (
1525 "https://tgftp.nws.noaa.gov/data/forecasts/nowcast/"
1526 "%s/%s.txt" % (state.lower(), zone))
1527 zones[zone]["special_weather_statement"] = (
1528 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1529 "special_weather_stmt/%s/%s.txt"
1530 % (state.lower(), zone))
1531 zones[zone]["state_forecast"] = (
1532 "https://tgftp.nws.noaa.gov/data/forecasts/state/"
1533 "%s/%s.txt" % (state.lower(), zone))
1534 zones[zone]["urgent_weather_message"] = (
1535 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1536 "non_precip/%s/%s.txt" % (state.lower(), zone))
1537 zones[zone]["zone_forecast"] = (
1538 "https://tgftp.nws.noaa.gov/data/forecasts/zone/"
1539 "%s/%s.txt" % (state.lower(), zone))
1540 description = fields[3].strip()
1541 fips = "fips%s"%fields[6]
1544 if description.endswith(county):
1545 description += " County"
1547 description += ", %s County" % county
1548 description += ", %s, US" % state
1549 zones[zone]["description"] = description
1550 zones[zone]["centroid"] = gecos( ",".join( fields[9:11] ) )
1551 if fips in places and not zones[zone]["centroid"]:
1552 zones[zone]["centroid"] = places[fips]["centroid"]
1555 print("done (%s lines)." % count)
1556 message = "Reading %s:%s..." % (gzcta_an, gzcta_fn)
1557 sys.stdout.write(message)
1560 gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "r")
1561 columns = gzcta.readline().decode("utf-8").strip().split("\t")
1563 fields = line.decode("utf-8").strip().split("\t")
1564 f_geoid = fields[ columns.index("GEOID") ].strip()
1565 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1566 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1567 if f_geoid and f_intptlat and f_intptlong:
1568 if f_geoid not in zctas: zctas[f_geoid] = {}
1569 zctas[f_geoid]["centroid"] = gecos(
1570 "%s,%s" % (f_intptlat, f_intptlong)
1574 print("done (%s lines)." % count)
1575 message = "Reading %s..." % overrides_fn
1576 sys.stdout.write(message)
1582 overrides = configparser.ConfigParser()
1583 overrides.readfp( codecs.open(overrides_fn, "r", "utf8") )
1585 for section in overrides.sections():
1588 if section.startswith("-"):
1589 section = section[1:]
1591 else: delete = False
1592 if re.match("[A-Za-z]{3}$", section):
1594 if section in airports:
1595 del( airports[section] )
1596 logact = "removed airport %s" % section
1599 logact = "tried to remove nonexistent airport %s" % section
1601 if section in airports:
1602 logact = "changed airport %s" % section
1605 airports[section] = {}
1606 logact = "added airport %s" % section
1608 for key,value in overrides.items(section):
1609 if key in airports[section]: chgopt += 1
1611 if key in ("centroid", "location"):
1612 airports[section][key] = eval(value)
1614 airports[section][key] = value
1615 if addopt and chgopt:
1616 logact += " (+%s/!%s options)" % (addopt, chgopt)
1617 elif addopt: logact += " (+%s options)" % addopt
1618 elif chgopt: logact += " (!%s options)" % chgopt
1619 elif re.match("[A-Za-z0-9]{4}$", section):
1621 if section in stations:
1622 del( stations[section] )
1623 logact = "removed station %s" % section
1626 logact = "tried to remove nonexistent station %s" % section
1628 if section in stations:
1629 logact = "changed station %s" % section
1632 stations[section] = {}
1633 logact = "added station %s" % section
1635 for key,value in overrides.items(section):
1636 if key in stations[section]: chgopt += 1
1638 if key in ("centroid", "location"):
1639 stations[section][key] = eval(value)
1641 stations[section][key] = value
1642 if addopt and chgopt:
1643 logact += " (+%s/!%s options)" % (addopt, chgopt)
1644 elif addopt: logact += " (+%s options)" % addopt
1645 elif chgopt: logact += " (!%s options)" % chgopt
1646 elif re.match("[0-9]{5}$", section):
1648 if section in zctas:
1649 del( zctas[section] )
1650 logact = "removed zcta %s" % section
1653 logact = "tried to remove nonexistent zcta %s" % section
1655 if section in zctas:
1656 logact = "changed zcta %s" % section
1660 logact = "added zcta %s" % section
1662 for key,value in overrides.items(section):
1663 if key in zctas[section]: chgopt += 1
1665 if key in ("centroid", "location"):
1666 zctas[section][key] = eval(value)
1668 zctas[section][key] = value
1669 if addopt and chgopt:
1670 logact += " (+%s/!%s options)" % (addopt, chgopt)
1671 elif addopt: logact += " (+%s options)" % addopt
1672 elif chgopt: logact += " (!%s options)" % chgopt
1673 elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", section):
1675 if section in zones:
1676 del( zones[section] )
1677 logact = "removed zone %s" % section
1680 logact = "tried to remove nonexistent zone %s" % section
1682 if section in zones:
1683 logact = "changed zone %s" % section
1687 logact = "added zone %s" % section
1689 for key,value in overrides.items(section):
1690 if key in zones[section]: chgopt += 1
1692 if key in ("centroid", "location"):
1693 zones[section][key] = eval(value)
1695 zones[section][key] = value
1696 if addopt and chgopt:
1697 logact += " (+%s/!%s options)" % (addopt, chgopt)
1698 elif addopt: logact += " (+%s options)" % addopt
1699 elif chgopt: logact += " (!%s options)" % chgopt
1700 elif re.match("fips[0-9]+$", section):
1702 if section in places:
1703 del( places[section] )
1704 logact = "removed place %s" % section
1707 logact = "tried to remove nonexistent place %s" % section
1709 if section in places:
1710 logact = "changed place %s" % section
1713 places[section] = {}
1714 logact = "added place %s" % section
1716 for key,value in overrides.items(section):
1717 if key in places[section]: chgopt += 1
1719 if key in ("centroid", "location"):
1720 places[section][key] = eval(value)
1722 places[section][key] = value
1723 if addopt and chgopt:
1724 logact += " (+%s/!%s options)" % (addopt, chgopt)
1725 elif addopt: logact += " (+%s options)" % addopt
1726 elif chgopt: logact += " (!%s options)" % chgopt
1728 overrideslog.append("%s\n" % logact)
1730 if os.path.exists(overrideslog_fn):
1731 os.rename(overrideslog_fn, "%s_old"%overrideslog_fn)
1732 overrideslog_fd = codecs.open(overrideslog_fn, "w", "utf8")
1734 overrideslog_fd.write(
1735 '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
1736 '# use, copy, modify, and distribute this software is granted under terms\n'
1737 '# provided in the LICENSE file distributed with this software.\n\n'
1738 % time.gmtime().tm_year)
1739 overrideslog_fd.writelines(overrideslog)
1740 overrideslog_fd.close()
1741 print("done (%s overridden sections: +%s/-%s/!%s)." % (
1747 estimate = 2*len(places) + len(stations) + 2*len(zctas) + len(zones)
1749 "Correlating places, stations, ZCTAs and zones (upper bound is %s):" % \
1753 milestones = list( range(51) )
1755 sys.stdout.write(message)
1758 centroid = places[fips]["centroid"]
1760 station = closest(centroid, stations, "location", 0.1)
1762 places[fips]["station"] = station
1765 level = int(50*count/estimate)
1766 if level in milestones:
1767 for remaining in milestones[:milestones.index(level)+1]:
1770 sys.stdout.write(message)
1773 message = "%s%%" % (remaining*2,)
1774 sys.stdout.write(message)
1776 milestones.remove(remaining)
1778 zone = closest(centroid, zones, "centroid", 0.1)
1780 places[fips]["zone"] = zone
1783 level = int(50*count/estimate)
1784 if level in milestones:
1785 for remaining in milestones[:milestones.index(level)+1]:
1788 sys.stdout.write(message)
1791 message = "%s%%" % (remaining*2,)
1792 sys.stdout.write(message)
1794 milestones.remove(remaining)
1795 for station in stations:
1796 if "location" in stations[station]:
1797 location = stations[station]["location"]
1799 zone = closest(location, zones, "centroid", 0.1)
1801 stations[station]["zone"] = zone
1804 level = int(50*count/estimate)
1805 if level in milestones:
1806 for remaining in milestones[:milestones.index(level)+1]:
1809 sys.stdout.write(message)
1812 message = "%s%%" % (remaining*2,)
1813 sys.stdout.write(message)
1815 milestones.remove(remaining)
1816 for zcta in zctas.keys():
1817 centroid = zctas[zcta]["centroid"]
1819 station = closest(centroid, stations, "location", 0.1)
1821 zctas[zcta]["station"] = station
1824 level = int(50*count/estimate)
1825 if level in milestones:
1826 for remaining in milestones[ : milestones.index(level)+1 ]:
1829 sys.stdout.write(message)
1832 message = "%s%%" % (remaining*2,)
1833 sys.stdout.write(message)
1835 milestones.remove(remaining)
1837 zone = closest(centroid, zones, "centroid", 0.1)
1839 zctas[zcta]["zone"] = zone
1842 level = int(50*count/estimate)
1843 if level in milestones:
1844 for remaining in milestones[:milestones.index(level)+1]:
1847 sys.stdout.write(message)
1850 message = "%s%%" % (remaining*2,)
1851 sys.stdout.write(message)
1853 milestones.remove(remaining)
1854 for zone in zones.keys():
1855 if "centroid" in zones[zone]:
1856 centroid = zones[zone]["centroid"]
1858 station = closest(centroid, stations, "location", 0.1)
1860 zones[zone]["station"] = station
1863 level = int(50*count/estimate)
1864 if level in milestones:
1865 for remaining in milestones[:milestones.index(level)+1]:
1868 sys.stdout.write(message)
1871 message = "%s%%" % (remaining*2,)
1872 sys.stdout.write(message)
1874 milestones.remove(remaining)
1875 for remaining in milestones:
1878 sys.stdout.write(message)
1881 message = "%s%%" % (remaining*2,)
1882 sys.stdout.write(message)
1884 print("\n done (%s correlations)." % count)
1885 message = "Writing %s..." % airports_fn
1886 sys.stdout.write(message)
1889 if os.path.exists(airports_fn):
1890 os.rename(airports_fn, "%s_old"%airports_fn)
1891 airports_fd = codecs.open(airports_fn, "w", "utf8")
1892 airports_fd.write(header)
1893 for airport in sorted( airports.keys() ):
1894 airports_fd.write("\n\n[%s]" % airport)
1895 for key, value in sorted( airports[airport].items() ):
1896 if type(value) is float: value = "%.7f"%value
1897 elif type(value) is tuple:
1899 for element in value:
1900 if type(element) is float: elements.append("%.7f"%element)
1901 else: elements.append( repr(element) )
1902 value = "(%s)"%", ".join(elements)
1903 airports_fd.write( "\n%s = %s" % (key, value) )
1905 airports_fd.write("\n")
1907 print("done (%s sections)." % count)
1908 message = "Writing %s..." % places_fn
1909 sys.stdout.write(message)
1912 if os.path.exists(places_fn):
1913 os.rename(places_fn, "%s_old"%places_fn)
1914 places_fd = codecs.open(places_fn, "w", "utf8")
1915 places_fd.write(header)
1916 for fips in sorted( places.keys() ):
1917 places_fd.write("\n\n[%s]" % fips)
1918 for key, value in sorted( places[fips].items() ):
1919 if type(value) is float: value = "%.7f"%value
1920 elif type(value) is tuple:
1922 for element in value:
1923 if type(element) is float: elements.append("%.7f"%element)
1924 else: elements.append( repr(element) )
1925 value = "(%s)"%", ".join(elements)
1926 places_fd.write( "\n%s = %s" % (key, value) )
1928 places_fd.write("\n")
1930 print("done (%s sections)." % count)
1931 message = "Writing %s..." % stations_fn
1932 sys.stdout.write(message)
1935 if os.path.exists(stations_fn):
1936 os.rename(stations_fn, "%s_old"%stations_fn)
1937 stations_fd = codecs.open(stations_fn, "w", "utf-8")
1938 stations_fd.write(header)
1939 for station in sorted( stations.keys() ):
1940 stations_fd.write("\n\n[%s]" % station)
1941 for key, value in sorted( stations[station].items() ):
1942 if type(value) is float: value = "%.7f"%value
1943 elif type(value) is tuple:
1945 for element in value:
1946 if type(element) is float: elements.append("%.7f"%element)
1947 else: elements.append( repr(element) )
1948 value = "(%s)"%", ".join(elements)
1949 if type(value) is bytes:
1950 value = value.decode("utf-8")
1951 stations_fd.write( "\n%s = %s" % (key, value) )
1953 stations_fd.write("\n")
1955 print("done (%s sections)." % count)
1956 message = "Writing %s..." % zctas_fn
1957 sys.stdout.write(message)
1960 if os.path.exists(zctas_fn):
1961 os.rename(zctas_fn, "%s_old"%zctas_fn)
1962 zctas_fd = codecs.open(zctas_fn, "w", "utf8")
1963 zctas_fd.write(header)
1964 for zcta in sorted( zctas.keys() ):
1965 zctas_fd.write("\n\n[%s]" % zcta)
1966 for key, value in sorted( zctas[zcta].items() ):
1967 if type(value) is float: value = "%.7f"%value
1968 elif type(value) is tuple:
1970 for element in value:
1971 if type(element) is float: elements.append("%.7f"%element)
1972 else: elements.append( repr(element) )
1973 value = "(%s)"%", ".join(elements)
1974 zctas_fd.write( "\n%s = %s" % (key, value) )
1976 zctas_fd.write("\n")
1978 print("done (%s sections)." % count)
1979 message = "Writing %s..." % zones_fn
1980 sys.stdout.write(message)
1983 if os.path.exists(zones_fn):
1984 os.rename(zones_fn, "%s_old"%zones_fn)
1985 zones_fd = codecs.open(zones_fn, "w", "utf8")
1986 zones_fd.write(header)
1987 for zone in sorted( zones.keys() ):
1988 zones_fd.write("\n\n[%s]" % zone)
1989 for key, value in sorted( zones[zone].items() ):
1990 if type(value) is float: value = "%.7f"%value
1991 elif type(value) is tuple:
1993 for element in value:
1994 if type(element) is float: elements.append("%.7f"%element)
1995 else: elements.append( repr(element) )
1996 value = "(%s)"%", ".join(elements)
1997 zones_fd.write( "\n%s = %s" % (key, value) )
1999 zones_fd.write("\n")
2001 print("done (%s sections)." % count)
2002 message = "Starting QA check..."
2003 sys.stdout.write(message)
2005 airports = configparser.ConfigParser()
2006 airports.read(airports_fn)
2007 places = configparser.ConfigParser()
2008 places.read(places_fn)
2009 stations = configparser.ConfigParser()
2010 stations.read(stations_fn)
2011 zctas = configparser.ConfigParser()
2012 zctas.read(zctas_fn)
2013 zones = configparser.ConfigParser()
2014 zones.read(zones_fn)
2016 places_nocentroid = 0
2017 places_nodescription = 0
2018 for place in sorted( places.sections() ):
2019 if not places.has_option(place, "centroid"):
2020 qalog.append("%s: no centroid\n" % place)
2021 places_nocentroid += 1
2022 if not places.has_option(place, "description"):
2023 qalog.append("%s: no description\n" % place)
2024 places_nodescription += 1
2025 stations_nodescription = 0
2026 stations_nolocation = 0
2027 stations_nometar = 0
2028 for station in sorted( stations.sections() ):
2029 if not stations.has_option(station, "description"):
2030 qalog.append("%s: no description\n" % station)
2031 stations_nodescription += 1
2032 if not stations.has_option(station, "location"):
2033 qalog.append("%s: no location\n" % station)
2034 stations_nolocation += 1
2035 if not stations.has_option(station, "metar"):
2036 qalog.append("%s: no metar\n" % station)
2037 stations_nometar += 1
2038 airports_badstation = 0
2039 airports_nostation = 0
2040 for airport in sorted( airports.sections() ):
2041 if not airports.has_option(airport, "station"):
2042 qalog.append("%s: no station\n" % airport)
2043 airports_nostation += 1
2045 station = airports.get(airport, "station")
2046 if station not in stations.sections():
2047 qalog.append( "%s: bad station %s\n" % (airport, station) )
2048 airports_badstation += 1
2049 zctas_nocentroid = 0
2050 for zcta in sorted( zctas.sections() ):
2051 if not zctas.has_option(zcta, "centroid"):
2052 qalog.append("%s: no centroid\n" % zcta)
2053 zctas_nocentroid += 1
2054 zones_nocentroid = 0
2055 zones_nodescription = 0
2056 zones_noforecast = 0
2057 zones_overlapping = 0
2059 for zone in zones.sections():
2060 if zones.has_option(zone, "centroid"):
2062 "centroid": eval( zones.get(zone, "centroid") )
2064 for zone in sorted( zones.sections() ):
2065 if zones.has_option(zone, "centroid"):
2066 zonetable_local = zonetable.copy()
2067 del( zonetable_local[zone] )
2068 centroid = eval( zones.get(zone, "centroid") )
2070 nearest = closest(centroid, zonetable_local, "centroid", 0.1)
2071 if nearest[1]*radian_to_km < 1:
2072 qalog.append( "%s: within one km of %s\n" % (
2076 zones_overlapping += 1
2078 qalog.append("%s: no centroid\n" % zone)
2079 zones_nocentroid += 1
2080 if not zones.has_option(zone, "description"):
2081 qalog.append("%s: no description\n" % zone)
2082 zones_nodescription += 1
2083 if not zones.has_option(zone, "zone_forecast"):
2084 qalog.append("%s: no forecast\n" % zone)
2085 zones_noforecast += 1
2086 if os.path.exists(qalog_fn):
2087 os.rename(qalog_fn, "%s_old"%qalog_fn)
2088 qalog_fd = codecs.open(qalog_fn, "w", "utf8")
2091 '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
2092 '# use, copy, modify, and distribute this software is granted under terms\n'
2093 '# provided in the LICENSE file distributed with this software.\n\n'
2094 % time.gmtime().tm_year)
2095 qalog_fd.writelines(qalog)
2098 print("issues found (see %s for details):"%qalog_fn)
2099 if airports_badstation:
2100 print(" %s airports with invalid station"%airports_badstation)
2101 if airports_nostation:
2102 print(" %s airports with no station"%airports_nostation)
2103 if places_nocentroid:
2104 print(" %s places with no centroid"%places_nocentroid)
2105 if places_nodescription:
2106 print(" %s places with no description"%places_nodescription)
2107 if stations_nodescription:
2108 print(" %s stations with no description"%stations_nodescription)
2109 if stations_nolocation:
2110 print(" %s stations with no location"%stations_nolocation)
2111 if stations_nometar:
2112 print(" %s stations with no METAR"%stations_nometar)
2113 if zctas_nocentroid:
2114 print(" %s ZCTAs with no centroid"%zctas_nocentroid)
2115 if zones_nocentroid:
2116 print(" %s zones with no centroid"%zones_nocentroid)
2117 if zones_nodescription:
2118 print(" %s zones with no description"%zones_nodescription)
2119 if zones_noforecast:
2120 print(" %s zones with no forecast"%zones_noforecast)
2121 if zones_overlapping:
2122 print(" %s zones within one km of another"%zones_overlapping)
2123 else: print("no issues found.")
2124 print("Indexing complete!")