"""Contains various object definitions needed by the weather utility."""

weather_copyright = """\
# Copyright (c) 2006-2016 Jeremy Stanley <fungi@yuggoth.org>. Permission to
# use, copy, modify, and distribute this software is granted under terms
# provided in the LICENSE file distributed with this software.
#"""

# release version reported by the --version option
weather_version = "2.3"

# great-circle conversion factors: multiply an angular distance in radians
# by one of these to get kilometers or (statute) miles on the Earth's surface
radian_to_km = 6372.795484
radian_to_mi = 3959.871528
def pyversion(ref=None):
    """Determine the Python version and optionally compare to a reference.

    With no argument, return the full interpreter version string.  With a
    reference like "3" or "3.2", return True when the running interpreter's
    (major, minor) is at least the reference's.
    """
    import platform
    ver = platform.python_version()
    if ref:
        # compare only the major/minor components, numerically
        return [
            int(x) for x in ver.split(".")[:2]
        ] >= [
            int(x) for x in ref.split(".")[:2]
        ]
    else: return ver
27 """An object to contain selection data."""
29 """Store the config, options and arguments."""
30 self.config = get_config()
31 self.options, self.arguments = get_options(self.config)
32 if self.get_bool("cache") and self.get_bool("cache_search") \
33 and not self.get_bool("longlist"):
34 integrate_search_cache(
39 if not self.arguments:
40 if "id" in self.options.__dict__ \
41 and self.options.__dict__["id"]:
42 self.arguments.append( self.options.__dict__["id"] )
43 del( self.options.__dict__["id"] )
45 message = "WARNING: the --id option is deprecated and will eventually be removed\n"
46 sys.stderr.write(message)
47 elif "city" in self.options.__dict__ \
48 and self.options.__dict__["city"] \
49 and "st" in self.options.__dict__ \
50 and self.options.__dict__["st"]:
51 self.arguments.append(
53 self.options.__dict__["city"],
54 self.options.__dict__["st"]
57 del( self.options.__dict__["city"] )
58 del( self.options.__dict__["st"] )
60 message = "WARNING: the --city/--st options are deprecated and will eventually be removed\n"
61 sys.stderr.write(message)
def get(self, option, argument=None):
    """Retrieve data from the config or options.

    Precedence: an alias/search section in the config (guessing and caching
    one via guess() on first use of an unknown argument), then the parsed
    command-line options; exits with an error if the option is found nowhere.
    """
    if argument is not None:
        # city/id/st keys are no longer honored inside alias sections;
        # drop any section still carrying them so it gets re-guessed
        if self.config.has_section(argument) and (
            self.config.has_option(argument, "city")
                or self.config.has_option(argument, "id")
                or self.config.has_option(argument, "st")
            ):
            self.config.remove_section(argument)
            import sys
            message = "WARNING: the city/id/st options are now unsupported in aliases\n"
            sys.stderr.write(message)
        # unknown argument: perform a search and memoize the result in
        # the config so later lookups are cheap
        if not self.config.has_section(argument):
            guessed = guess(
                argument,
                path=self.get("setpath"),
                info=self.get("info"),
                cache_search=(
                    self.get("cache") and self.get("cache_search")
                ),
                cachedir=self.get("cachedir"),
                quiet=self.get_bool("quiet")
            )
            self.config.add_section(argument)
            for item in guessed.items():
                self.config.set(argument, *item)
        if self.config.has_option(argument, option):
            return self.config.get(argument, option)
    # fall back to the command-line option of the same name
    if option in self.options.__dict__:
        return self.options.__dict__[option]
    import os, sys
    message = "%s error: no URI defined for %s\n" % (
        os.path.basename( sys.argv[0] ),
        option
    )
    sys.stderr.write(message)
    sys.exit(1)
def get_bool(self, option, argument=None):
    """Get data and coerce to a boolean if necessary."""
    return bool(self.get(option, argument))
def getint(self, option, argument=None):
    """Get data and coerce to an integer if necessary.

    Empty/falsy values (e.g. "" from the config) coerce to 0 rather
    than raising ValueError.
    """
    value = self.get(option, argument)
    if value: return int(value)
    else: return 0
def average(coordinates):
    """Average a list of (x, y) coordinate pairs.

    Returns the arithmetic mean as an (x, y) tuple; assumes at least one
    coordinate pair is supplied.
    """
    x = 0
    y = 0
    count = 0
    for coordinate in coordinates:
        x += coordinate[0]
        y += coordinate[1]
        count += 1
    return (x/count, y/count)
119 def filter_units(line, units="imperial"):
120 """Filter or convert units in a line of text between US/UK and metric."""
122 # filter lines with both pressures in the form of "X inches (Y hPa)" or
125 "(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
129 preamble, in_hg, i_fr, i_un, hpa, h_fr, trailer = dual_p.groups()
130 if units == "imperial": line = preamble + in_hg + trailer
131 elif units == "metric": line = preamble + hpa + trailer
132 # filter lines with both temperatures in the form of "X F (Y C)"
134 "(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
138 preamble, fahrenheit, f_fr, celsius, c_fr, trailer = dual_t.groups()
139 if units == "imperial": line = preamble + fahrenheit + trailer
140 elif units == "metric": line = preamble + celsius + trailer
141 # if metric is desired, convert distances in the form of "X mile(s)" to
143 if units == "metric":
144 imperial_d = re.match(
145 "(.* )(\d+)( mile\(s\))(.*)",
149 preamble, mi, m_u, trailer = imperial_d.groups()
150 line = preamble + str(int(round(int(mi)*1.609344))) \
151 + " kilometer(s)" + trailer
152 # filter speeds in the form of "X MPH (Y KT)" to just "X MPH"; if metric is
153 # desired, convert to "Z KPH"
154 imperial_s = re.match(
155 "(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
159 preamble, mph, m_u, kt, trailer = imperial_s.groups()
160 if units == "imperial": line = preamble + mph + m_u + trailer
161 elif units == "metric":
162 line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
164 imperial_s = re.match(
165 "(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
169 preamble, mph, m_u, kt, trailer = imperial_s.groups()
170 if units == "imperial": line = preamble + mph + m_u + trailer
171 elif units == "metric":
172 line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
174 # if imperial is desired, qualify given forcast temperatures like "X F"; if
175 # metric is desired, convert to "Y C"
176 imperial_t = re.match(
177 "(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
181 preamble, parameter, fahrenheit, sep, trailer = imperial_t.groups()
182 if units == "imperial":
183 line = preamble + parameter + fahrenheit + " F" + sep + trailer
184 elif units == "metric":
185 line = preamble + parameter \
186 + str(int(round((int(fahrenheit)-32)*5/9))) + " C" + sep \
188 # hand off the resulting line
def get_uri(
    uri,
    ignore_fail=False,
    cache_data=False,
    cacheage=900,
    cachedir="."
):
    """Return a string containing the results of a URI GET.

    When cache_data is true, responses are stored under
    <cachedir>/datacache and reused while younger than cacheage seconds.
    On retrieval failure, returns "" if ignore_fail is set, otherwise
    writes a diagnostic to stderr and exits.
    """
    # select the urlopen/URLError names for the running interpreter
    if pyversion("3"):
        import urllib, urllib.error, urllib.request
        URLError = urllib.error.URLError
        urlopen = urllib.request.urlopen
    else:
        import urllib2 as urllib
        URLError = urllib.URLError
        urlopen = urllib.urlopen
    import os, time
    if cache_data:
        dcachedir = os.path.join( os.path.expanduser(cachedir), "datacache" )
        if not os.path.exists(dcachedir):
            # best-effort; a read-only cachedir just disables caching
            try: os.makedirs(dcachedir)
            except (IOError, OSError): pass
        # derive a flat cache file name from the URI
        dcache_fn = os.path.join(
            dcachedir,
            uri.split(":")[1].replace("/","_")
        )
    now = time.time()
    if cache_data and os.access(dcache_fn, os.R_OK) \
        and now-cacheage < os.stat(dcache_fn).st_mtime <= now:
        dcache_fd = open(dcache_fn)
        data = dcache_fd.read()
        dcache_fd.close()
    else:
        try:
            if pyversion("3"): data = urlopen(uri).read().decode("utf-8")
            else: data = urlopen(uri).read()
        except URLError:
            if ignore_fail: return ""
            import os, sys, traceback
            # sys.exc_info() works on both py2 and py3 (unlike the removed
            # sys.exc_type/sys.exc_value attributes)
            message = "%s error: failed to retrieve\n   %s\n   %s" % (
                os.path.basename( sys.argv[0] ),
                uri,
                traceback.format_exception_only(
                    sys.exc_info()[0],
                    sys.exc_info()[1]
                )[0]
            )
            sys.stderr.write(message)
            sys.exit(1)
        if cache_data:
            try:
                import codecs
                dcache_fd = codecs.open(dcache_fn, "w", "utf-8")
                dcache_fd.write(data)
                dcache_fd.close()
            except (IOError, OSError): pass
    return data
261 """Return a summarized METAR for the specified station."""
264 message = "%s error: METAR URI required for conditions\n" % \
265 os.path.basename( sys.argv[0] )
266 sys.stderr.write(message)
270 cache_data=cache_data,
274 if pyversion("3") and type(metar) is bytes: metar = metar.decode("utf-8")
275 if verbose: return metar
278 lines = metar.split("\n")
281 "relative_humidity," \
282 + "precipitation_last_hour," \
283 + "sky conditions," \
289 headerlist = headers.lower().replace("_"," ").split(",")
292 title = "Current conditions at %s"
293 place = lines[0].split(", ")
295 place = "%s, %s" % ( place[0].title(), place[1] )
296 else: place = "<UNKNOWN>"
297 output.append(title%place)
298 output.append("Last updated " + lines[1])
300 for header in headerlist:
302 if line.lower().startswith(header + ":"):
303 if re.match(r".*:\d+$", line): line = line[:line.rfind(":")]
304 if imperial: line = filter_units(line, units="imperial")
305 elif metric: line = filter_units(line, units="metric")
306 if quiet: output.append(line)
307 else: output.append(" " + line)
311 "(no conditions matched your header list, try with --verbose)"
313 return "\n".join(output)
323 """Return alert notice for the specified URI."""
326 message = "%s error: Alert URI required for alerts\n" % \
327 os.path.basename( sys.argv[0] )
328 sys.stderr.write(message)
333 cache_data=cache_data,
337 if pyversion("3") and type(alert) is bytes: alert = alert.decode("utf-8")
339 if verbose: return alert
341 if alert.find("\nNATIONAL WEATHER SERVICE") == -1:
345 lines = alert.split("\n")
347 valid_time = time.strftime("%Y%m%d%H%M")
350 if line.startswith("Expires:") \
351 and "Expires:" + valid_time > line:
353 if muted and line.startswith("NATIONAL WEATHER SERVICE"):
360 if line and not muted:
361 if quiet: output.append(line)
362 else: output.append(" " + line)
363 return "\n".join(output)
def get_options(config):
    """Parse the options passed on the command line.

    Defaults come from the [default] section of the supplied config when
    present.  Returns the (options, arguments) pair from optparse.
    """
    import optparse

    # usage message, for optparse's builtin -h/--help option
    usage = \
        "usage: %prog [options] [alias1|search1 [alias2|search2 [...]]]"

    # version string, for optparse's builtin --version option
    verstring = "%prog " + weather_version

    # create the parser
    option_parser = optparse.OptionParser(usage=usage, version=verstring)
    # separate options object from list of arguments and return both

    # the -a/--alert option
    if config.has_option("default", "alert"):
        default_alert = bool(config.get("default", "alert"))
    else: default_alert = False
    option_parser.add_option("-a", "--alert",
        dest="alert",
        action="store_true",
        default=default_alert,
        help="include local alert notices")

    # the --atypes option
    if config.has_option("default", "atypes"):
        default_atypes = config.get("default", "atypes")
    else:
        default_atypes = \
            "coastal_flood_statement," \
            + "flash_flood_statement," \
            + "flash_flood_warning," \
            + "flash_flood_watch," \
            + "flood_statement," \
            + "flood_warning," \
            + "marine_weather_statement," \
            + "river_statement," \
            + "severe_thunderstorm_warning," \
            + "severe_weather_statement," \
            + "short_term_forecast," \
            + "special_marine_warning," \
            + "special_weather_statement," \
            + "tornado_warning," \
            + "urgent_weather_message"
    option_parser.add_option("--atypes",
        dest="atypes",
        default=default_atypes,
        help="list of alert notification types to display")

    # the --build-sets option
    option_parser.add_option("--build-sets",
        dest="build_sets",
        action="store_true",
        default=False,
        help="(re)build location correlation sets")

    # the --cacheage option
    if config.has_option("default", "cacheage"):
        default_cacheage = config.getint("default", "cacheage")
    else: default_cacheage = 900
    option_parser.add_option("--cacheage",
        dest="cacheage",
        default=default_cacheage,
        help="duration in seconds to refresh cached data")

    # the --cachedir option
    if config.has_option("default", "cachedir"):
        default_cachedir = config.get("default", "cachedir")
    else: default_cachedir = "~/.weather"
    option_parser.add_option("--cachedir",
        dest="cachedir",
        default=default_cachedir,
        help="directory for storing cached searches and data")

    # the -f/--forecast option
    if config.has_option("default", "forecast"):
        default_forecast = bool(config.get("default", "forecast"))
    else: default_forecast = False
    option_parser.add_option("-f", "--forecast",
        dest="forecast",
        action="store_true",
        default=default_forecast,
        help="include a local forecast")

    # the --headers option
    if config.has_option("default", "headers"):
        default_headers = config.get("default", "headers")
    else:
        default_headers = \
            "temperature," \
            + "relative_humidity," \
            + "wind," \
            + "heat_index," \
            + "windchill," \
            + "weather," \
            + "sky_conditions," \
            + "precipitation_last_hour"
    option_parser.add_option("--headers",
        dest="headers",
        default=default_headers,
        help="list of conditions headers to display")

    # the --imperial option
    if config.has_option("default", "imperial"):
        default_imperial = bool(config.get("default", "imperial"))
    else: default_imperial = False
    option_parser.add_option("--imperial",
        dest="imperial",
        action="store_true",
        default=default_imperial,
        help="filter/convert conditions for US/UK units")

    # the --info option
    option_parser.add_option("--info",
        dest="info",
        action="store_true",
        default=False,
        help="output detailed information for your search")

    # the -l/--list option
    option_parser.add_option("-l", "--list",
        dest="list",
        action="store_true",
        default=False,
        help="list all configured aliases and cached searches")

    # the --longlist option
    option_parser.add_option("--longlist",
        dest="longlist",
        action="store_true",
        default=False,
        help="display details of all configured aliases")

    # the -m/--metric option
    if config.has_option("default", "metric"):
        default_metric = bool(config.get("default", "metric"))
    else: default_metric = False
    option_parser.add_option("-m", "--metric",
        dest="metric",
        action="store_true",
        default=default_metric,
        help="filter/convert conditions for metric units")

    # the -n/--no-conditions option
    if config.has_option("default", "conditions"):
        default_conditions = bool(config.get("default", "conditions"))
    else: default_conditions = True
    option_parser.add_option("-n", "--no-conditions",
        dest="conditions",
        action="store_false",
        default=default_conditions,
        help="disable output of current conditions")

    # the --no-cache option
    if config.has_option("default", "cache"):
        default_cache = bool(config.get("default", "cache"))
    else: default_cache = True
    option_parser.add_option("--no-cache",
        dest="cache",
        action="store_false",
        default=default_cache,
        help="disable all caching (searches and data)")

    # the --no-cache-data option
    if config.has_option("default", "cache_data"):
        default_cache_data = bool(config.get("default", "cache_data"))
    else: default_cache_data = True
    option_parser.add_option("--no-cache-data",
        dest="cache_data",
        action="store_false",
        default=default_cache_data,
        help="disable retrieved data caching")

    # the --no-cache-search option
    if config.has_option("default", "cache_search"):
        default_cache_search = bool(config.get("default", "cache_search"))
    else: default_cache_search = True
    option_parser.add_option("--no-cache-search",
        dest="cache_search",
        action="store_false",
        default=default_cache_search,
        help="disable search result caching")

    # the -q/--quiet option
    if config.has_option("default", "quiet"):
        default_quiet = bool(config.get("default", "quiet"))
    else: default_quiet = False
    option_parser.add_option("-q", "--quiet",
        dest="quiet",
        action="store_true",
        default=default_quiet,
        help="skip preambles and don't indent")

    # the --setpath option
    if config.has_option("default", "setpath"):
        default_setpath = config.get("default", "setpath")
    else: default_setpath = ".:~/.weather"
    option_parser.add_option("--setpath",
        dest="setpath",
        default=default_setpath,
        help="directory search path for correlation sets")

    # the -v/--verbose option
    if config.has_option("default", "verbose"):
        default_verbose = bool(config.get("default", "verbose"))
    else: default_verbose = False
    option_parser.add_option("-v", "--verbose",
        dest="verbose",
        action="store_true",
        default=default_verbose,
        help="show full decoded feeds")

    # deprecated options, hidden from --help output
    if config.has_option("default", "city"):
        default_city = config.get("default", "city")
    else: default_city = ""
    option_parser.add_option("-c", "--city",
        dest="city",
        default=default_city,
        help=optparse.SUPPRESS_HELP)
    if config.has_option("default", "id"):
        default_id = config.get("default", "id")
    else: default_id = ""
    option_parser.add_option("-i", "--id",
        dest="id",
        default=default_id,
        help=optparse.SUPPRESS_HELP)
    if config.has_option("default", "st"):
        default_st = config.get("default", "st")
    else: default_st = ""
    option_parser.add_option("-s", "--st",
        dest="st",
        default=default_st,
        help=optparse.SUPPRESS_HELP)

    # separate options object from list of arguments and return both
    options, arguments = option_parser.parse_args()
    return options, arguments
def get_config():
    """Parse the aliases and configuration.

    Reads every readable rc file in precedence order, then normalizes
    section names to lower case so alias lookups are case-insensitive.
    """
    if pyversion("3"): import configparser
    else: import ConfigParser as configparser
    config = configparser.ConfigParser()
    import os
    rcfiles = [
        "/etc/weatherrc",
        "/etc/weather/weatherrc",
        os.path.expanduser("~/.weather/weatherrc"),
        os.path.expanduser("~/.weatherrc"),
        "weatherrc"
    ]
    for rcfile in rcfiles:
        if os.access(rcfile, os.R_OK): config.read(rcfile)
    # fold any mixed-case section names down to lower case
    for section in config.sections():
        if section != section.lower():
            if config.has_section(section.lower()):
                config.remove_section(section.lower())
            config.add_section(section.lower())
            for option,value in config.items(section):
                config.set(section.lower(), option, value)
    return config
def integrate_search_cache(config, cachedir, setpath):
    """Add cached search results into the configuration.

    Discards the search cache file when it predates the freshest data
    file in setpath (the cached answers may be stale); otherwise merges
    any cached sections the config does not already define.
    """
    if pyversion("3"): import configparser
    else: import ConfigParser as configparser
    import os, time
    scache_fn = os.path.join( os.path.expanduser(cachedir), "searches" )
    if not os.access(scache_fn, os.R_OK): return config
    # first line of the cache records its creation time
    scache_fd = open(scache_fn)
    created = float( scache_fd.readline().split(":")[1].strip().split()[0] )
    scache_fd.close()
    now = time.time()
    datafiles = data_index(setpath)
    if datafiles:
        data_freshness = sorted(
            [ x[1] for x in datafiles.values() ],
            reverse=True
        )[0]
    else: data_freshness = now
    if created < data_freshness <= now:
        # cache is older than the newest data file: clear it (best-effort)
        try:
            os.remove(scache_fn)
            print( "[clearing outdated %s]" % scache_fn )
        except (IOError, OSError):
            pass
        return config
    scache = configparser.ConfigParser()
    scache.read(scache_fn)
    for section in scache.sections():
        if not config.has_section(section):
            config.add_section(section)
            for option,value in scache.items(section):
                config.set(section, option, value)
    return config
def list_aliases(config, detail=False):
    """Return a formatted list of aliases defined in the config.

    With detail, emit full rc-style sections; otherwise one summary line
    per alias using its description option when available.
    """
    if detail:
        output = "\n# configured alias details..."
        for section in sorted(config.sections()):
            output += "\n\n[%s]" % section
            for item in sorted(config.items(section)):
                output += "\n%s = %s" % item
        output += "\n"
    else:
        output = "configured aliases and cached searches..."
        for section in sorted(config.sections()):
            if config.has_option(section, "description"):
                description = config.get(section, "description")
            else: description = "(no description provided)"
            output += "\n   %s: %s" % (section, description)
    return output
def data_index(path):
    """Find the available data files within a colon-separated search path.

    Returns a dict mapping each data set name to a (filename, mtime)
    tuple for the first match found; first directory wins, and within a
    directory the uncompressed name is preferred over .gz then .txt.
    """
    import os
    datafiles = {}
    for filename in ("airports", "places", "stations", "zctas", "zones"):
        for dirname in path.split(":"):
            for extension in ("", ".gz", ".txt"):
                candidate = os.path.expanduser(
                    os.path.join( dirname, "".join( (filename, extension) ) )
                )
                if os.path.exists(candidate):
                    datafiles[filename] = (
                        candidate,
                        os.stat(candidate).st_mtime
                    )
                    break
            # stop at the first directory that provided this data set
            if filename in datafiles:
                break
    return datafiles
709 """Find URIs using airport, gecos, placename, station, ZCTA/ZIP, zone."""
710 import codecs, datetime, time, os, re, sys
711 if pyversion("3"): import configparser
712 else: import ConfigParser as configparser
713 datafiles = data_index(path)
714 if re.match("[A-Za-z]{3}$", expression): searchtype = "airport"
715 elif re.match("[A-Za-z0-9]{4}$", expression): searchtype = "station"
716 elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", expression): searchtype = "zone"
717 elif re.match("[0-9]{5}$", expression): searchtype = "ZCTA"
719 r"[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?, *[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?$",
722 searchtype = "coordinates"
723 elif re.match(r"(FIPS|fips)\d+$", expression): searchtype = "FIPS"
727 if cache_search: action = "caching"
728 else: action = "using"
737 (0.995, "excellent"),
740 if not quiet: print("Searching via %s..."%searchtype)
741 stations = configparser.ConfigParser()
742 dataname = "stations"
743 if dataname in datafiles:
744 datafile = datafiles[dataname][0]
745 if datafile.endswith(".gz"):
748 stations.read_string(
749 gzip.open(datafile).read().decode("utf-8") )
750 else: stations.readfp( gzip.open(datafile) )
752 stations.read(datafile)
754 message = "%s error: can't find \"%s\" data file\n" % (
755 os.path.basename( sys.argv[0] ),
758 sys.stderr.write(message)
760 zones = configparser.ConfigParser()
762 if dataname in datafiles:
763 datafile = datafiles[dataname][0]
764 if datafile.endswith(".gz"):
767 zones.read_string( gzip.open(datafile).read().decode("utf-8") )
768 else: zones.readfp( gzip.open(datafile) )
772 message = "%s error: can't find \"%s\" data file\n" % (
773 os.path.basename( sys.argv[0] ),
776 sys.stderr.write(message)
784 if searchtype == "airport":
785 expression = expression.lower()
786 airports = configparser.ConfigParser()
787 dataname = "airports"
788 if dataname in datafiles:
789 datafile = datafiles[dataname][0]
790 if datafile.endswith(".gz"):
793 airports.read_string(
794 gzip.open(datafile).read().decode("utf-8") )
795 else: airports.readfp( gzip.open(datafile) )
797 airports.read(datafile)
799 message = "%s error: can't find \"%s\" data file\n" % (
800 os.path.basename( sys.argv[0] ),
803 sys.stderr.write(message)
805 if airports.has_section(expression) \
806 and airports.has_option(expression, "station"):
807 search = (expression, "IATA/FAA airport code %s" % expression)
808 station = ( airports.get(expression, "station"), 0 )
809 if stations.has_option(station[0], "zone"):
810 zone = eval( stations.get(station[0], "zone") )
812 if not ( info or quiet ) \
813 and stations.has_option( station[0], "description" ):
817 stations.get(station[0], "description")
821 message = "No IATA/FAA airport code \"%s\" in the %s file.\n" % (
823 datafiles["airports"][0]
825 sys.stderr.write(message)
827 elif searchtype == "station":
828 expression = expression.lower()
829 if stations.has_section(expression):
830 station = (expression, 0)
832 search = (expression, "ICAO station code %s" % expression)
833 if stations.has_option(expression, "zone"):
834 zone = eval( stations.get(expression, "zone") )
836 if not ( info or quiet ) \
837 and stations.has_option(expression, "description"):
841 stations.get(expression, "description")
845 message = "No ICAO weather station \"%s\" in the %s file.\n" % (
847 datafiles["stations"][0]
849 sys.stderr.write(message)
851 elif searchtype == "zone":
852 expression = expression.lower()
853 if zones.has_section(expression) \
854 and zones.has_option(expression, "station"):
855 zone = (expression, 0)
856 station = eval( zones.get(expression, "station") )
858 search = (expression, "NWS/NOAA weather zone %s" % expression)
859 if not ( info or quiet ) \
860 and zones.has_option(expression, "description"):
864 zones.get(expression, "description")
868 message = "No usable NWS weather zone \"%s\" in the %s file.\n" % (
870 datafiles["zones"][0]
872 sys.stderr.write(message)
874 elif searchtype == "ZCTA":
875 zctas = configparser.ConfigParser()
877 if dataname in datafiles:
878 datafile = datafiles[dataname][0]
879 if datafile.endswith(".gz"):
883 gzip.open(datafile).read().decode("utf-8") )
884 else: zctas.readfp( gzip.open(datafile) )
888 message = "%s error: can't find \"%s\" data file\n" % (
889 os.path.basename( sys.argv[0] ),
892 sys.stderr.write(message)
895 if zctas.has_section(expression) \
896 and zctas.has_option(expression, "station"):
897 station = eval( zctas.get(expression, "station") )
898 search = (expression, "Census ZCTA (ZIP code) %s" % expression)
899 if zctas.has_option(expression, "zone"):
900 zone = eval( zctas.get(expression, "zone") )
902 message = "No census ZCTA (ZIP code) \"%s\" in the %s file.\n" % (
904 datafiles["zctas"][0]
906 sys.stderr.write(message)
908 elif searchtype == "coordinates":
909 search = (expression, "Geographic coordinates %s" % expression)
911 for station in stations.sections():
912 if stations.has_option(station, "location"):
913 stationtable[station] = {
914 "location": eval( stations.get(station, "location") )
916 station = closest( gecos(expression), stationtable, "location", 0.1 )
918 message = "No ICAO weather station found near %s.\n" % expression
919 sys.stderr.write(message)
922 for zone in zones.sections():
923 if zones.has_option(zone, "centroid"):
925 "centroid": eval( zones.get(zone, "centroid") )
927 zone = closest( gecos(expression), zonetable, "centroid", 0.1 )
929 message = "No NWS weather zone near %s; forecasts unavailable.\n" \
931 sys.stderr.write(message)
932 elif searchtype in ("FIPS", "name"):
933 places = configparser.ConfigParser()
935 if dataname in datafiles:
936 datafile = datafiles[dataname][0]
937 if datafile.endswith(".gz"):
941 gzip.open(datafile).read().decode("utf-8") )
942 else: places.readfp( gzip.open(datafile) )
944 places.read(datafile)
946 message = "%s error: can't find \"%s\" data file\n" % (
947 os.path.basename( sys.argv[0] ),
950 sys.stderr.write(message)
953 place = expression.lower()
954 if places.has_section(place) and places.has_option(place, "station"):
955 station = eval( places.get(place, "station") )
956 search = (expression, "Census Place %s" % expression)
957 if places.has_option(place, "description"):
960 search[1] + ", %s" % places.get(place, "description")
962 if places.has_option(place, "zone"):
963 zone = eval( places.get(place, "zone") )
964 if not ( info or quiet ) \
965 and places.has_option(place, "description"):
969 places.get(place, "description")
973 for place in places.sections():
974 if places.has_option(place, "description") \
975 and places.has_option(place, "station") \
978 places.get(place, "description"),
981 possibilities.append(place)
982 for place in stations.sections():
983 if stations.has_option(place, "description") \
986 stations.get(place, "description"),
989 possibilities.append(place)
990 for place in zones.sections():
991 if zones.has_option(place, "description") \
992 and zones.has_option(place, "station") \
995 zones.get(place, "description"),
998 possibilities.append(place)
999 if len(possibilities) == 1:
1000 place = possibilities[0]
1001 if places.has_section(place):
1002 station = eval( places.get(place, "station") )
1003 description = places.get(place, "description")
1004 if places.has_option(place, "zone"):
1005 zone = eval( places.get(place, "zone" ) )
1006 search = ( expression, "%s: %s" % (place, description) )
1007 elif stations.has_section(place):
1008 station = (place, 0.0)
1009 description = stations.get(place, "description")
1010 if stations.has_option(place, "zone"):
1011 zone = eval( stations.get(place, "zone" ) )
1012 search = ( expression, "ICAO station code %s" % place )
1013 elif zones.has_section(place):
1014 station = eval( zones.get(place, "station") )
1015 description = zones.get(place, "description")
1017 search = ( expression, "NWS/NOAA weather zone %s" % place )
1018 if not ( info or quiet ):
1019 print( "[%s result %s]" % (action, description) )
1020 if not possibilities and not station[0]:
1021 message = "No FIPS code/census area match in the %s file.\n" % (
1022 datafiles["places"][0]
1024 sys.stderr.write(message)
1027 uris["metar"] = stations.get( station[0], "metar" )
1029 for key,value in zones.items( zone[0] ):
1030 if key not in ("centroid", "description", "station"):
1033 count = len(possibilities)
1034 if count <= max_results:
1035 print( "Your search is ambiguous, returning %s matches:" % count )
1036 for place in sorted(possibilities):
1037 if places.has_section(place):
1041 places.get(place, "description")
1044 elif stations.has_section(place):
1048 stations.get(place, "description")
1051 elif zones.has_section(place):
1055 zones.get(place, "description")
1060 "Your search is too ambiguous, returning %s matches." % count
1067 for section in dataset.sections():
1068 if dataset.has_option(section, "station"):
1070 eval( dataset.get(section, "station") )[1]
1072 if dataset.has_option(section, "zone"):
1073 zonelist.append( eval( dataset.get(section, "zone") )[1] )
1076 scount = len(stationlist)
1077 zcount = len(zonelist)
1080 for score in scores:
1082 sranks.append( stationlist[ int( (1-score[0]) * scount ) ] )
1084 zranks.append( zonelist[ int( (1-score[0]) * zcount ) ] )
1085 description = search[1]
1086 uris["description"] = description
1088 "%s\n%s" % ( description, "-" * len(description) )
1093 stations.get( station[0], "description" )
1096 km = radian_to_km*station[1]
1097 mi = radian_to_mi*station[1]
1098 if sranks and not description.startswith("ICAO station code "):
1099 for index in range(0, len(scores)):
1100 if station[1] >= sranks[index]:
1101 score = scores[index][1]
1104 " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1106 elif searchtype is "coordinates":
1107 print( " (%.3gkm, %.3gmi)" % (km, mi) )
1110 "%s: %s" % ( zone[0], zones.get( zone[0], "description" ) )
1112 km = radian_to_km*zone[1]
1113 mi = radian_to_mi*zone[1]
1114 if zranks and not description.startswith("NWS/NOAA weather zone "):
1115 for index in range(0, len(scores)):
1116 if zone[1] >= zranks[index]:
1117 score = scores[index][1]
1120 " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1122 elif searchtype is "coordinates" and zone[0]:
1123 print( " (%.3gkm, %.3gmi)" % (km, mi) )
1126 nowstamp = "%s (%s)" % (
1128 datetime.datetime.isoformat(
1129 datetime.datetime.fromtimestamp(now),
1133 search_cache = ["\n"]
1134 search_cache.append( "[%s]\n" % search[0] )
1135 search_cache.append( "description = cached %s\n" % nowstamp )
1136 for uriname in sorted(uris.keys()):
1137 search_cache.append( "%s = %s\n" % ( uriname, uris[uriname] ) )
1138 real_cachedir = os.path.expanduser(cachedir)
1139 if not os.path.exists(real_cachedir):
1140 try: os.makedirs(real_cachedir)
1141 except (IOError, OSError): pass
1142 scache_fn = os.path.join(real_cachedir, "searches")
1143 if not os.path.exists(scache_fn):
1145 [ x[1] for x in datafiles.values() ],
1148 thenstamp = "%s (%s)" % (
1150 datetime.datetime.isoformat(
1151 datetime.datetime.fromtimestamp(then),
1155 search_cache.insert(
1157 "# based on data files from: %s\n" % thenstamp
1160 scache_existing = configparser.ConfigParser()
1161 scache_existing.read(scache_fn)
1162 if not scache_existing.has_section(search[0]):
1163 scache_fd = codecs.open(scache_fn, "a", "utf-8")
1164 scache_fd.writelines(search_cache)
1166 except (IOError, OSError): pass
def closest(position, nodes, fieldname, angle=None):
    """Find the node nearest the given (lat, lon) position in radians.

    nodes maps names to dicts; each dict's fieldname entry, when present,
    is a (lat, lon) pair in radians.  angle bounds the search radius (in
    radians, default the whole sphere).  Returns (name, distance) where
    name is None if nothing fell within the bound.
    """
    import math
    if not angle: angle = 2*math.pi
    match = None
    for name in nodes:
        if fieldname in nodes[name]:
            node = nodes[name][fieldname]
            # cheap bounding-box rejection before the trig below
            if node and abs( position[0]-node[0] ) < angle:
                if abs( position[1]-node[1] ) < angle \
                    or abs( abs( position[1]-node[1] ) - 2*math.pi ) < angle:
                    if position == node:
                        angle = 0
                        match = name
                    else:
                        # great-circle angular distance (spherical law of
                        # cosines)
                        candidate = math.acos(
                            math.sin( position[0] ) * math.sin( node[0] ) \
                                + math.cos( position[0] ) \
                                * math.cos( node[0] ) \
                                * math.cos( position[1] - node[1] )
                        )
                        if candidate < angle:
                            angle = candidate
                            match = name
    if match: match = str(match)
    return (match, angle)
def gecos(formatted):
    """Convert a "lat, lon" position string into a radians tuple.

    Each coordinate may be decimal degrees or degrees-minutes[-seconds]
    joined by hyphens, optionally suffixed with an N/E/S/W hemisphere
    letter (case-insensitive); S and W negate the value.
    """
    import math, re
    coordinates = formatted.split(",")
    for coordinate in range(0, 2):
        degrees, foo, minutes, bar, seconds, hemisphere = re.match(
            r"([\+-]?\d+\.?\d*)(-(\d+))?(-(\d+))?([ensw]?)$",
            coordinates[coordinate].strip().lower()
        ).groups()
        value = float(degrees)
        if minutes: value += float(minutes)/60
        if seconds: value += float(seconds)/3600
        if hemisphere and hemisphere in "sw": value *= -1
        coordinates[coordinate] = math.radians(value)
    return tuple(coordinates)
1212 import codecs, datetime, hashlib, os, re, sys, tarfile, time, zipfile
1213 if pyversion("3"): import configparser
1214 else: import ConfigParser as configparser
1215 gcounties_an = "2015_Gaz_counties_national.zip"
1216 gcounties_fn = "2015_Gaz_counties_national.txt"
1217 gcousubs_an = "2015_Gaz_cousubs_national.zip"
1218 gcousubs_fn = "2015_Gaz_cousubs_national.txt"
1219 gplace_an = "2015_Gaz_place_national.zip"
1220 gplace_fn = "2015_Gaz_place_national.txt"
1221 gzcta_an = "2015_Gaz_zcta_national.zip"
1222 gzcta_fn = "2015_Gaz_zcta_national.txt"
1223 for filename in os.listdir("."):
1224 if re.match("bp[0-9][0-9][a-z][a-z][0-9][0-9].dbx$", filename):
1225 cpfzcf_fn = filename
1227 nsdcccc_fn = "nsd_cccc.txt"
1228 zcatalog_an = "zonecatalog.curr.tar"
1229 metartbl_fn = "metar.tbl"
1230 coopstn_fn = "coop-stations.txt"
1231 overrides_fn = "overrides.conf"
1232 overrideslog_fn = "overrides.log"
1236 airports_fn = "airports"
1237 places_fn = "places"
1238 stations_fn = "stations"
1243 # generated by %s on %s from these public domain sources:
1245 # http://www.census.gov/geo/maps-data/data/gazetteer2015.html
1251 # http://www.weather.gov/geodata/catalog/wsom/html/cntyzone.htm
1254 # http://tgftp.nws.noaa.gov/data/nsd_cccc.txt
1257 # http://tgftp.nws.noaa.gov/data/zonecatalog.curr.tar
1260 # http://www.nco.ncep.noaa.gov/pmb/codes/nwprod/dictionaries/metar.tbl
1263 # http://www.ncdc.noaa.gov/homr/reports
1266 # ...and these manually-generated or hand-compiled adjustments:
1272 os.path.basename( sys.argv[0] ),
1273 datetime.date.isoformat(
1274 datetime.datetime.fromtimestamp( time.time() )
1276 hashlib.md5( open(gcounties_an, "rb").read() ).hexdigest(),
1277 datetime.date.isoformat(
1278 datetime.datetime.fromtimestamp( os.path.getmtime(gcounties_an) )
1281 hashlib.md5( open(gcousubs_an, "rb").read() ).hexdigest(),
1282 datetime.date.isoformat(
1283 datetime.datetime.fromtimestamp( os.path.getmtime(gcousubs_an) )
1286 hashlib.md5( open(gplace_an, "rb").read() ).hexdigest(),
1287 datetime.date.isoformat(
1288 datetime.datetime.fromtimestamp( os.path.getmtime(gplace_an) )
1291 hashlib.md5( open(gzcta_an, "rb").read() ).hexdigest(),
1292 datetime.date.isoformat(
1293 datetime.datetime.fromtimestamp( os.path.getmtime(gzcta_an) )
1296 hashlib.md5( open(cpfzcf_fn, "rb").read() ).hexdigest(),
1297 datetime.date.isoformat(
1298 datetime.datetime.fromtimestamp( os.path.getmtime(cpfzcf_fn) )
1301 hashlib.md5( open(nsdcccc_fn, "rb").read() ).hexdigest(),
1302 datetime.date.isoformat(
1303 datetime.datetime.fromtimestamp( os.path.getmtime(nsdcccc_fn) )
1306 hashlib.md5( open(zcatalog_an, "rb").read() ).hexdigest(),
1307 datetime.date.isoformat(
1308 datetime.datetime.fromtimestamp( os.path.getmtime(zcatalog_an) )
1311 hashlib.md5( open(metartbl_fn, "rb").read() ).hexdigest(),
1312 datetime.date.isoformat(
1313 datetime.datetime.fromtimestamp( os.path.getmtime(metartbl_fn) )
1316 hashlib.md5( open(coopstn_fn, "rb").read() ).hexdigest(),
1317 datetime.date.isoformat(
1318 datetime.datetime.fromtimestamp( os.path.getmtime(coopstn_fn) )
1321 hashlib.md5( open(overrides_fn, "rb").read() ).hexdigest(),
1322 datetime.date.isoformat(
1323 datetime.datetime.fromtimestamp( os.path.getmtime(overrides_fn) )
1326 hashlib.md5( open(slist_fn, "rb").read() ).hexdigest(),
1327 datetime.date.isoformat(
1328 datetime.datetime.fromtimestamp( os.path.getmtime(slist_fn) )
1331 hashlib.md5( open(zlist_fn, "rb").read() ).hexdigest(),
1332 datetime.date.isoformat(
1333 datetime.datetime.fromtimestamp( os.path.getmtime(zlist_fn) )
1342 message = "Reading %s:%s..." % (gcounties_an, gcounties_fn)
1343 sys.stdout.write(message)
1346 gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "rU")
1347 columns = gcounties.readline().decode("latin1").strip().split("\t")
1348 for line in gcounties:
1349 fields = line.decode("latin1").strip().split("\t")
1350 f_geoid = fields[ columns.index("GEOID") ].strip()
1351 f_name = fields[ columns.index("NAME") ].strip()
1352 f_usps = fields[ columns.index("USPS") ].strip()
1353 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1354 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1355 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1356 fips = "fips%s" % f_geoid
1357 if fips not in places: places[fips] = {}
1358 places[fips]["centroid"] = gecos(
1359 "%s,%s" % (f_intptlat, f_intptlong)
1361 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1364 print("done (%s lines)." % count)
1365 message = "Reading %s:%s..." % (gcousubs_an, gcousubs_fn)
1366 sys.stdout.write(message)
1369 gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "rU")
1370 columns = gcousubs.readline().decode("latin1").strip().split("\t")
1371 for line in gcousubs:
1372 fields = line.decode("latin1").strip().split("\t")
1373 f_geoid = fields[ columns.index("GEOID") ].strip()
1374 f_name = fields[ columns.index("NAME") ].strip()
1375 f_usps = fields[ columns.index("USPS") ].strip()
1376 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1377 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1378 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1379 fips = "fips%s" % f_geoid
1380 if fips not in places: places[fips] = {}
1381 places[fips]["centroid"] = gecos(
1382 "%s,%s" % (f_intptlat, f_intptlong)
1384 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1387 print("done (%s lines)." % count)
1388 message = "Reading %s:%s..." % (gplace_an, gplace_fn)
1389 sys.stdout.write(message)
1392 gplace = zipfile.ZipFile(gplace_an).open(gplace_fn, "rU")
1393 columns = gplace.readline().decode("latin1").strip().split("\t")
1395 fields = line.decode("latin1").strip().split("\t")
1396 f_geoid = fields[ columns.index("GEOID") ].strip()
1397 f_name = fields[ columns.index("NAME") ].strip()
1398 f_usps = fields[ columns.index("USPS") ].strip()
1399 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1400 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1401 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1402 fips = "fips%s" % f_geoid
1403 if fips not in places: places[fips] = {}
1404 places[fips]["centroid"] = gecos(
1405 "%s,%s" % (f_intptlat, f_intptlong)
1407 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1410 print("done (%s lines)." % count)
1411 message = "Reading %s..." % slist_fn
1412 sys.stdout.write(message)
1415 slist = codecs.open(slist_fn, "rU")
1417 icao = line.split("#")[0].strip()
1420 "metar": "http://tgftp.nws.noaa.gov/data/observations/"\
1421 + "metar/decoded/%s.TXT" % icao.upper()
1425 print("done (%s lines)." % count)
1426 message = "Reading %s..." % metartbl_fn
1427 sys.stdout.write(message)
1430 metartbl = codecs.open(metartbl_fn, "rU")
1431 for line in metartbl:
1432 icao = line[:4].strip().lower()
1433 if icao in stations:
1436 line[16:48].replace("_", " ").strip().title().split()
1438 if name: description.append(name)
1439 st = line[49:51].strip()
1440 if st: description.append(st)
1441 cn = line[52:54].strip()
1442 if cn: description.append(cn)
1444 stations[icao]["description"] = ", ".join(description)
1445 lat = line[55:60].strip()
1447 lat = int(lat)/100.0
1448 lon = line[61:67].strip()
1450 lon = int(lon)/100.0
1451 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1454 print("done (%s lines)." % count)
1455 message = "Reading %s..." % nsdcccc_fn
1456 sys.stdout.write(message)
1459 nsdcccc = codecs.open(nsdcccc_fn, "rU", "latin1")
1460 for line in nsdcccc:
1462 fields = line.split(";")
1463 icao = fields[0].strip().lower()
1464 if icao in stations:
1466 name = " ".join( fields[3].strip().title().split() )
1467 if name: description.append(name)
1468 st = fields[4].strip()
1469 if st: description.append(st)
1470 country = " ".join( fields[5].strip().title().split() )
1471 if country: description.append(country)
1473 stations[icao]["description"] = ", ".join(description)
1474 lat, lon = fields[7:9]
1476 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1477 elif "location" not in stations[icao]:
1478 lat, lon = fields[5:7]
1480 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1483 print("done (%s lines)." % count)
1484 message = "Reading %s..." % coopstn_fn
1485 sys.stdout.write(message)
1488 coopstn = open(coopstn_fn)
1489 for line in coopstn:
1490 icao = line[33:37].strip().lower()
1491 if icao in stations:
1492 iata = line[22:26].strip().lower()
1493 if len(iata) == 3: airports[iata] = { "station": icao }
1494 if "description" not in stations[icao]:
1496 name = " ".join( line[99:129].strip().title().split() )
1497 if name: description.append(name)
1498 st = line[59:61].strip()
1499 if st: description.append(st)
1500 country = " ".join( line[38:58].strip().title().split() )
1501 if country: description.append(country)
1503 stations[icao]["description"] = ", ".join(description)
1504 if "location" not in stations[icao]:
1505 lat = line[130:139].strip()
1507 lat = lat.replace(" ", "-")
1508 lon = line[140:150].strip()
1510 lon = lon.replace(" ", "-")
1511 stations[icao]["location"] = gecos(
1512 "%s,%s" % (lat, lon)
1516 print("done (%s lines)." % count)
1517 message = "Reading %s..." % zlist_fn
1518 sys.stdout.write(message)
1521 zlist = codecs.open(zlist_fn, "rU")
1523 line = line.split("#")[0].strip()
1528 print("done (%s lines)." % count)
1529 message = "Reading %s:*..." % zcatalog_an
1530 sys.stdout.write(message)
1533 zcatalog = tarfile.open(zcatalog_an)
1534 for entry in zcatalog.getmembers():
1537 r"([a-z]+z[0-9]+)\.txt$",
1538 os.path.basename(entry.name)
1541 zone = fnmatch.group(1)
1543 data = zcatalog.extractfile(entry).readlines()
1544 description = data[0].decode("ascii").strip()
1545 zones[zone]["description"] = description
1546 for line in data[1:]:
1547 line = line.decode("latin1").strip()
1548 urimatch = re.match("/webdocs/pub/(.+):(.+) for ",
1551 uritype = urimatch.group(2).lower().replace(" ","_")
1552 zones[zone][uritype] = (
1553 "http://tgftp.nws.noaa.gov/%s"
1554 % urimatch.group(1))
1557 print("done (%s files)." % count)
1558 message = "Reading %s..." % cpfzcf_fn
1559 sys.stdout.write(message)
1563 cpfzcf = open(cpfzcf_fn)
1565 fields = line.strip().split("|")
1566 if len(fields) == 11 \
1567 and fields[0] and fields[1] and fields[9] and fields[10]:
1568 zone = "z".join( fields[:2] ).lower()
1570 zones[zone]["centroid"] = gecos( ",".join( fields[9:11] ) )
1573 description = fields[3]
1575 fips = "fips%s"%fields[6]
1577 "%s, %s" % (county, state),
1578 "%s County, %s" % (county, state),
1580 if description.endswith(" Counties"):
1581 description = description[:-9]
1582 for addition in description.split(" and "):
1583 possible.append( "%s, %s" % (addition, state) )
1584 possible.append( "%s County, %s" % (addition, state) )
1585 if fips in places and "centroid" in places[fips]:
1586 for candidate in zones:
1587 if "centroid" not in zones[candidate] and \
1588 "description" in zones[candidate] and \
1589 zones[candidate]["description"] in possible:
1590 zones[candidate]["centroid"] = \
1591 places[fips]["centroid"]
1594 print("done (%s lines)." % count)
1595 message = "Reading %s:%s..." % (gzcta_an, gzcta_fn)
1596 sys.stdout.write(message)
1599 gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "rU")
1600 columns = gzcta.readline().decode("latin1").strip().split("\t")
1602 fields = line.decode("latin1").strip().split("\t")
1603 f_geoid = fields[ columns.index("GEOID") ].strip()
1604 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1605 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1606 if f_geoid and f_intptlat and f_intptlong:
1607 if f_geoid not in zctas: zctas[f_geoid] = {}
1608 zctas[f_geoid]["centroid"] = gecos(
1609 "%s,%s" % (f_intptlat, f_intptlong)
1613 print("done (%s lines)." % count)
1614 message = "Reading %s..." % overrides_fn
1615 sys.stdout.write(message)
1621 overrides = configparser.ConfigParser()
1622 overrides.readfp( codecs.open(overrides_fn, "r", "utf8") )
1624 for section in overrides.sections():
1627 if section.startswith("-"):
1628 section = section[1:]
1630 else: delete = False
1631 if re.match("[A-Za-z]{3}$", section):
1633 if section in airports:
1634 del( airports[section] )
1635 logact = "removed airport %s" % section
1638 logact = "tried to remove nonexistent airport %s" % section
1640 if section in airports:
1641 logact = "changed airport %s" % section
1644 airports[section] = {}
1645 logact = "added airport %s" % section
1647 for key,value in overrides.items(section):
1648 if key in airports[section]: chgopt += 1
1650 if key in ("centroid", "location"):
1651 airports[section][key] = eval(value)
1653 airports[section][key] = value
1654 if addopt and chgopt:
1655 logact += " (+%s/!%s options)" % (addopt, chgopt)
1656 elif addopt: logact += " (+%s options)" % addopt
1657 elif chgopt: logact += " (!%s options)" % chgopt
1658 elif re.match("[A-Za-z0-9]{4}$", section):
1660 if section in stations:
1661 del( stations[section] )
1662 logact = "removed station %s" % section
1665 logact = "tried to remove nonexistent station %s" % section
1667 if section in stations:
1668 logact = "changed station %s" % section
1671 stations[section] = {}
1672 logact = "added station %s" % section
1674 for key,value in overrides.items(section):
1675 if key in stations[section]: chgopt += 1
1677 if key in ("centroid", "location"):
1678 stations[section][key] = eval(value)
1680 stations[section][key] = value
1681 if addopt and chgopt:
1682 logact += " (+%s/!%s options)" % (addopt, chgopt)
1683 elif addopt: logact += " (+%s options)" % addopt
1684 elif chgopt: logact += " (!%s options)" % chgopt
1685 elif re.match("[0-9]{5}$", section):
1687 if section in zctas:
1688 del( zctas[section] )
1689 logact = "removed zcta %s" % section
1692 logact = "tried to remove nonexistent zcta %s" % section
1694 if section in zctas:
1695 logact = "changed zcta %s" % section
1699 logact = "added zcta %s" % section
1701 for key,value in overrides.items(section):
1702 if key in zctas[section]: chgopt += 1
1704 if key in ("centroid", "location"):
1705 zctas[section][key] = eval(value)
1707 zctas[section][key] = value
1708 if addopt and chgopt:
1709 logact += " (+%s/!%s options)" % (addopt, chgopt)
1710 elif addopt: logact += " (+%s options)" % addopt
1711 elif chgopt: logact += " (!%s options)" % chgopt
1712 elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", section):
1714 if section in zones:
1715 del( zones[section] )
1716 logact = "removed zone %s" % section
1719 logact = "tried to remove nonexistent zone %s" % section
1721 if section in zones:
1722 logact = "changed zone %s" % section
1726 logact = "added zone %s" % section
1728 for key,value in overrides.items(section):
1729 if key in zones[section]: chgopt += 1
1731 if key in ("centroid", "location"):
1732 zones[section][key] = eval(value)
1734 zones[section][key] = value
1735 if addopt and chgopt:
1736 logact += " (+%s/!%s options)" % (addopt, chgopt)
1737 elif addopt: logact += " (+%s options)" % addopt
1738 elif chgopt: logact += " (!%s options)" % chgopt
1739 elif re.match("fips[0-9]+$", section):
1741 if section in places:
1742 del( places[section] )
1743 logact = "removed place %s" % section
1746 logact = "tried to remove nonexistent place %s" % section
1748 if section in places:
1749 logact = "changed place %s" % section
1752 places[section] = {}
1753 logact = "added place %s" % section
1755 for key,value in overrides.items(section):
1756 if key in places[section]: chgopt += 1
1758 if key in ("centroid", "location"):
1759 places[section][key] = eval(value)
1761 places[section][key] = value
1762 if addopt and chgopt:
1763 logact += " (+%s/!%s options)" % (addopt, chgopt)
1764 elif addopt: logact += " (+%s options)" % addopt
1765 elif chgopt: logact += " (!%s options)" % chgopt
1767 overrideslog.append("%s\n" % logact)
1769 if os.path.exists(overrideslog_fn):
1770 os.rename(overrideslog_fn, "%s_old"%overrideslog_fn)
1771 overrideslog_fd = codecs.open(overrideslog_fn, "w", "utf8")
1772 overrideslog_fd.writelines(overrideslog)
1773 overrideslog_fd.close()
1774 print("done (%s overridden sections: +%s/-%s/!%s)." % (
1780 estimate = 2*len(places) + len(stations) + 2*len(zctas) + len(zones)
1782 "Correlating places, stations, ZCTAs and zones (upper bound is %s):" % \
1786 milestones = list( range(51) )
1788 sys.stdout.write(message)
1791 centroid = places[fips]["centroid"]
1793 station = closest(centroid, stations, "location", 0.1)
1795 places[fips]["station"] = station
1798 level = int(50*count/estimate)
1799 if level in milestones:
1800 for remaining in milestones[:milestones.index(level)+1]:
1803 sys.stdout.write(message)
1806 message = "%s%%" % (remaining*2,)
1807 sys.stdout.write(message)
1809 milestones.remove(remaining)
1811 zone = closest(centroid, zones, "centroid", 0.1)
1813 places[fips]["zone"] = zone
1816 level = int(50*count/estimate)
1817 if level in milestones:
1818 for remaining in milestones[:milestones.index(level)+1]:
1821 sys.stdout.write(message)
1824 message = "%s%%" % (remaining*2,)
1825 sys.stdout.write(message)
1827 milestones.remove(remaining)
1828 for station in stations:
1829 if "location" in stations[station]:
1830 location = stations[station]["location"]
1832 zone = closest(location, zones, "centroid", 0.1)
1834 stations[station]["zone"] = zone
1837 level = int(50*count/estimate)
1838 if level in milestones:
1839 for remaining in milestones[:milestones.index(level)+1]:
1842 sys.stdout.write(message)
1845 message = "%s%%" % (remaining*2,)
1846 sys.stdout.write(message)
1848 milestones.remove(remaining)
1849 for zcta in zctas.keys():
1850 centroid = zctas[zcta]["centroid"]
1852 station = closest(centroid, stations, "location", 0.1)
1854 zctas[zcta]["station"] = station
1857 level = int(50*count/estimate)
1858 if level in milestones:
1859 for remaining in milestones[ : milestones.index(level)+1 ]:
1862 sys.stdout.write(message)
1865 message = "%s%%" % (remaining*2,)
1866 sys.stdout.write(message)
1868 milestones.remove(remaining)
1870 zone = closest(centroid, zones, "centroid", 0.1)
1872 zctas[zcta]["zone"] = zone
1875 level = int(50*count/estimate)
1876 if level in milestones:
1877 for remaining in milestones[:milestones.index(level)+1]:
1880 sys.stdout.write(message)
1883 message = "%s%%" % (remaining*2,)
1884 sys.stdout.write(message)
1886 milestones.remove(remaining)
1887 for zone in zones.keys():
1888 if "centroid" in zones[zone]:
1889 centroid = zones[zone]["centroid"]
1891 station = closest(centroid, stations, "location", 0.1)
1893 zones[zone]["station"] = station
1896 level = int(50*count/estimate)
1897 if level in milestones:
1898 for remaining in milestones[:milestones.index(level)+1]:
1901 sys.stdout.write(message)
1904 message = "%s%%" % (remaining*2,)
1905 sys.stdout.write(message)
1907 milestones.remove(remaining)
1908 for remaining in milestones:
1911 sys.stdout.write(message)
1914 message = "%s%%" % (remaining*2,)
1915 sys.stdout.write(message)
1917 print("\n done (%s correlations)." % count)
1918 message = "Writing %s..." % airports_fn
1919 sys.stdout.write(message)
1922 if os.path.exists(airports_fn):
1923 os.rename(airports_fn, "%s_old"%airports_fn)
1924 airports_fd = codecs.open(airports_fn, "w", "utf8")
1925 airports_fd.write(header)
1926 for airport in sorted( airports.keys() ):
1927 airports_fd.write("\n\n[%s]" % airport)
1928 for key, value in sorted( airports[airport].items() ):
1929 if type(value) is float: value = "%.7f"%value
1930 elif type(value) is tuple:
1932 for element in value:
1933 if type(element) is float: elements.append("%.7f"%element)
1934 else: elements.append( repr(element) )
1935 value = "(%s)"%", ".join(elements)
1936 airports_fd.write( "\n%s = %s" % (key, value) )
1938 airports_fd.write("\n")
1940 print("done (%s sections)." % count)
1941 message = "Writing %s..." % places_fn
1942 sys.stdout.write(message)
1945 if os.path.exists(places_fn):
1946 os.rename(places_fn, "%s_old"%places_fn)
1947 places_fd = codecs.open(places_fn, "w", "utf8")
1948 places_fd.write(header)
1949 for fips in sorted( places.keys() ):
1950 places_fd.write("\n\n[%s]" % fips)
1951 for key, value in sorted( places[fips].items() ):
1952 if type(value) is float: value = "%.7f"%value
1953 elif type(value) is tuple:
1955 for element in value:
1956 if type(element) is float: elements.append("%.7f"%element)
1957 else: elements.append( repr(element) )
1958 value = "(%s)"%", ".join(elements)
1959 places_fd.write( "\n%s = %s" % (key, value) )
1961 places_fd.write("\n")
1963 print("done (%s sections)." % count)
1964 message = "Writing %s..." % stations_fn
1965 sys.stdout.write(message)
1968 if os.path.exists(stations_fn):
1969 os.rename(stations_fn, "%s_old"%stations_fn)
1970 stations_fd = codecs.open(stations_fn, "w", "utf8")
1971 stations_fd.write(header)
1972 for station in sorted( stations.keys() ):
1973 stations_fd.write("\n\n[%s]" % station)
1974 for key, value in sorted( stations[station].items() ):
1975 if type(value) is float: value = "%.7f"%value
1976 elif type(value) is tuple:
1978 for element in value:
1979 if type(element) is float: elements.append("%.7f"%element)
1980 else: elements.append( repr(element) )
1981 value = "(%s)"%", ".join(elements)
1982 stations_fd.write( "\n%s = %s" % (key, value) )
1984 stations_fd.write("\n")
1986 print("done (%s sections)." % count)
1987 message = "Writing %s..." % zctas_fn
1988 sys.stdout.write(message)
1991 if os.path.exists(zctas_fn):
1992 os.rename(zctas_fn, "%s_old"%zctas_fn)
1993 zctas_fd = codecs.open(zctas_fn, "w", "utf8")
1994 zctas_fd.write(header)
1995 for zcta in sorted( zctas.keys() ):
1996 zctas_fd.write("\n\n[%s]" % zcta)
1997 for key, value in sorted( zctas[zcta].items() ):
1998 if type(value) is float: value = "%.7f"%value
1999 elif type(value) is tuple:
2001 for element in value:
2002 if type(element) is float: elements.append("%.7f"%element)
2003 else: elements.append( repr(element) )
2004 value = "(%s)"%", ".join(elements)
2005 zctas_fd.write( "\n%s = %s" % (key, value) )
2007 zctas_fd.write("\n")
2009 print("done (%s sections)." % count)
2010 message = "Writing %s..." % zones_fn
2011 sys.stdout.write(message)
2014 if os.path.exists(zones_fn):
2015 os.rename(zones_fn, "%s_old"%zones_fn)
2016 zones_fd = codecs.open(zones_fn, "w", "utf8")
2017 zones_fd.write(header)
2018 for zone in sorted( zones.keys() ):
2019 zones_fd.write("\n\n[%s]" % zone)
2020 for key, value in sorted( zones[zone].items() ):
2021 if type(value) is float: value = "%.7f"%value
2022 elif type(value) is tuple:
2024 for element in value:
2025 if type(element) is float: elements.append("%.7f"%element)
2026 else: elements.append( repr(element) )
2027 value = "(%s)"%", ".join(elements)
2028 zones_fd.write( "\n%s = %s" % (key, value) )
2030 zones_fd.write("\n")
2032 print("done (%s sections)." % count)
2033 message = "Starting QA check..."
2034 sys.stdout.write(message)
2036 airports = configparser.ConfigParser()
2037 airports.read(airports_fn)
2038 places = configparser.ConfigParser()
2039 places.read(places_fn)
2040 stations = configparser.ConfigParser()
2041 stations.read(stations_fn)
2042 zctas = configparser.ConfigParser()
2043 zctas.read(zctas_fn)
2044 zones = configparser.ConfigParser()
2045 zones.read(zones_fn)
2047 places_nocentroid = 0
2048 places_nodescription = 0
2049 for place in sorted( places.sections() ):
2050 if not places.has_option(place, "centroid"):
2051 qalog.append("%s: no centroid\n" % place)
2052 places_nocentroid += 1
2053 if not places.has_option(place, "description"):
2054 qalog.append("%s: no description\n" % place)
2055 places_nodescription += 1
2056 stations_nodescription = 0
2057 stations_nolocation = 0
2058 stations_nometar = 0
2059 for station in sorted( stations.sections() ):
2060 if not stations.has_option(station, "description"):
2061 qalog.append("%s: no description\n" % station)
2062 stations_nodescription += 1
2063 if not stations.has_option(station, "location"):
2064 qalog.append("%s: no location\n" % station)
2065 stations_nolocation += 1
2066 if not stations.has_option(station, "metar"):
2067 qalog.append("%s: no metar\n" % station)
2068 stations_nometar += 1
2069 airports_badstation = 0
2070 airports_nostation = 0
2071 for airport in sorted( airports.sections() ):
2072 if not airports.has_option(airport, "station"):
2073 qalog.append("%s: no station\n" % airport)
2074 airports_nostation += 1
2076 station = airports.get(airport, "station")
2077 if station not in stations.sections():
2078 qalog.append( "%s: bad station %s\n" % (airport, station) )
2079 airports_badstation += 1
2080 zctas_nocentroid = 0
2081 for zcta in sorted( zctas.sections() ):
2082 if not zctas.has_option(zcta, "centroid"):
2083 qalog.append("%s: no centroid\n" % zcta)
2084 zctas_nocentroid += 1
2085 zones_nocentroid = 0
2086 zones_nodescription = 0
2087 zones_noforecast = 0
2088 zones_overlapping = 0
2090 for zone in zones.sections():
2091 if zones.has_option(zone, "centroid"):
2093 "centroid": eval( zones.get(zone, "centroid") )
2095 for zone in sorted( zones.sections() ):
2096 if zones.has_option(zone, "centroid"):
2097 zonetable_local = zonetable.copy()
2098 del( zonetable_local[zone] )
2099 centroid = eval( zones.get(zone, "centroid") )
2101 nearest = closest(centroid, zonetable_local, "centroid", 0.1)
2102 if nearest[1]*radian_to_km < 1:
2103 qalog.append( "%s: within one km of %s\n" % (
2107 zones_overlapping += 1
2109 qalog.append("%s: no centroid\n" % zone)
2110 zones_nocentroid += 1
2111 if not zones.has_option(zone, "description"):
2112 qalog.append("%s: no description\n" % zone)
2113 zones_nodescription += 1
2114 if not zones.has_option(zone, "zone_forecast"):
2115 qalog.append("%s: no forecast\n" % zone)
2116 zones_noforecast += 1
2117 if os.path.exists(qalog_fn):
2118 os.rename(qalog_fn, "%s_old"%qalog_fn)
2119 qalog_fd = codecs.open(qalog_fn, "w", "utf8")
2120 qalog_fd.writelines(qalog)
2123 print("issues found (see %s for details):"%qalog_fn)
2124 if airports_badstation:
2125 print(" %s airports with invalid station"%airports_badstation)
2126 if airports_nostation:
2127 print(" %s airports with no station"%airports_nostation)
2128 if places_nocentroid:
2129 print(" %s places with no centroid"%places_nocentroid)
2130 if places_nodescription:
2131 print(" %s places with no description"%places_nodescription)
2132 if stations_nodescription:
2133 print(" %s stations with no description"%stations_nodescription)
2134 if stations_nolocation:
2135 print(" %s stations with no location"%stations_nolocation)
2136 if stations_nometar:
2137 print(" %s stations with no METAR"%stations_nometar)
2138 if zctas_nocentroid:
2139 print(" %s ZCTAs with no centroid"%zctas_nocentroid)
2140 if zones_nocentroid:
2141 print(" %s zones with no centroid"%zones_nocentroid)
2142 if zones_nodescription:
2143 print(" %s zones with no description"%zones_nodescription)
2144 if zones_noforecast:
2145 print(" %s zones with no forecast"%zones_noforecast)
2146 if zones_overlapping:
2147 print(" %s zones within one km of another"%zones_overlapping)
2148 else: print("no issues found.")
2149 print("Indexing complete!")