1 """Contains various object definitions needed by the weather utility."""
3 weather_copyright = """\
4 # Copyright (c) 2006-2012 Jeremy Stanley <fungi@yuggoth.org>. Permission to
5 # use, copy, modify, and distribute this software is granted under terms
6 # provided in the LICENSE file distributed with this software.
9 weather_version = "2.0"
11 radian_to_km = 6372.795484
12 radian_to_mi = 3959.871528
# NOTE(review): this file is a partial numbered listing -- every line carries
# its original line number and many lines are elided; it is not runnable as-is.
# pyversion(): compares the running interpreter's version against an optional
# reference version string; the return statements are among the elided lines,
# so the exact return contract cannot be confirmed from here -- TODO verify.
14 def pyversion(ref=None):
15 """Determine the Python version and optionally compare to a reference."""
17 ver = platform.python_version()
# only major.minor are compared; the patch component is deliberately dropped
20 int(x) for x in ver.split(".")[:2]
22 int(x) for x in ref.split(".")[:2]
27 """An object to contain selection data."""
29 """Store the config, options and arguments."""
30 self.config = get_config()
31 self.options, self.arguments = get_options(self.config)
32 if self.get_bool("cache") and self.get_bool("cache_search") \
33 and not self.get_bool("longlist"):
34 integrate_search_cache(
39 if not self.arguments:
40 if "id" in self.options.__dict__ \
41 and self.options.__dict__["id"]:
42 self.arguments.append( self.options.__dict__["id"] )
43 del( self.options.__dict__["id"] )
45 message = "WARNING: the --id option is deprecated and will eventually be removed\n"
46 sys.stderr.write(message)
47 elif "city" in self.options.__dict__ \
48 and self.options.__dict__["city"] \
49 and "st" in self.options.__dict__ \
50 and self.options.__dict__["st"]:
51 self.arguments.append(
53 self.options.__dict__["city"],
54 self.options.__dict__["st"]
57 del( self.options.__dict__["city"] )
58 del( self.options.__dict__["st"] )
60 message = "WARNING: the --city/--st options are deprecated and will eventually be removed\n"
61 sys.stderr.write(message)
# selections.get(): look up `option`, preferring an alias/search section of the
# config over the parsed command-line options.  Lookup order visible below:
# 1) scrub deprecated city/id/st keys from an alias section, 2) if the section
# is missing, populate it (the elided lines 75-83 appear to pass setpath/info/
# cache/cachedir keywords to a search helper, presumably guess() -- TODO
# confirm), 3) return the config value, 4) fall back to the options object,
# 5) otherwise report a missing-URI error to stderr.
62 def get(self, option, argument=None):
63 """Retrieve data from the config or options."""
# city/id/st in aliases are no longer supported: drop the whole section and warn
65 if self.config.has_section(argument) and (
66 self.config.has_option(argument, "city") \
67 or self.config.has_option(argument, "id") \
68 or self.config.has_option(argument, "st")
70 self.config.remove_section(argument)
72 message = "WARNING: the city/id/st options are now unsupported in aliases\n"
73 sys.stderr.write(message)
74 if not self.config.has_section(argument):
77 path=self.get("setpath"),
78 info=self.get("info"),
80 self.get("cache") and self.get("cache_search")
82 cachedir=self.get("cachedir")
# cache the guessed key/value pairs as a new config section for this argument
84 self.config.add_section(argument)
85 for item in guessed.items():
86 self.config.set(argument, *item)
# config section wins over command-line options
87 if self.config.has_option(argument, option):
88 return self.config.get(argument, option)
89 if option in self.options.__dict__:
90 return self.options.__dict__[option]
# neither source had the option: report the failure on stderr
93 message = "%s error: no URI defined for %s\n" % (
94 os.path.basename( sys.argv[0] ),
97 sys.stderr.write(message)
# Thin wrapper over get(): coerce whatever value is found to a boolean.
# NOTE(review): bool() on a config string is True for any non-empty value,
# including "False" or "0" -- same truthiness caveat as in get_options().
99 def get_bool(self, option, argument=None):
100 """Get data and coerce to a boolean if necessary."""
101 return bool(self.get(option, argument))
# Thin wrapper over get(): coerce a truthy value to int.  The branch for a
# falsy/empty value is among the elided lines (presumably returns 0) -- TODO
# confirm against the full source.
102 def getint(self, option, argument=None):
103 """Get data and coerce to an integer if necessary."""
104 value = self.get(option, argument)
105 if value: return int(value)
109 """Average a list of coordinates."""
116 return (x/count, y/count)
# filter_units(): rewrite one line of feed text so it shows only the requested
# unit system.  Each stanza regex-matches a dual-unit phrase and keeps (or
# converts to) the imperial or metric half.  The re.match calls' closing
# parentheses and the match-success guards are among the elided lines.
# 1.609344 is the exact miles->kilometers factor.
118 def filter_units(line, units="imperial"):
119 """Filter or convert units in a line of text between US/UK and metric."""
121 # filter lines with both pressures in the form of "X inches (Y hPa)" or
124 "(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
128 preamble, in_hg, i_fr, i_un, hpa, h_fr, trailer = dual_p.groups()
129 if units == "imperial": line = preamble + in_hg + trailer
130 elif units == "metric": line = preamble + hpa + trailer
131 # filter lines with both temperatures in the form of "X F (Y C)"
133 "(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
137 preamble, fahrenheit, f_fr, celsius, c_fr, trailer = dual_t.groups()
138 if units == "imperial": line = preamble + fahrenheit + trailer
139 elif units == "metric": line = preamble + celsius + trailer
140 # if metric is desired, convert distances in the form of "X mile(s)" to
142 if units == "metric":
143 imperial_d = re.match(
144 "(.* )(\d+)( mile\(s\))(.*)",
148 preamble, mi, m_u, trailer = imperial_d.groups()
149 line = preamble + str(int(round(int(mi)*1.609344))) \
150 + " kilometer(s)" + trailer
151 # filter speeds in the form of "X MPH (Y KT)" to just "X MPH"; if metric is
152 # desired, convert to "Z KPH"
153 imperial_s = re.match(
154 "(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
158 preamble, mph, m_u, kt, trailer = imperial_s.groups()
159 if units == "imperial": line = preamble + mph + m_u + trailer
160 elif units == "metric":
161 line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
# NOTE(review): the MPH/KT stanza appears twice (original lines 153-161 and
# 163-171) -- presumably a deliberate second pass for lines carrying two wind
# speeds (e.g. "... gusting to Y MPH (Z KT)"); verify against the full source.
163 imperial_s = re.match(
164 "(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
168 preamble, mph, m_u, kt, trailer = imperial_s.groups()
169 if units == "imperial": line = preamble + mph + m_u + trailer
170 elif units == "metric":
171 line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
173 # if imperial is desired, qualify given forcast temperatures like "X F"; if
174 # metric is desired, convert to "Y C"
175 imperial_t = re.match(
176 "(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
180 preamble, parameter, fahrenheit, sep, trailer = imperial_t.groups()
181 if units == "imperial":
182 line = preamble + parameter + fahrenheit + " F" + sep + trailer
# standard F->C conversion; int() truncation plus round() keeps whole degrees
183 elif units == "metric":
184 line = preamble + parameter \
185 + str(int(round((int(fahrenheit)-32)*5/9))) + " C" + sep \
187 # hand off the resulting line
197 """Return a string containing the results of a URI GET."""
199 import urllib, urllib.error, urllib.request
200 URLError = urllib.error.URLError
201 urlopen = urllib.request.urlopen
203 import urllib2 as urllib
204 URLError = urllib.URLError
205 urlopen = urllib.urlopen
208 dcachedir = os.path.join( os.path.expanduser(cachedir), "datacache" )
209 if not os.path.exists(dcachedir):
210 try: os.makedirs(dcachedir)
211 except (IOError, OSError): pass
212 dcache_fn = os.path.join(
214 uri.split(":")[1].replace("/","_")
217 if cache_data and os.access(dcache_fn, os.R_OK) \
218 and now-cacheage < os.stat(dcache_fn).st_mtime <= now:
219 dcache_fd = open(dcache_fn)
220 data = dcache_fd.read()
224 if pyversion("3"): data = urlopen(uri).read().decode("utf-8")
225 else: data = urlopen(uri).read()
227 if ignore_fail: return ""
229 import os, sys, traceback
230 message = "%s error: failed to retrieve\n %s\n %s" % (
231 os.path.basename( sys.argv[0] ),
233 traceback.format_exception_only(
238 sys.stderr.write(message)
243 dcache_fd = codecs.open(dcache_fn, "w", "utf-8")
244 dcache_fd.write(data)
246 except (IOError, OSError): pass
260 """Return a summarized METAR for the specified station."""
263 message = "%s error: METAR URI required for conditions\n" % \
264 os.path.basename( sys.argv[0] )
265 sys.stderr.write(message)
269 cache_data=cache_data,
273 if pyversion("3") and type(metar) is bytes: metar = metar.decode("utf-8")
274 if verbose: return metar
277 lines = metar.split("\n")
280 "relative_humidity," \
281 + "precipitation_last_hour," \
282 + "sky conditions," \
288 headerlist = headers.lower().replace("_"," ").split(",")
291 title = "Current conditions at %s"
292 place = lines[0].split(", ")
294 place = "%s, %s" % ( place[0].title(), place[1] )
295 else: place = "<UNKNOWN>"
296 output.append(title%place)
297 output.append("Last updated " + lines[1])
299 for header in headerlist:
301 if line.lower().startswith(header + ":"):
302 if re.match(r".*:\d+$", line): line = line[:line.rfind(":")]
303 if imperial: line = filter_units(line, units="imperial")
304 elif metric: line = filter_units(line, units="metric")
305 if quiet: output.append(line)
306 else: output.append(" " + line)
310 "(no conditions matched your header list, try with --verbose)"
312 return "\n".join(output)
322 """Return alert notice for the specified URI."""
325 message = "%s error: Alert URI required for alerts\n" % \
326 os.path.basename( sys.argv[0] )
327 sys.stderr.write(message)
332 cache_data=cache_data,
336 if pyversion("3") and type(alert) is bytes: alert = alert.decode("utf-8")
338 if verbose: return alert
340 if alert.find("\nNATIONAL WEATHER SERVICE") == -1:
344 lines = alert.split("\n")
346 valid_time = time.strftime("%Y%m%d%H%M")
349 if line.startswith("Expires:") \
350 and "Expires:" + valid_time > line:
352 if muted and line.startswith("NATIONAL WEATHER SERVICE"):
359 if line and not muted:
360 if quiet: output.append(line)
361 else: output.append(" " + line)
362 return "\n".join(output)
# get_options(): build the optparse.OptionParser, seeding every option's
# default from the [default] section of the parsed config when present, then
# parse argv and return (options, arguments).  Several add_option() calls are
# only partially visible here (their opening/closing lines are elided).
# NOTE(review): bool(config.get("default", ...)) is True for ANY non-empty
# string, including "False" or "0" -- config.getboolean() would honor the
# usual ini spellings.  Flagged only; this listing cannot be safely patched.
364 def get_options(config):
365 """Parse the options passed on the command line."""
367 # for optparse's builtin -h/--help option
369 "usage: %prog [options] [alias1|search1 [alias2|search2 [...]]]"
371 # for optparse's builtin --version option
372 verstring = "%prog " + weather_version
376 option_parser = optparse.OptionParser(usage=usage, version=verstring)
377 # separate options object from list of arguments and return both
379 # the -a/--alert option
380 if config.has_option("default", "alert"):
381 default_alert = bool(config.get("default", "alert"))
382 else: default_alert = False
383 option_parser.add_option("-a", "--alert",
386 default=default_alert,
387 help="include local alert notices")
389 # the --atypes option
390 if config.has_option("default", "atypes"):
391 default_atypes = config.get("default", "atypes")
# fallback: comma-joined list of NWS alert product types (partially elided)
394 "coastal_flood_statement," \
395 + "flash_flood_statement," \
396 + "flash_flood_warning," \
397 + "flash_flood_watch," \
398 + "flood_statement," \
400 + "marine_weather_statement," \
401 + "river_statement," \
402 + "severe_thunderstorm_warning," \
403 + "severe_weather_statement," \
404 + "short_term_forecast," \
405 + "special_marine_warning," \
406 + "special_weather_statement," \
407 + "tornado_warning," \
408 + "urgent_weather_message"
409 option_parser.add_option("--atypes",
411 default=default_atypes,
412 help="list of alert notification types to display")
414 # the --build-sets option
415 option_parser.add_option("--build-sets",
419 help="(re)build location correlation sets")
421 # the --cacheage option
422 if config.has_option("default", "cacheage"):
423 default_cacheage = config.getint("default", "cacheage")
424 else: default_cacheage = 900
425 option_parser.add_option("--cacheage",
427 default=default_cacheage,
428 help="duration in seconds to refresh cached data")
430 # the --cachedir option
431 if config.has_option("default", "cachedir"):
432 default_cachedir = config.get("default", "cachedir")
433 else: default_cachedir = "~/.weather"
434 option_parser.add_option("--cachedir",
436 default=default_cachedir,
437 help="directory for storing cached searches and data")
439 # the -f/--forecast option
440 if config.has_option("default", "forecast"):
441 default_forecast = bool(config.get("default", "forecast"))
442 else: default_forecast = False
443 option_parser.add_option("-f", "--forecast",
446 default=default_forecast,
447 help="include a local forecast")
449 # the --headers option
450 if config.has_option("default", "headers"):
451 default_headers = config.get("default", "headers")
455 + "relative_humidity," \
460 + "sky_conditions," \
461 + "precipitation_last_hour"
462 option_parser.add_option("--headers",
464 default=default_headers,
465 help="list of conditions headers to display")
467 # the --imperial option
468 if config.has_option("default", "imperial"):
469 default_imperial = bool(config.get("default", "imperial"))
470 else: default_imperial = False
471 option_parser.add_option("--imperial",
474 default=default_imperial,
475 help="filter/convert conditions for US/UK units")
478 option_parser.add_option("--info",
482 help="output detailed information for your search")
484 # the -l/--list option
485 option_parser.add_option("-l", "--list",
489 help="list all configured aliases and cached searches")
491 # the --longlist option
492 option_parser.add_option("--longlist",
496 help="display details of all configured aliases")
498 # the -m/--metric option
499 if config.has_option("default", "metric"):
500 default_metric = bool(config.get("default", "metric"))
501 else: default_metric = False
502 option_parser.add_option("-m", "--metric",
505 default=default_metric,
506 help="filter/convert conditions for metric units")
508 # the -n/--no-conditions option
509 if config.has_option("default", "conditions"):
510 default_conditions = bool(config.get("default", "conditions"))
511 else: default_conditions = True
512 option_parser.add_option("-n", "--no-conditions",
514 action="store_false",
515 default=default_conditions,
516 help="disable output of current conditions")
518 # the --no-cache option
519 if config.has_option("default", "cache"):
520 default_cache = bool(config.get("default", "cache"))
521 else: default_cache = True
522 option_parser.add_option("--no-cache",
524 action="store_false",
526 help="disable all caching (searches and data)")
528 # the --no-cache-data option
529 if config.has_option("default", "cache_data"):
530 default_cache_data = bool(config.get("default", "cache_data"))
531 else: default_cache_data = True
532 option_parser.add_option("--no-cache-data",
534 action="store_false",
536 help="disable retrieved data caching")
538 # the --no-cache-search option
539 if config.has_option("default", "cache_search"):
540 default_cache_search = bool(config.get("default", "cache_search"))
541 else: default_cache_search = True
542 option_parser.add_option("--no-cache-search",
544 action="store_false",
546 help="disable search result caching")
548 # the -q/--quiet option
549 if config.has_option("default", "quiet"):
550 default_quiet = bool(config.get("default", "quiet"))
551 else: default_quiet = False
552 option_parser.add_option("-q", "--quiet",
555 default=default_quiet,
556 help="skip preambles and don't indent")
558 # the --setpath option
559 if config.has_option("default", "setpath"):
560 default_setpath = config.get("default", "setpath")
561 else: default_setpath = ".:~/.weather"
562 option_parser.add_option("--setpath",
564 default=default_setpath,
565 help="directory search path for correlation sets")
567 # the -v/--verbose option
568 if config.has_option("default", "verbose"):
569 default_verbose = bool(config.get("default", "verbose"))
570 else: default_verbose = False
571 option_parser.add_option("-v", "--verbose",
574 default=default_verbose,
575 help="show full decoded feeds")
# deprecated -c/-i/-s options kept for compatibility but hidden from --help
578 if config.has_option("default", "city"):
579 default_city = config.get("default", "city")
580 else: default_city = ""
581 option_parser.add_option("-c", "--city",
583 default=default_city,
584 help=optparse.SUPPRESS_HELP)
585 if config.has_option("default", "id"):
586 default_id = config.get("default", "id")
587 else: default_id = ""
588 option_parser.add_option("-i", "--id",
591 help=optparse.SUPPRESS_HELP)
592 if config.has_option("default", "st"):
593 default_st = config.get("default", "st")
594 else: default_st = ""
595 option_parser.add_option("-s", "--st",
598 help=optparse.SUPPRESS_HELP)
600 options, arguments = option_parser.parse_args()
601 return options, arguments
604 """Parse the aliases and configuration."""
605 if pyversion("3"): import configparser
606 else: import ConfigParser as configparser
607 config = configparser.ConfigParser()
611 os.path.expanduser("~/.weather/weatherrc"),
612 os.path.expanduser("~/.weatherrc"),
615 for rcfile in rcfiles:
616 if os.access(rcfile, os.R_OK): config.read(rcfile)
617 for section in config.sections():
618 if section != section.lower():
619 if config.has_section(section.lower()):
620 config.remove_section(section.lower())
621 config.add_section(section.lower())
622 for option,value in config.items(section):
623 config.set(section.lower(), option, value)
# integrate_search_cache(): merge the on-disk "searches" cache into `config`
# without overwriting sections the config already defines.  The cache is
# discarded wholesale when any correlation data file under `setpath` is newer
# than the cache's recorded creation time (i.e. the cached results may be
# stale).  The cache-removal call itself and the file-descriptor close are
# among the elided lines -- TODO confirm the fd is closed in the full source.
626 def integrate_search_cache(config, cachedir, setpath):
627 """Add cached search results into the configuration."""
628 if pyversion("3"): import configparser
629 else: import ConfigParser as configparser
631 scache_fn = os.path.join( os.path.expanduser(cachedir), "searches" )
632 if not os.access(scache_fn, os.R_OK): return config
633 scache_fd = open(scache_fn)
# first line of the cache file looks like "...: <epoch> ..." -- parse the stamp
634 created = float( scache_fd.readline().split(":")[1].strip().split()[0] )
637 datafiles = data_index(setpath)
# newest mtime among the data files decides whether the cache is outdated
639 data_freshness = sorted(
640 [ x[1] for x in datafiles.values() ],
643 else: data_freshness = now
644 if created < data_freshness <= now:
647 print( "[clearing outdated %s]" % scache_fn )
648 except (IOError, OSError):
651 scache = configparser.ConfigParser()
652 scache.read(scache_fn)
# config sections win; only absent sections are copied in from the cache
653 for section in scache.sections():
654 if not config.has_section(section):
655 config.add_section(section)
656 for option,value in scache.items(section):
657 config.set(section, option, value)
# list_aliases(): render the configured aliases/cached searches as text.
# With detail (visible branch at 663-667): full ini-style dump of every
# section.  Otherwise (670-675): one "name: description" line per section.
# The if/else joining the two branches and the final return are elided.
660 def list_aliases(config, detail=False):
661 """Return a formatted list of aliases defined in the config."""
663 output = "\n# configured alias details..."
664 for section in sorted(config.sections()):
665 output += "\n\n[%s]" % section
666 for item in sorted(config.items(section)):
667 output += "\n%s = %s" % item
670 output = "configured aliases and cached searches..."
671 for section in sorted(config.sections()):
672 if config.has_option(section, "description"):
673 description = config.get(section, "description")
674 else: description = "(no description provided)"
675 output += "\n %s: %s" % (section, description)
# data_index(): locate the correlation data files along a colon-separated
# search path, trying bare, .gz and .txt names, and map each data set name to
# (path, mtime).  Presumably the first hit per name wins (a break/return is
# among the elided lines) -- TODO confirm against the full source.
678 def data_index(path):
681 for filename in ("airports", "places", "stations", "zctas", "zones"):
682 for dirname in path.split(":"):
683 for extension in ("", ".gz", ".txt"):
684 candidate = os.path.expanduser(
685 os.path.join( dirname, "".join( (filename, extension) ) )
687 if os.path.exists(candidate):
688 datafiles[filename] = (
690 os.stat(candidate).st_mtime
704 """Find URIs using airport, gecos, placename, station, ZCTA/ZIP, zone."""
705 import codecs, datetime, time, os, re, sys
706 if pyversion("3"): import configparser
707 else: import ConfigParser as configparser
708 datafiles = data_index(path)
709 if re.match("[A-Za-z]{3}$", expression): searchtype = "airport"
710 elif re.match("[A-Za-z0-9]{4}$", expression): searchtype = "station"
711 elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", expression): searchtype = "zone"
712 elif re.match("[0-9]{5}$", expression): searchtype = "ZCTA"
714 r"[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?, *[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?$",
717 searchtype = "coordinates"
718 elif re.match(r"(FIPS|fips)\d+$", expression): searchtype = "FIPS"
722 if cache_search: action = "caching"
723 else: action = "using"
732 (0.995, "excellent"),
735 print("Searching via %s..."%searchtype)
736 stations = configparser.ConfigParser()
737 dataname = "stations"
738 if dataname in datafiles:
739 datafile = datafiles[dataname][0]
740 if datafile.endswith(".gz"):
742 stations.readfp( gzip.open(datafile) )
744 stations.read(datafile)
746 message = "%s error: can't find \"%s\" data file\n" % (
747 os.path.basename( sys.argv[0] ),
750 sys.stderr.write(message)
752 zones = configparser.ConfigParser()
754 if dataname in datafiles:
755 datafile = datafiles[dataname][0]
756 if datafile.endswith(".gz"):
758 zones.readfp( gzip.open(datafile) )
762 message = "%s error: can't find \"%s\" data file\n" % (
763 os.path.basename( sys.argv[0] ),
766 sys.stderr.write(message)
774 if searchtype == "airport":
775 expression = expression.lower()
776 airports = configparser.ConfigParser()
777 dataname = "airports"
778 if dataname in datafiles:
779 datafile = datafiles[dataname][0]
780 if datafile.endswith(".gz"):
782 airports.readfp( gzip.open(datafile) )
784 airports.read(datafile)
786 message = "%s error: can't find \"%s\" data file\n" % (
787 os.path.basename( sys.argv[0] ),
790 sys.stderr.write(message)
792 if airports.has_section(expression) \
793 and airports.has_option(expression, "station"):
794 search = (expression, "IATA/FAA airport code %s" % expression)
795 station = ( airports.get(expression, "station"), 0 )
796 if stations.has_option(station[0], "zone"):
797 zone = eval( stations.get(station[0], "zone") )
799 if not info and stations.has_option( station[0], "description" ):
803 stations.get(station[0], "description")
807 message = "No IATA/FAA airport code \"%s\" in the %s file.\n" % (
809 datafiles["airports"][0]
811 sys.stderr.write(message)
813 elif searchtype == "station":
814 expression = expression.lower()
815 if stations.has_section(expression):
816 station = (expression, 0)
818 search = (expression, "ICAO station code %s" % expression)
819 if stations.has_option(expression, "zone"):
820 zone = eval( stations.get(expression, "zone") )
822 if not info and stations.has_option(expression, "description"):
826 stations.get(expression, "description")
830 message = "No ICAO weather station \"%s\" in the %s file.\n" % (
832 datafiles["stations"][0]
834 sys.stderr.write(message)
836 elif searchtype == "zone":
837 expression = expression.lower()
838 if zones.has_section(expression) \
839 and zones.has_option(expression, "station"):
840 zone = (expression, 0)
841 station = eval( zones.get(expression, "station") )
843 search = (expression, "NWS/NOAA weather zone %s" % expression)
844 if not info and zones.has_option(expression, "description"):
848 zones.get(expression, "description")
852 message = "No usable NWS weather zone \"%s\" in the %s file.\n" % (
854 datafiles["zones"][0]
856 sys.stderr.write(message)
858 elif searchtype == "ZCTA":
859 zctas = configparser.ConfigParser()
861 if dataname in datafiles:
862 datafile = datafiles[dataname][0]
863 if datafile.endswith(".gz"):
865 zctas.readfp( gzip.open(datafile) )
869 message = "%s error: can't find \"%s\" data file\n" % (
870 os.path.basename( sys.argv[0] ),
873 sys.stderr.write(message)
876 if zctas.has_section(expression) \
877 and zctas.has_option(expression, "station"):
878 station = eval( zctas.get(expression, "station") )
879 search = (expression, "Census ZCTA (ZIP code) %s" % expression)
880 if zctas.has_option(expression, "zone"):
881 zone = eval( zctas.get(expression, "zone") )
883 message = "No census ZCTA (ZIP code) \"%s\" in the %s file.\n" % (
885 datafiles["zctas"][0]
887 sys.stderr.write(message)
889 elif searchtype == "coordinates":
890 search = (expression, "Geographic coordinates %s" % expression)
892 for station in stations.sections():
893 if stations.has_option(station, "location"):
894 stationtable[station] = {
895 "location": eval( stations.get(station, "location") )
897 station = closest( gecos(expression), stationtable, "location", 0.1 )
899 message = "No ICAO weather station found near %s.\n" % expression
900 sys.stderr.write(message)
903 for zone in zones.sections():
904 if zones.has_option(zone, "centroid"):
906 "centroid": eval( zones.get(zone, "centroid") )
908 zone = closest( gecos(expression), zonetable, "centroid", 0.1 )
910 message = "No NWS weather zone near %s; forecasts unavailable.\n" \
912 sys.stderr.write(message)
913 elif searchtype in ("FIPS", "name"):
914 places = configparser.ConfigParser()
916 if dataname in datafiles:
917 datafile = datafiles[dataname][0]
918 if datafile.endswith(".gz"):
920 places.readfp( gzip.open(datafile) )
922 places.read(datafile)
924 message = "%s error: can't find \"%s\" data file\n" % (
925 os.path.basename( sys.argv[0] ),
928 sys.stderr.write(message)
931 place = expression.lower()
932 if places.has_section(place) and places.has_option(place, "station"):
933 station = eval( places.get(place, "station") )
934 search = (expression, "Census Place %s" % expression)
935 if places.has_option(place, "description"):
938 search[1] + ", %s" % places.get(place, "description")
940 if places.has_option(place, "zone"):
941 zone = eval( places.get(place, "zone") )
942 if not info and places.has_option(place, "description"):
946 places.get(place, "description")
950 for place in places.sections():
951 if places.has_option(place, "description") \
952 and places.has_option(place, "station") \
955 places.get(place, "description"),
958 possibilities.append(place)
959 for place in stations.sections():
960 if stations.has_option(place, "description") \
963 stations.get(place, "description"),
966 possibilities.append(place)
967 for place in zones.sections():
968 if zones.has_option(place, "description") \
969 and zones.has_option(place, "station") \
972 zones.get(place, "description"),
975 possibilities.append(place)
976 if len(possibilities) == 1:
977 place = possibilities[0]
978 if places.has_section(place):
979 station = eval( places.get(place, "station") )
980 description = places.get(place, "description")
981 if places.has_option(place, "zone"):
982 zone = eval( places.get(place, "zone" ) )
983 search = ( expression, "%s: %s" % (place, description) )
984 elif stations.has_section(place):
985 station = (place, 0.0)
986 description = stations.get(place, "description")
987 if stations.has_option(place, "zone"):
988 zone = eval( stations.get(place, "zone" ) )
989 search = ( expression, "ICAO station code %s" % place )
990 elif zones.has_section(place):
991 station = eval( zones.get(place, "station") )
992 description = zones.get(place, "description")
994 search = ( expression, "NWS/NOAA weather zone %s" % place )
995 if not info: print( "[%s result %s]" % (action, description) )
996 if not possibilities and not station[0]:
997 message = "No FIPS code/census area match in the %s file.\n" % (
998 datafiles["places"][0]
1000 sys.stderr.write(message)
1003 uris["metar"] = stations.get( station[0], "metar" )
1005 for key,value in zones.items( zone[0] ):
1006 if key not in ("centroid", "description", "station"):
1009 count = len(possibilities)
1010 if count <= max_results:
1011 print( "Your search is ambiguous, returning %s matches:" % count )
1012 for place in sorted(possibilities):
1013 if places.has_section(place):
1017 places.get(place, "description")
1020 elif stations.has_section(place):
1024 stations.get(place, "description")
1027 elif zones.has_section(place):
1031 zones.get(place, "description")
1036 "Your search is too ambiguous, returning %s matches." % count
1043 for section in dataset.sections():
1044 if dataset.has_option(section, "station"):
1046 eval( dataset.get(section, "station") )[1]
1048 if dataset.has_option(section, "zone"):
1049 zonelist.append( eval( dataset.get(section, "zone") )[1] )
1052 scount = len(stationlist)
1053 zcount = len(zonelist)
1056 for score in scores:
1058 sranks.append( stationlist[ int( (1-score[0]) * scount ) ] )
1060 zranks.append( zonelist[ int( (1-score[0]) * zcount ) ] )
1061 description = search[1]
1062 uris["description"] = description
1064 "%s\n%s" % ( description, "-" * len(description) )
1069 stations.get( station[0], "description" )
1072 km = radian_to_km*station[1]
1073 mi = radian_to_mi*station[1]
1074 if sranks and not description.startswith("ICAO station code "):
1075 for index in range(0, len(scores)):
1076 if station[1] >= sranks[index]:
1077 score = scores[index][1]
1080 " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1082 elif searchtype is "coordinates":
1083 print( " (%.3gkm, %.3gmi)" % (km, mi) )
1086 "%s: %s" % ( zone[0], zones.get( zone[0], "description" ) )
1088 km = radian_to_km*zone[1]
1089 mi = radian_to_mi*zone[1]
1090 if zranks and not description.startswith("NWS/NOAA weather zone "):
1091 for index in range(0, len(scores)):
1092 if zone[1] >= zranks[index]:
1093 score = scores[index][1]
1096 " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1098 elif searchtype is "coordinates" and zone[0]:
1099 print( " (%.3gkm, %.3gmi)" % (km, mi) )
1102 nowstamp = "%s (%s)" % (
1104 datetime.datetime.isoformat(
1105 datetime.datetime.fromtimestamp(now),
1109 search_cache = ["\n"]
1110 search_cache.append( "[%s]\n" % search[0] )
1111 search_cache.append( "description = cached %s\n" % nowstamp )
1112 for uriname in sorted(uris.keys()):
1113 search_cache.append( "%s = %s\n" % ( uriname, uris[uriname] ) )
1114 real_cachedir = os.path.expanduser(cachedir)
1115 if not os.path.exists(real_cachedir):
1116 try: os.makedirs(real_cachedir)
1117 except (IOError, OSError): pass
1118 scache_fn = os.path.join(real_cachedir, "searches")
1119 if not os.path.exists(scache_fn):
1121 [ x[1] for x in datafiles.values() ],
1124 thenstamp = "%s (%s)" % (
1126 datetime.datetime.isoformat(
1127 datetime.datetime.fromtimestamp(then),
1131 search_cache.insert(
1133 "# based on data files from: %s\n" % thenstamp
1136 scache_existing = configparser.ConfigParser()
1137 scache_existing.read(scache_fn)
1138 if not scache_existing.has_section(search[0]):
1139 scache_fd = codecs.open(scache_fn, "a", "utf-8")
1140 scache_fd.writelines(search_cache)
1142 except (IOError, OSError): pass
# closest(): among `nodes` (dict of name -> attribute dict), find the one
# whose `fieldname` coordinate pair is nearest to `position`, searching only
# within `angle` radians.  Distance is the great-circle central angle via the
# spherical law of cosines (1160-1164); coordinates are (lat, lon) in radians,
# per gecos().  The longitude pre-check at 1154-1155 also accepts wraparound
# across the antimeridian.  Returns (matched-name-or-None, best-angle); the
# lines that record `match` when a candidate wins are elided.
1146 def closest(position, nodes, fieldname, angle=None):
# no limit given: 2*pi radians covers the whole sphere
1148 if not angle: angle = 2*math.pi
1151 if fieldname in nodes[name]:
1152 node = nodes[name][fieldname]
# cheap latitude bound first, before the trig below
1153 if node and abs( position[0]-node[0] ) < angle:
1154 if abs( position[1]-node[1] ) < angle \
1155 or abs( abs( position[1]-node[1] ) - 2*math.pi ) < angle:
# identical points short-circuit (acos of rounding noise would be unstable)
1156 if position == node:
1160 candidate = math.acos(
1161 math.sin( position[0] ) * math.sin( node[0] ) \
1162 + math.cos( position[0] ) \
1163 * math.cos( node[0] ) \
1164 * math.cos( position[1] - node[1] )
1166 if candidate < angle:
# normalize the winning key to str (section names may come in as other types)
1169 if match: match = str(match)
1170 return (match, angle)
# gecos(): parse a "lat, lon" string where each half is decimal degrees or
# degrees-minutes(-seconds) with an optional e/n/s/w hemisphere suffix, and
# return the pair converted to radians.  Southern/western hemispheres negate
# the value.  `foo`/`bar` absorb the regex's outer wrapper groups around the
# optional minutes and seconds captures.
1172 def gecos(formatted):
1174 coordinates = formatted.split(",")
# exactly two components are processed: latitude then longitude
1175 for coordinate in range(0, 2):
1176 degrees, foo, minutes, bar, seconds, hemisphere = re.match(
1177 r"([\+-]?\d+\.?\d*)(-(\d+))?(-(\d+))?([ensw]?)$",
1178 coordinates[coordinate].strip().lower()
1180 value = float(degrees)
1181 if minutes: value += float(minutes)/60
1182 if seconds: value += float(seconds)/3600
1183 if hemisphere and hemisphere in "sw": value *= -1
1184 coordinates[coordinate] = math.radians(value)
1185 return tuple(coordinates)
1188 import codecs, datetime, hashlib, os, re, sys, tarfile, time, zipfile
1189 if pyversion("3"): import configparser
1190 else: import ConfigParser as configparser
1191 gcounties_an = "Gaz_counties_national.zip"
1192 gcounties_fn = "Gaz_counties_national.txt"
1193 gcousubs_an = "Gaz_cousubs_national.zip"
1194 gcousubs_fn = "Gaz_cousubs_national.txt"
1195 gplaces_an = "Gaz_places_national.zip"
1196 gplaces_fn = "Gaz_places_national.txt"
1197 gzcta_an = "Gaz_zcta_national.zip"
1198 gzcta_fn = "Gaz_zcta_national.txt"
1199 for filename in os.listdir("."):
1200 if re.match("bp[0-9][0-9][a-z][a-z][0-9][0-9].dbx$", filename):
1201 cpfzcf_fn = filename
1203 nsdcccc_fn = "nsd_cccc.txt"
1204 zcatalog_an = "zonecatalog.curr.tar"
1205 metartbl_fn = "metar.tbl"
1206 coopact_fn = "COOP-ACT.TXT"
1207 overrides_fn = "overrides.conf"
1208 overrideslog_fn = "overrides.log"
1212 airports_fn = "airports"
1213 places_fn = "places"
1214 stations_fn = "stations"
1219 # generated by %s on %s from these public domain sources:
1221 # http://www.census.gov/geo/www/gazetteer/gazetteer2010.html
1227 # http://www.weather.gov/geodata/catalog/wsom/html/cntyzone.htm
1230 # http://weather.noaa.gov/data/nsd_cccc.txt
1233 # http://weather.noaa.gov/pub/data/zonecatalog.curr.tar
1236 # http://www.nco.ncep.noaa.gov/pmb/codes/nwprod/dictionaries/metar.tbl
1239 # ftp://ftp.ncdc.noaa.gov/pub/data/inventories/COOP-ACT.TXT
1242 # ...and these manually-generated or hand-compiled adjustments:
1248 os.path.basename( sys.argv[0] ),
1249 datetime.date.isoformat(
1250 datetime.datetime.fromtimestamp( time.time() )
1252 hashlib.md5( open(gcounties_an, "rb").read() ).hexdigest(),
1253 datetime.date.isoformat(
1254 datetime.datetime.fromtimestamp( os.path.getmtime(gcounties_an) )
1257 hashlib.md5( open(gcousubs_an, "rb").read() ).hexdigest(),
1258 datetime.date.isoformat(
1259 datetime.datetime.fromtimestamp( os.path.getmtime(gcousubs_an) )
1262 hashlib.md5( open(gplaces_an, "rb").read() ).hexdigest(),
1263 datetime.date.isoformat(
1264 datetime.datetime.fromtimestamp( os.path.getmtime(gplaces_an) )
1267 hashlib.md5( open(gzcta_an, "rb").read() ).hexdigest(),
1268 datetime.date.isoformat(
1269 datetime.datetime.fromtimestamp( os.path.getmtime(gzcta_an) )
1272 hashlib.md5( open(cpfzcf_fn, "rb").read() ).hexdigest(),
1273 datetime.date.isoformat(
1274 datetime.datetime.fromtimestamp( os.path.getmtime(cpfzcf_fn) )
1277 hashlib.md5( open(nsdcccc_fn, "rb").read() ).hexdigest(),
1278 datetime.date.isoformat(
1279 datetime.datetime.fromtimestamp( os.path.getmtime(nsdcccc_fn) )
1282 hashlib.md5( open(zcatalog_an, "rb").read() ).hexdigest(),
1283 datetime.date.isoformat(
1284 datetime.datetime.fromtimestamp( os.path.getmtime(zcatalog_an) )
1287 hashlib.md5( open(metartbl_fn, "rb").read() ).hexdigest(),
1288 datetime.date.isoformat(
1289 datetime.datetime.fromtimestamp( os.path.getmtime(metartbl_fn) )
1292 hashlib.md5( open(coopact_fn, "rb").read() ).hexdigest(),
1293 datetime.date.isoformat(
1294 datetime.datetime.fromtimestamp( os.path.getmtime(coopact_fn) )
1297 hashlib.md5( open(overrides_fn, "rb").read() ).hexdigest(),
1298 datetime.date.isoformat(
1299 datetime.datetime.fromtimestamp( os.path.getmtime(overrides_fn) )
1302 hashlib.md5( open(slist_fn, "rb").read() ).hexdigest(),
1303 datetime.date.isoformat(
1304 datetime.datetime.fromtimestamp( os.path.getmtime(slist_fn) )
1307 hashlib.md5( open(zlist_fn, "rb").read() ).hexdigest(),
1308 datetime.date.isoformat(
1309 datetime.datetime.fromtimestamp( os.path.getmtime(zlist_fn) )
1318 message = "Reading %s:%s..." % (gcounties_an, gcounties_fn)
1319 sys.stdout.write(message)
1322 gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "rU")
1323 for line in gcounties:
1324 fields = line.decode("latin1").strip().split("\t")
1325 if len(fields) == 10 and fields[0] != "STUSPS":
1326 fips = "fips%s" % fields[1]
1327 description = "%s, %s" % ( fields[3], fields[0] )
1328 centroid = gecos( ",".join( fields[8:10] ) )
1329 if fips not in places: places[fips] = {}
1330 places[fips]["centroid"] = centroid
1331 places[fips]["description"] = description
1334 print("done (%s lines)." % count)
1335 message = "Reading %s:%s..." % (gcousubs_an, gcousubs_fn)
1336 sys.stdout.write(message)
1339 gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "rU")
1340 for line in gcousubs:
1341 fields = line.decode("latin1").strip().split("\t")
1342 if len(fields) == 10 and fields[0] != "STUSPS":
1343 fips = "fips%s" % fields[1]
1344 description = "%s, %s" % ( fields[3], fields[0] )
1345 centroid = gecos( ",".join( fields[8:10] ) )
1346 if fips not in places: places[fips] = {}
1347 places[fips]["centroid"] = centroid
1348 places[fips]["description"] = description
1351 print("done (%s lines)." % count)
1352 message = "Reading %s:%s..." % (gplaces_an, gplaces_fn)
1353 sys.stdout.write(message)
1356 gplaces = zipfile.ZipFile(gplaces_an).open(gplaces_fn, "rU")
1357 for line in gplaces:
1358 fields = line.decode("latin1").strip().split("\t")
1359 if len(fields) == 10 and fields[0] != "STUSPS":
1360 fips = "fips%s" % fields[1]
1361 description = "%s, %s" % ( fields[3], fields[0] )
1362 centroid = gecos( ",".join( fields[8:10] ) )
1363 if fips not in places: places[fips] = {}
1364 places[fips]["centroid"] = centroid
1365 places[fips]["description"] = description
1368 print("done (%s lines)." % count)
1369 message = "Reading %s..." % slist_fn
1370 sys.stdout.write(message)
1373 slist = codecs.open(slist_fn, "rU")
1375 icao = line.split("#")[0].strip()
1378 "metar": "http://weather.noaa.gov/pub/data/observations/"\
1379 + "metar/decoded/%s.TXT" % icao.upper()
1383 print("done (%s lines)." % count)
1384 message = "Reading %s..." % metartbl_fn
1385 sys.stdout.write(message)
1388 metartbl = codecs.open(metartbl_fn, "rU")
1389 for line in metartbl:
1390 icao = line[:4].strip().lower()
1391 if icao in stations:
1394 line[16:48].replace("_", " ").strip().title().split()
1396 if name: description.append(name)
1397 st = line[49:51].strip()
1398 if st: description.append(st)
1399 cn = line[52:54].strip()
1400 if cn: description.append(cn)
1402 stations[icao]["description"] = ", ".join(description)
1403 lat = line[55:60].strip()
1405 lat = int(lat)/100.0
1406 lon = line[61:67].strip()
1408 lon = int(lon)/100.0
1409 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1412 print("done (%s lines)." % count)
1413 message = "Reading %s..." % nsdcccc_fn
1414 sys.stdout.write(message)
1417 nsdcccc = codecs.open(nsdcccc_fn, "rU", "latin1")
1418 for line in nsdcccc:
1420 fields = line.split(";")
1421 icao = fields[0].strip().lower()
1422 if icao in stations:
1424 name = " ".join( fields[3].strip().title().split() )
1425 if name: description.append(name)
1426 st = fields[4].strip()
1427 if st: description.append(st)
1428 country = " ".join( fields[5].strip().title().split() )
1429 if country: description.append(country)
1431 stations[icao]["description"] = ", ".join(description)
1432 lat, lon = fields[7:9]
1434 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1435 elif "location" not in stations[icao]:
1436 lat, lon = fields[5:7]
1438 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1441 print("done (%s lines)." % count)
1442 message = "Reading %s..." % coopact_fn
1443 sys.stdout.write(message)
1446 coopact = open(coopact_fn)
1447 for line in coopact:
1448 icao = line[33:37].strip().lower()
1449 if icao in stations:
1450 iata = line[22:26].strip().lower()
1451 if len(iata) == 3: airports[iata] = { "station": icao }
1452 if "description" not in stations[icao]:
1454 name = " ".join( line[99:129].strip().title().split() )
1455 if name: description.append(name)
1456 st = line[59:61].strip()
1457 if st: description.append(st)
1458 country = " ".join( line[38:58].strip().title().split() )
1459 if country: description.append(country)
1461 stations[icao]["description"] = ", ".join(description)
1462 if "location" not in stations[icao]:
1463 lat = line[130:139].strip()
1465 lat = lat.replace(" ", "-")
1466 lon = line[140:150].strip()
1468 lon = lon.replace(" ", "-")
1469 stations[icao]["location"] = gecos(
1470 "%s,%s" % (lat, lon)
1474 print("done (%s lines)." % count)
1475 message = "Reading %s..." % zlist_fn
1476 sys.stdout.write(message)
1479 zlist = codecs.open(zlist_fn, "rU")
1481 line = line.split("#")[0].strip()
1486 print("done (%s lines)." % count)
1487 message = "Reading %s:*..." % zcatalog_an
1488 sys.stdout.write(message)
1491 zcatalog = tarfile.open(zcatalog_an)
1492 for entry in zcatalog.getmembers():
1495 r"([a-z]+z[0-9]+)\.txt$",
1496 os.path.basename(entry.name)
1499 zone = fnmatch.group(1)
1501 data = zcatalog.extractfile(entry).readlines()
1502 description = data[0].decode("ascii").strip()
1503 zones[zone]["description"] = description
1504 for line in data[1:]:
1505 line = line.decode("latin1").strip()
1506 urimatch = re.match("/webdocs/(.+):(.+) for ", line)
1508 uritype = urimatch.group(2).lower().replace(" ","_")
1509 zones[zone][uritype] \
1510 = "http://weather.noaa.gov/%s" \
1514 print("done (%s files)." % count)
1515 message = "Reading %s..." % cpfzcf_fn
1516 sys.stdout.write(message)
1520 cpfzcf = open(cpfzcf_fn)
1522 fields = line.split("|")
1523 if len(fields) == 11 \
1524 and fields[0] and fields[1] and fields[9] and fields[10]:
1525 zone = "z".join( fields[:2] ).lower()
1527 zones[zone]["centroid"] = gecos( ",".join( fields[9:] ) )
1530 description = fields[3]
1532 fips = "fips%s"%fields[6]
1534 "%s, %s" % (county, state),
1535 "%s County, %s" % (county, state),
1537 if description.endswith(" Counties"):
1538 description = description[:-9]
1539 for addition in description.split(" and "):
1540 possible.append( "%s, %s" % (addition, state) )
1541 possible.append( "%s County, %s" % (addition, state) )
1542 if fips in places and "centroid" in places[fips]:
1543 for candidate in zones:
1544 if "centroid" not in zones[candidate] and \
1545 "description" in zones[candidate] and \
1546 zones[candidate]["description"] in possible:
1547 zones[candidate]["centroid"] = \
1548 places[fips]["centroid"]
1551 print("done (%s lines)." % count)
1552 message = "Reading %s:%s..." % (gzcta_an, gzcta_fn)
1553 sys.stdout.write(message)
1556 gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "rU")
1558 fields = line.decode("latin1").strip().split("\t")
1559 if len(fields) == 7 and fields[0] != "GEOID":
1561 if zcta not in zctas: zctas[zcta] = {}
1562 zctas[zcta]["centroid"] = gecos(
1563 ",".join( ( fields[6], fields[5] ) )
1567 print("done (%s lines)." % count)
1568 message = "Reading %s..." % overrides_fn
1569 sys.stdout.write(message)
1575 overrides = configparser.ConfigParser()
1576 overrides.readfp( codecs.open(overrides_fn, "r", "utf8") )
1578 for section in overrides.sections():
1581 if section.startswith("-"):
1582 section = section[1:]
1584 else: delete = False
1585 if re.match("[A-Za-z]{3}$", section):
1587 if section in airports:
1588 del( airports[section] )
1589 logact = "removed airport %s" % section
1592 logact = "tried to remove nonexistent airport %s" % section
1594 if section in airports:
1595 logact = "changed airport %s" % section
1598 airports[section] = {}
1599 logact = "added airport %s" % section
1601 for key,value in overrides.items(section):
1602 if key in airports[section]: chgopt += 1
1604 if key in ("centroid", "location"):
1605 airports[section][key] = eval(value)
1607 airports[section][key] = value
1608 if addopt and chgopt:
1609 logact += " (+%s/!%s options)" % (addopt, chgopt)
1610 elif addopt: logact += " (+%s options)" % addopt
1611 elif chgopt: logact += " (!%s options)" % chgopt
1612 elif re.match("[A-Za-z0-9]{4}$", section):
1614 if section in stations:
1615 del( stations[section] )
1616 logact = "removed station %s" % section
1619 logact = "tried to remove nonexistent station %s" % section
1621 if section in stations:
1622 logact = "changed station %s" % section
1625 stations[section] = {}
1626 logact = "added station %s" % section
1628 for key,value in overrides.items(section):
1629 if key in stations[section]: chgopt += 1
1631 if key in ("centroid", "location"):
1632 stations[section][key] = eval(value)
1634 stations[section][key] = value
1635 if addopt and chgopt:
1636 logact += " (+%s/!%s options)" % (addopt, chgopt)
1637 elif addopt: logact += " (+%s options)" % addopt
1638 elif chgopt: logact += " (!%s options)" % chgopt
1639 elif re.match("[0-9]{5}$", section):
1641 if section in zctas:
1642 del( zctas[section] )
1643 logact = "removed zcta %s" % section
1646 logact = "tried to remove nonexistent zcta %s" % section
1648 if section in zctas:
1649 logact = "changed zcta %s" % section
1653 logact = "added zcta %s" % section
1655 for key,value in overrides.items(section):
1656 if key in zctas[section]: chgopt += 1
1658 if key in ("centroid", "location"):
1659 zctas[section][key] = eval(value)
1661 zctas[section][key] = value
1662 if addopt and chgopt:
1663 logact += " (+%s/!%s options)" % (addopt, chgopt)
1664 elif addopt: logact += " (+%s options)" % addopt
1665 elif chgopt: logact += " (!%s options)" % chgopt
1666 elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", section):
1668 if section in zones:
1669 del( zones[section] )
1670 logact = "removed zone %s" % section
1673 logact = "tried to remove nonexistent zone %s" % section
1675 if section in zones:
1676 logact = "changed zone %s" % section
1680 logact = "added zone %s" % section
1682 for key,value in overrides.items(section):
1683 if key in zones[section]: chgopt += 1
1685 if key in ("centroid", "location"):
1686 zones[section][key] = eval(value)
1688 zones[section][key] = value
1689 if addopt and chgopt:
1690 logact += " (+%s/!%s options)" % (addopt, chgopt)
1691 elif addopt: logact += " (+%s options)" % addopt
1692 elif chgopt: logact += " (!%s options)" % chgopt
1693 elif re.match("fips[0-9]+$", section):
1695 if section in places:
1696 del( places[section] )
1697 logact = "removed place %s" % section
1700 logact = "tried to remove nonexistent place %s" % section
1702 if section in places:
1703 logact = "changed place %s" % section
1706 places[section] = {}
1707 logact = "added place %s" % section
1709 for key,value in overrides.items(section):
1710 if key in places[section]: chgopt += 1
1712 if key in ("centroid", "location"):
1713 places[section][key] = eval(value)
1715 places[section][key] = value
1716 if addopt and chgopt:
1717 logact += " (+%s/!%s options)" % (addopt, chgopt)
1718 elif addopt: logact += " (+%s options)" % addopt
1719 elif chgopt: logact += " (!%s options)" % chgopt
1721 overrideslog.append("%s\n" % logact)
1723 if os.path.exists(overrideslog_fn):
1724 os.rename(overrideslog_fn, "%s_old"%overrideslog_fn)
1725 overrideslog_fd = codecs.open(overrideslog_fn, "w", "utf8")
1726 overrideslog_fd.writelines(overrideslog)
1727 overrideslog_fd.close()
1728 print("done (%s overridden sections: +%s/-%s/!%s)." % (
1734 estimate = 2*len(places) + len(stations) + 2*len(zctas) + len(zones)
1736 "Correlating places, stations, ZCTAs and zones (upper bound is %s):" % \
1740 milestones = list( range(51) )
1742 sys.stdout.write(message)
1745 centroid = places[fips]["centroid"]
1747 station = closest(centroid, stations, "location", 0.1)
1749 places[fips]["station"] = station
1752 level = int(50*count/estimate)
1753 if level in milestones:
1754 for remaining in milestones[:milestones.index(level)+1]:
1757 sys.stdout.write(message)
1760 message = "%s%%" % (remaining*2,)
1761 sys.stdout.write(message)
1763 milestones.remove(remaining)
1765 zone = closest(centroid, zones, "centroid", 0.1)
1767 places[fips]["zone"] = zone
1770 level = int(50*count/estimate)
1771 if level in milestones:
1772 for remaining in milestones[:milestones.index(level)+1]:
1775 sys.stdout.write(message)
1778 message = "%s%%" % (remaining*2,)
1779 sys.stdout.write(message)
1781 milestones.remove(remaining)
1782 for station in stations:
1783 if "location" in stations[station]:
1784 location = stations[station]["location"]
1786 zone = closest(location, zones, "centroid", 0.1)
1788 stations[station]["zone"] = zone
1791 level = int(50*count/estimate)
1792 if level in milestones:
1793 for remaining in milestones[:milestones.index(level)+1]:
1796 sys.stdout.write(message)
1799 message = "%s%%" % (remaining*2,)
1800 sys.stdout.write(message)
1802 milestones.remove(remaining)
1803 for zcta in zctas.keys():
1804 centroid = zctas[zcta]["centroid"]
1806 station = closest(centroid, stations, "location", 0.1)
1808 zctas[zcta]["station"] = station
1811 level = int(50*count/estimate)
1812 if level in milestones:
1813 for remaining in milestones[ : milestones.index(level)+1 ]:
1816 sys.stdout.write(message)
1819 message = "%s%%" % (remaining*2,)
1820 sys.stdout.write(message)
1822 milestones.remove(remaining)
1824 zone = closest(centroid, zones, "centroid", 0.1)
1826 zctas[zcta]["zone"] = zone
1829 level = int(50*count/estimate)
1830 if level in milestones:
1831 for remaining in milestones[:milestones.index(level)+1]:
1834 sys.stdout.write(message)
1837 message = "%s%%" % (remaining*2,)
1838 sys.stdout.write(message)
1840 milestones.remove(remaining)
1841 for zone in zones.keys():
1842 if "centroid" in zones[zone]:
1843 centroid = zones[zone]["centroid"]
1845 station = closest(centroid, stations, "location", 0.1)
1847 zones[zone]["station"] = station
1850 level = int(50*count/estimate)
1851 if level in milestones:
1852 for remaining in milestones[:milestones.index(level)+1]:
1855 sys.stdout.write(message)
1858 message = "%s%%" % (remaining*2,)
1859 sys.stdout.write(message)
1861 milestones.remove(remaining)
1862 for remaining in milestones:
1865 sys.stdout.write(message)
1868 message = "%s%%" % (remaining*2,)
1869 sys.stdout.write(message)
1871 print("\n done (%s correlations)." % count)
1872 message = "Writing %s..." % airports_fn
1873 sys.stdout.write(message)
1876 if os.path.exists(airports_fn):
1877 os.rename(airports_fn, "%s_old"%airports_fn)
1878 airports_fd = codecs.open(airports_fn, "w", "utf8")
1879 airports_fd.write(header)
1880 for airport in sorted( airports.keys() ):
1881 airports_fd.write("\n\n[%s]" % airport)
1882 for key, value in sorted( airports[airport].items() ):
1883 airports_fd.write( "\n%s = %s" % (key, value) )
1886 print("done (%s sections)." % count)
1887 message = "Writing %s..." % places_fn
1888 sys.stdout.write(message)
1891 if os.path.exists(places_fn):
1892 os.rename(places_fn, "%s_old"%places_fn)
1893 places_fd = codecs.open(places_fn, "w", "utf8")
1894 places_fd.write(header)
1895 for fips in sorted( places.keys() ):
1896 places_fd.write("\n\n[%s]" % fips)
1897 for key, value in sorted( places[fips].items() ):
1898 places_fd.write( "\n%s = %s" % (key, value) )
1901 print("done (%s sections)." % count)
1902 message = "Writing %s..." % stations_fn
1903 sys.stdout.write(message)
1906 if os.path.exists(stations_fn):
1907 os.rename(stations_fn, "%s_old"%stations_fn)
1908 stations_fd = codecs.open(stations_fn, "w", "utf8")
1909 stations_fd.write(header)
1910 for station in sorted( stations.keys() ):
1911 stations_fd.write("\n\n[%s]" % station)
1912 for key, value in sorted( stations[station].items() ):
1913 stations_fd.write( "\n%s = %s" % (key, value) )
1916 print("done (%s sections)." % count)
1917 message = "Writing %s..." % zctas_fn
1918 sys.stdout.write(message)
1921 if os.path.exists(zctas_fn):
1922 os.rename(zctas_fn, "%s_old"%zctas_fn)
1923 zctas_fd = codecs.open(zctas_fn, "w", "utf8")
1924 zctas_fd.write(header)
1925 for zcta in sorted( zctas.keys() ):
1926 zctas_fd.write("\n\n[%s]" % zcta)
1927 for key, value in sorted( zctas[zcta].items() ):
1928 zctas_fd.write( "\n%s = %s" % (key, value) )
1931 print("done (%s sections)." % count)
1932 message = "Writing %s..." % zones_fn
1933 sys.stdout.write(message)
1936 if os.path.exists(zones_fn):
1937 os.rename(zones_fn, "%s_old"%zones_fn)
1938 zones_fd = codecs.open(zones_fn, "w", "utf8")
1939 zones_fd.write(header)
1940 for zone in sorted( zones.keys() ):
1941 zones_fd.write("\n\n[%s]" % zone)
1942 for key, value in sorted( zones[zone].items() ):
1943 zones_fd.write( "\n%s = %s" % (key, value) )
1946 print("done (%s sections)." % count)
1947 message = "Starting QA check..."
1948 sys.stdout.write(message)
1950 airports = configparser.ConfigParser()
1951 airports.read(airports_fn)
1952 places = configparser.ConfigParser()
1953 places.read(places_fn)
1954 stations = configparser.ConfigParser()
1955 stations.read(stations_fn)
1956 zctas = configparser.ConfigParser()
1957 zctas.read(zctas_fn)
1958 zones = configparser.ConfigParser()
1959 zones.read(zones_fn)
1961 places_nocentroid = 0
1962 places_nodescription = 0
1963 for place in sorted( places.sections() ):
1964 if not places.has_option(place, "centroid"):
1965 qalog.append("%s: no centroid\n" % place)
1966 places_nocentroid += 1
1967 if not places.has_option(place, "description"):
1968 qalog.append("%s: no description\n" % place)
1969 places_nodescription += 1
1970 stations_nodescription = 0
1971 stations_nolocation = 0
1972 stations_nometar = 0
1973 for station in sorted( stations.sections() ):
1974 if not stations.has_option(station, "description"):
1975 qalog.append("%s: no description\n" % station)
1976 stations_nodescription += 1
1977 if not stations.has_option(station, "location"):
1978 qalog.append("%s: no location\n" % station)
1979 stations_nolocation += 1
1980 if not stations.has_option(station, "metar"):
1981 qalog.append("%s: no metar\n" % station)
1982 stations_nometar += 1
1983 airports_badstation = 0
1984 airports_nostation = 0
1985 for airport in sorted( airports.sections() ):
1986 if not airports.has_option(airport, "station"):
1987 qalog.append("%s: no station\n" % airport)
1988 airports_nostation += 1
1990 station = airports.get(airport, "station")
1991 if station not in stations.sections():
1992 qalog.append( "%s: bad station %s\n" % (airport, station) )
1993 airports_badstation += 1
1994 zctas_nocentroid = 0
1995 for zcta in sorted( zctas.sections() ):
1996 if not zctas.has_option(zcta, "centroid"):
1997 qalog.append("%s: no centroid\n" % zcta)
1998 zctas_nocentroid += 1
1999 zones_nocentroid = 0
2000 zones_nodescription = 0
2001 zones_noforecast = 0
2002 zones_overlapping = 0
2004 for zone in zones.sections():
2005 if zones.has_option(zone, "centroid"):
2007 "centroid": eval( zones.get(zone, "centroid") )
2009 for zone in sorted( zones.sections() ):
2010 if zones.has_option(zone, "centroid"):
2011 zonetable_local = zonetable.copy()
2012 del( zonetable_local[zone] )
2013 centroid = eval( zones.get(zone, "centroid") )
2015 nearest = closest(centroid, zonetable_local, "centroid", 0.1)
2016 if nearest[1]*radian_to_km < 1:
2017 qalog.append( "%s: within one km of %s\n" % (
2021 zones_overlapping += 1
2023 qalog.append("%s: no centroid\n" % zone)
2024 zones_nocentroid += 1
2025 if not zones.has_option(zone, "description"):
2026 qalog.append("%s: no description\n" % zone)
2027 zones_nodescription += 1
2028 if not zones.has_option(zone, "zone_forecast"):
2029 qalog.append("%s: no forecast\n" % zone)
2030 zones_noforecast += 1
2031 if os.path.exists(qalog_fn):
2032 os.rename(qalog_fn, "%s_old"%qalog_fn)
2033 qalog_fd = codecs.open(qalog_fn, "w", "utf8")
2034 qalog_fd.writelines(qalog)
2037 print("issues found (see %s for details):"%qalog_fn)
2038 if airports_badstation:
2039 print(" %s airports with invalid station"%airports_badstation)
2040 if airports_nostation:
2041 print(" %s airports with no station"%airports_nostation)
2042 if places_nocentroid:
2043 print(" %s places with no centroid"%places_nocentroid)
2044 if places_nodescription:
2045 print(" %s places with no description"%places_nodescription)
2046 if stations_nodescription:
2047 print(" %s stations with no description"%stations_nodescription)
2048 if stations_nolocation:
2049 print(" %s stations with no location"%stations_nolocation)
2050 if stations_nometar:
2051 print(" %s stations with no METAR"%stations_nometar)
2052 if zctas_nocentroid:
2053 print(" %s ZCTAs with no centroid"%zctas_nocentroid)
2054 if zones_nocentroid:
2055 print(" %s zones with no centroid"%zones_nocentroid)
2056 if zones_nodescription:
2057 print(" %s zones with no description"%zones_nodescription)
2058 if zones_noforecast:
2059 print(" %s zones with no forecast"%zones_noforecast)
2060 if zones_overlapping:
2061 print(" %s zones within one km of another"%zones_overlapping)
2062 else: print("no issues found.")
2063 print("Indexing complete!")