1 """Contains various object definitions needed by the weather utility."""
3 weather_copyright = """\
4 # Copyright (c) 2006-2020 Jeremy Stanley <fungi@yuggoth.org>. Permission to
5 # use, copy, modify, and distribute this software is granted under terms
6 # provided in the LICENSE file distributed with this software.
9 weather_version = "2.4.1"
# Mean Earth radius expressed in kilometers and statute miles; multiplying a
# great-circle angular distance (in radians) by one of these yields the
# corresponding surface distance, as done for the proximity readouts below.
radian_to_km = 6372.795484
radian_to_mi = 3959.871528
14 def pyversion(ref=None):
15 """Determine the Python version and optionally compare to a reference."""
17 ver = platform.python_version()
20 int(x) for x in ver.split(".")[:2]
22 int(x) for x in ref.split(".")[:2]
27 """An object to contain selection data."""
29 """Store the config, options and arguments."""
30 self.config = get_config()
31 self.options, self.arguments = get_options(self.config)
32 if self.get_bool("cache") and self.get_bool("cache_search") \
33 and not self.get_bool("longlist"):
34 integrate_search_cache(
39 if not self.arguments:
40 if "id" in self.options.__dict__ \
41 and self.options.__dict__["id"]:
42 self.arguments.append( self.options.__dict__["id"] )
43 del( self.options.__dict__["id"] )
45 message = "WARNING: the --id option is deprecated and will eventually be removed\n"
46 sys.stderr.write(message)
47 elif "city" in self.options.__dict__ \
48 and self.options.__dict__["city"] \
49 and "st" in self.options.__dict__ \
50 and self.options.__dict__["st"]:
51 self.arguments.append(
53 self.options.__dict__["city"],
54 self.options.__dict__["st"]
57 del( self.options.__dict__["city"] )
58 del( self.options.__dict__["st"] )
60 message = "WARNING: the --city/--st options are deprecated and will eventually be removed\n"
61 sys.stderr.write(message)
62 def get(self, option, argument=None):
63 """Retrieve data from the config or options."""
65 if self.config.has_section(argument) and (
66 self.config.has_option(argument, "city") \
67 or self.config.has_option(argument, "id") \
68 or self.config.has_option(argument, "st")
70 self.config.remove_section(argument)
72 message = "WARNING: the city/id/st options are now unsupported in aliases\n"
73 sys.stderr.write(message)
74 if not self.config.has_section(argument):
77 path=self.get("setpath"),
78 info=self.get("info"),
80 self.get("cache") and self.get("cache_search")
82 cachedir=self.get("cachedir"),
83 quiet=self.get_bool("quiet")
85 self.config.add_section(argument)
86 for item in guessed.items():
87 self.config.set(argument, *item)
88 if self.config.has_option(argument, option):
89 return self.config.get(argument, option)
90 if option in self.options.__dict__:
91 return self.options.__dict__[option]
93 message = "WARNING: no URI defined for %s\n" % option
94 sys.stderr.write(message)
96 def get_bool(self, option, argument=None):
97 """Get data and coerce to a boolean if necessary."""
98 return bool(self.get(option, argument))
    def getint(self, option, argument=None):
        """Get data and coerce to an integer if necessary."""
        value = self.get(option, argument)
        # Only coerce truthy values; int("") would raise ValueError.
        # NOTE(review): the listing appears truncated immediately after this
        # line (gap in the original numbering) — the falsy branch that the
        # original file presumably provides is not visible here; confirm
        # against the upstream source before relying on the falsy behavior.
        if value: return int(value)
106 """Average a list of coordinates."""
113 return (x/count, y/count)
115 def filter_units(line, units="imperial"):
116 """Filter or convert units in a line of text between US/UK and metric."""
118 # filter lines with both pressures in the form of "X inches (Y hPa)" or
121 "(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
125 preamble, in_hg, i_fr, i_un, hpa, h_fr, trailer = dual_p.groups()
126 if units == "imperial": line = preamble + in_hg + trailer
127 elif units == "metric": line = preamble + hpa + trailer
128 # filter lines with both temperatures in the form of "X F (Y C)"
130 "(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
134 preamble, fahrenheit, f_fr, celsius, c_fr, trailer = dual_t.groups()
135 if units == "imperial": line = preamble + fahrenheit + trailer
136 elif units == "metric": line = preamble + celsius + trailer
137 # if metric is desired, convert distances in the form of "X mile(s)" to
139 if units == "metric":
140 imperial_d = re.match(
141 "(.* )(\d+)( mile\(s\))(.*)",
145 preamble, mi, m_u, trailer = imperial_d.groups()
146 line = preamble + str(int(round(int(mi)*1.609344))) \
147 + " kilometer(s)" + trailer
148 # filter speeds in the form of "X MPH (Y KT)" to just "X MPH"; if metric is
149 # desired, convert to "Z KPH"
150 imperial_s = re.match(
151 "(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
155 preamble, mph, m_u, kt, trailer = imperial_s.groups()
156 if units == "imperial": line = preamble + mph + m_u + trailer
157 elif units == "metric":
158 line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
160 imperial_s = re.match(
161 "(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
165 preamble, mph, m_u, kt, trailer = imperial_s.groups()
166 if units == "imperial": line = preamble + mph + m_u + trailer
167 elif units == "metric":
168 line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
170 # if imperial is desired, qualify given forcast temperatures like "X F"; if
171 # metric is desired, convert to "Y C"
172 imperial_t = re.match(
173 "(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
177 preamble, parameter, fahrenheit, sep, trailer = imperial_t.groups()
178 if units == "imperial":
179 line = preamble + parameter + fahrenheit + " F" + sep + trailer
180 elif units == "metric":
181 line = preamble + parameter \
182 + str(int(round((int(fahrenheit)-32)*5/9))) + " C" + sep \
184 # hand off the resulting line
194 """Return a string containing the results of a URI GET."""
196 import urllib, urllib.error, urllib.request
197 URLError = urllib.error.URLError
198 urlopen = urllib.request.urlopen
200 import urllib2 as urllib
201 URLError = urllib.URLError
202 urlopen = urllib.urlopen
205 dcachedir = os.path.join( os.path.expanduser(cachedir), "datacache" )
206 if not os.path.exists(dcachedir):
207 try: os.makedirs(dcachedir)
208 except (IOError, OSError): pass
209 dcache_fn = os.path.join(
211 uri.split(":",1)[1].replace("/","_")
214 if cache_data and os.access(dcache_fn, os.R_OK) \
215 and now-cacheage < os.stat(dcache_fn).st_mtime <= now:
216 dcache_fd = open(dcache_fn)
217 data = dcache_fd.read()
221 data = urlopen(uri).read().decode("utf-8")
223 if ignore_fail: return ""
225 sys.stderr.write("%s error: failed to retrieve\n %s\n\n" % (
226 os.path.basename( sys.argv[0] ), uri))
228 # Some data sources are HTML with the plain text wrapped in pre tags
230 data = data[data.find("<pre>")+5:data.find("</pre>")]
234 dcache_fd = codecs.open(dcache_fn, "w", "utf-8")
235 dcache_fd.write(data)
237 except (IOError, OSError): pass
251 """Return a summarized METAR for the specified station."""
254 message = "%s error: METAR URI required for conditions\n" % \
255 os.path.basename( sys.argv[0] )
256 sys.stderr.write(message)
260 cache_data=cache_data,
264 if pyversion("3") and type(metar) is bytes: metar = metar.decode("utf-8")
265 if verbose: return metar
268 lines = metar.split("\n")
271 "relative_humidity," \
272 + "precipitation_last_hour," \
273 + "sky conditions," \
279 headerlist = headers.lower().replace("_"," ").split(",")
282 title = "Current conditions at %s"
283 place = lines[0].split(", ")
285 place = "%s, %s" % ( place[0].title(), place[1] )
286 else: place = "<UNKNOWN>"
287 output.append(title%place)
288 output.append("Last updated " + lines[1])
290 for header in headerlist:
292 if line.lower().startswith(header + ":"):
293 if re.match(r".*:\d+$", line): line = line[:line.rfind(":")]
294 if imperial: line = filter_units(line, units="imperial")
295 elif metric: line = filter_units(line, units="metric")
296 if quiet: output.append(line)
297 else: output.append(" " + line)
301 "(no conditions matched your header list, try with --verbose)"
303 return "\n".join(output)
313 """Return alert notice for the specified URI."""
319 cache_data=cache_data,
323 if pyversion("3") and type(alert) is bytes: alert = alert.decode("utf-8")
325 if verbose: return alert
327 if alert.find("\nNATIONAL WEATHER SERVICE") == -1:
331 lines = alert.split("\n")
333 valid_time = time.strftime("%Y%m%d%H%M")
336 if line.startswith("Expires:") \
337 and "Expires:" + valid_time > line:
339 if muted and line.startswith("NATIONAL WEATHER SERVICE"):
346 if line and not muted:
347 if quiet: output.append(line)
348 else: output.append(" " + line)
349 return "\n".join(output)
351 def get_options(config):
352 """Parse the options passed on the command line."""
354 # for optparse's builtin -h/--help option
356 "usage: %prog [options] [alias1|search1 [alias2|search2 [...]]]"
358 # for optparse's builtin --version option
359 verstring = "%prog " + weather_version
363 option_parser = optparse.OptionParser(usage=usage, version=verstring)
364 # separate options object from list of arguments and return both
366 # the -a/--alert option
367 if config.has_option("default", "alert"):
368 default_alert = bool(config.get("default", "alert"))
369 else: default_alert = False
370 option_parser.add_option("-a", "--alert",
373 default=default_alert,
374 help="include local alert notices")
376 # the --atypes option
377 if config.has_option("default", "atypes"):
378 default_atypes = config.get("default", "atypes")
381 "coastal_flood_statement," \
382 + "flash_flood_statement," \
383 + "flash_flood_warning," \
384 + "flash_flood_watch," \
385 + "flood_statement," \
387 + "severe_thunderstorm_warning," \
388 + "severe_weather_statement," \
389 + "special_weather_statement," \
390 + "urgent_weather_message"
391 option_parser.add_option("--atypes",
393 default=default_atypes,
394 help="list of alert notification types to display")
396 # the --build-sets option
397 option_parser.add_option("--build-sets",
401 help="(re)build location correlation sets")
403 # the --cacheage option
404 if config.has_option("default", "cacheage"):
405 default_cacheage = config.getint("default", "cacheage")
406 else: default_cacheage = 900
407 option_parser.add_option("--cacheage",
409 default=default_cacheage,
410 help="duration in seconds to refresh cached data")
412 # the --cachedir option
413 if config.has_option("default", "cachedir"):
414 default_cachedir = config.get("default", "cachedir")
415 else: default_cachedir = "~/.weather"
416 option_parser.add_option("--cachedir",
418 default=default_cachedir,
419 help="directory for storing cached searches and data")
421 # the -f/--forecast option
422 if config.has_option("default", "forecast"):
423 default_forecast = bool(config.get("default", "forecast"))
424 else: default_forecast = False
425 option_parser.add_option("-f", "--forecast",
428 default=default_forecast,
429 help="include a local forecast")
431 # the --headers option
432 if config.has_option("default", "headers"):
433 default_headers = config.get("default", "headers")
437 + "relative_humidity," \
442 + "sky_conditions," \
443 + "precipitation_last_hour"
444 option_parser.add_option("--headers",
446 default=default_headers,
447 help="list of conditions headers to display")
449 # the --imperial option
450 if config.has_option("default", "imperial"):
451 default_imperial = bool(config.get("default", "imperial"))
452 else: default_imperial = False
453 option_parser.add_option("--imperial",
456 default=default_imperial,
457 help="filter/convert conditions for US/UK units")
460 option_parser.add_option("--info",
464 help="output detailed information for your search")
466 # the -l/--list option
467 option_parser.add_option("-l", "--list",
471 help="list all configured aliases and cached searches")
473 # the --longlist option
474 option_parser.add_option("--longlist",
478 help="display details of all configured aliases")
480 # the -m/--metric option
481 if config.has_option("default", "metric"):
482 default_metric = bool(config.get("default", "metric"))
483 else: default_metric = False
484 option_parser.add_option("-m", "--metric",
487 default=default_metric,
488 help="filter/convert conditions for metric units")
490 # the -n/--no-conditions option
491 if config.has_option("default", "conditions"):
492 default_conditions = bool(config.get("default", "conditions"))
493 else: default_conditions = True
494 option_parser.add_option("-n", "--no-conditions",
496 action="store_false",
497 default=default_conditions,
498 help="disable output of current conditions")
500 # the --no-cache option
501 if config.has_option("default", "cache"):
502 default_cache = bool(config.get("default", "cache"))
503 else: default_cache = True
504 option_parser.add_option("--no-cache",
506 action="store_false",
508 help="disable all caching (searches and data)")
510 # the --no-cache-data option
511 if config.has_option("default", "cache_data"):
512 default_cache_data = bool(config.get("default", "cache_data"))
513 else: default_cache_data = True
514 option_parser.add_option("--no-cache-data",
516 action="store_false",
518 help="disable retrieved data caching")
520 # the --no-cache-search option
521 if config.has_option("default", "cache_search"):
522 default_cache_search = bool(config.get("default", "cache_search"))
523 else: default_cache_search = True
524 option_parser.add_option("--no-cache-search",
526 action="store_false",
528 help="disable search result caching")
530 # the -q/--quiet option
531 if config.has_option("default", "quiet"):
532 default_quiet = bool(config.get("default", "quiet"))
533 else: default_quiet = False
534 option_parser.add_option("-q", "--quiet",
537 default=default_quiet,
538 help="skip preambles and don't indent")
540 # the --setpath option
541 if config.has_option("default", "setpath"):
542 default_setpath = config.get("default", "setpath")
543 else: default_setpath = ".:~/.weather"
544 option_parser.add_option("--setpath",
546 default=default_setpath,
547 help="directory search path for correlation sets")
549 # the -v/--verbose option
550 if config.has_option("default", "verbose"):
551 default_verbose = bool(config.get("default", "verbose"))
552 else: default_verbose = False
553 option_parser.add_option("-v", "--verbose",
556 default=default_verbose,
557 help="show full decoded feeds")
560 if config.has_option("default", "city"):
561 default_city = config.get("default", "city")
562 else: default_city = ""
563 option_parser.add_option("-c", "--city",
565 default=default_city,
566 help=optparse.SUPPRESS_HELP)
567 if config.has_option("default", "id"):
568 default_id = config.get("default", "id")
569 else: default_id = ""
570 option_parser.add_option("-i", "--id",
573 help=optparse.SUPPRESS_HELP)
574 if config.has_option("default", "st"):
575 default_st = config.get("default", "st")
576 else: default_st = ""
577 option_parser.add_option("-s", "--st",
580 help=optparse.SUPPRESS_HELP)
582 options, arguments = option_parser.parse_args()
583 return options, arguments
586 """Parse the aliases and configuration."""
587 if pyversion("3"): import configparser
588 else: import ConfigParser as configparser
589 config = configparser.ConfigParser()
593 "/etc/weather/weatherrc",
594 os.path.expanduser("~/.weather/weatherrc"),
595 os.path.expanduser("~/.weatherrc"),
598 for rcfile in rcfiles:
599 if os.access(rcfile, os.R_OK): config.read(rcfile)
600 for section in config.sections():
601 if section != section.lower():
602 if config.has_section(section.lower()):
603 config.remove_section(section.lower())
604 config.add_section(section.lower())
605 for option,value in config.items(section):
606 config.set(section.lower(), option, value)
609 def integrate_search_cache(config, cachedir, setpath):
610 """Add cached search results into the configuration."""
611 if pyversion("3"): import configparser
612 else: import ConfigParser as configparser
614 scache_fn = os.path.join( os.path.expanduser(cachedir), "searches" )
615 if not os.access(scache_fn, os.R_OK): return config
616 scache_fd = open(scache_fn)
617 created = float( scache_fd.readline().split(":")[1].strip().split()[0] )
620 datafiles = data_index(setpath)
622 data_freshness = sorted(
623 [ x[1] for x in datafiles.values() ],
626 else: data_freshness = now
627 if created < data_freshness <= now:
630 print( "[clearing outdated %s]" % scache_fn )
631 except (IOError, OSError):
634 scache = configparser.ConfigParser()
635 scache.read(scache_fn)
636 for section in scache.sections():
637 if not config.has_section(section):
638 config.add_section(section)
639 for option,value in scache.items(section):
640 config.set(section, option, value)
643 def list_aliases(config, detail=False):
644 """Return a formatted list of aliases defined in the config."""
646 output = "\n# configured alias details..."
647 for section in sorted(config.sections()):
648 output += "\n\n[%s]" % section
649 for item in sorted(config.items(section)):
650 output += "\n%s = %s" % item
653 output = "configured aliases and cached searches..."
654 for section in sorted(config.sections()):
655 if config.has_option(section, "description"):
656 description = config.get(section, "description")
657 else: description = "(no description provided)"
658 output += "\n %s: %s" % (section, description)
661 def data_index(path):
664 for filename in ("airports", "places", "stations", "zctas", "zones"):
665 for dirname in path.split(":"):
666 for extension in ("", ".gz", ".txt"):
667 candidate = os.path.expanduser(
668 os.path.join( dirname, "".join( (filename, extension) ) )
670 if os.path.exists(candidate):
671 datafiles[filename] = (
673 os.stat(candidate).st_mtime
676 if filename in datafiles:
690 """Find URIs using airport, gecos, placename, station, ZCTA/ZIP, zone."""
691 import codecs, datetime, time, os, re, sys
692 if pyversion("3"): import configparser
693 else: import ConfigParser as configparser
694 datafiles = data_index(path)
695 if re.match("[A-Za-z]{3}$", expression): searchtype = "airport"
696 elif re.match("[A-Za-z0-9]{4}$", expression): searchtype = "station"
697 elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", expression): searchtype = "zone"
698 elif re.match("[0-9]{5}$", expression): searchtype = "ZCTA"
700 r"[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?, *[\+-]?\d+(\.\d+)?(-\d+){,2}[ENSWensw]?$",
703 searchtype = "coordinates"
704 elif re.match(r"(FIPS|fips)\d+$", expression): searchtype = "FIPS"
708 if cache_search: action = "caching"
709 else: action = "using"
718 (0.995, "excellent"),
721 if not quiet: print("Searching via %s..."%searchtype)
722 stations = configparser.ConfigParser()
723 dataname = "stations"
724 if dataname in datafiles:
725 datafile = datafiles[dataname][0]
726 if datafile.endswith(".gz"):
729 stations.read_string(
730 gzip.open(datafile).read().decode("utf-8") )
731 else: stations.readfp( gzip.open(datafile) )
733 stations.read(datafile)
735 message = "%s error: can't find \"%s\" data file\n" % (
736 os.path.basename( sys.argv[0] ),
739 sys.stderr.write(message)
741 zones = configparser.ConfigParser()
743 if dataname in datafiles:
744 datafile = datafiles[dataname][0]
745 if datafile.endswith(".gz"):
748 zones.read_string( gzip.open(datafile).read().decode("utf-8") )
749 else: zones.readfp( gzip.open(datafile) )
753 message = "%s error: can't find \"%s\" data file\n" % (
754 os.path.basename( sys.argv[0] ),
757 sys.stderr.write(message)
765 if searchtype == "airport":
766 expression = expression.lower()
767 airports = configparser.ConfigParser()
768 dataname = "airports"
769 if dataname in datafiles:
770 datafile = datafiles[dataname][0]
771 if datafile.endswith(".gz"):
774 airports.read_string(
775 gzip.open(datafile).read().decode("utf-8") )
776 else: airports.readfp( gzip.open(datafile) )
778 airports.read(datafile)
780 message = "%s error: can't find \"%s\" data file\n" % (
781 os.path.basename( sys.argv[0] ),
784 sys.stderr.write(message)
786 if airports.has_section(expression) \
787 and airports.has_option(expression, "station"):
788 search = (expression, "IATA/FAA airport code %s" % expression)
789 station = ( airports.get(expression, "station"), 0 )
790 if stations.has_option(station[0], "zone"):
791 zone = eval( stations.get(station[0], "zone") )
793 if not ( info or quiet ) \
794 and stations.has_option( station[0], "description" ):
798 stations.get(station[0], "description")
802 message = "No IATA/FAA airport code \"%s\" in the %s file.\n" % (
804 datafiles["airports"][0]
806 sys.stderr.write(message)
808 elif searchtype == "station":
809 expression = expression.lower()
810 if stations.has_section(expression):
811 station = (expression, 0)
813 search = (expression, "ICAO station code %s" % expression)
814 if stations.has_option(expression, "zone"):
815 zone = eval( stations.get(expression, "zone") )
817 if not ( info or quiet ) \
818 and stations.has_option(expression, "description"):
822 stations.get(expression, "description")
826 message = "No ICAO weather station \"%s\" in the %s file.\n" % (
828 datafiles["stations"][0]
830 sys.stderr.write(message)
832 elif searchtype == "zone":
833 expression = expression.lower()
834 if zones.has_section(expression) \
835 and zones.has_option(expression, "station"):
836 zone = (expression, 0)
837 station = eval( zones.get(expression, "station") )
839 search = (expression, "NWS/NOAA weather zone %s" % expression)
840 if not ( info or quiet ) \
841 and zones.has_option(expression, "description"):
845 zones.get(expression, "description")
849 message = "No usable NWS weather zone \"%s\" in the %s file.\n" % (
851 datafiles["zones"][0]
853 sys.stderr.write(message)
855 elif searchtype == "ZCTA":
856 zctas = configparser.ConfigParser()
858 if dataname in datafiles:
859 datafile = datafiles[dataname][0]
860 if datafile.endswith(".gz"):
864 gzip.open(datafile).read().decode("utf-8") )
865 else: zctas.readfp( gzip.open(datafile) )
869 message = "%s error: can't find \"%s\" data file\n" % (
870 os.path.basename( sys.argv[0] ),
873 sys.stderr.write(message)
876 if zctas.has_section(expression) \
877 and zctas.has_option(expression, "station"):
878 station = eval( zctas.get(expression, "station") )
879 search = (expression, "Census ZCTA (ZIP code) %s" % expression)
880 if zctas.has_option(expression, "zone"):
881 zone = eval( zctas.get(expression, "zone") )
883 message = "No census ZCTA (ZIP code) \"%s\" in the %s file.\n" % (
885 datafiles["zctas"][0]
887 sys.stderr.write(message)
889 elif searchtype == "coordinates":
890 search = (expression, "Geographic coordinates %s" % expression)
892 for station in stations.sections():
893 if stations.has_option(station, "location"):
894 stationtable[station] = {
895 "location": eval( stations.get(station, "location") )
897 station = closest( gecos(expression), stationtable, "location", 0.1 )
899 message = "No ICAO weather station found near %s.\n" % expression
900 sys.stderr.write(message)
903 for zone in zones.sections():
904 if zones.has_option(zone, "centroid"):
906 "centroid": eval( zones.get(zone, "centroid") )
908 zone = closest( gecos(expression), zonetable, "centroid", 0.1 )
910 message = "No NWS weather zone near %s; forecasts unavailable.\n" \
912 sys.stderr.write(message)
913 elif searchtype in ("FIPS", "name"):
914 places = configparser.ConfigParser()
916 if dataname in datafiles:
917 datafile = datafiles[dataname][0]
918 if datafile.endswith(".gz"):
922 gzip.open(datafile).read().decode("utf-8") )
923 else: places.readfp( gzip.open(datafile) )
925 places.read(datafile)
927 message = "%s error: can't find \"%s\" data file\n" % (
928 os.path.basename( sys.argv[0] ),
931 sys.stderr.write(message)
934 place = expression.lower()
935 if places.has_section(place) and places.has_option(place, "station"):
936 station = eval( places.get(place, "station") )
937 search = (expression, "Census Place %s" % expression)
938 if places.has_option(place, "description"):
941 search[1] + ", %s" % places.get(place, "description")
943 if places.has_option(place, "zone"):
944 zone = eval( places.get(place, "zone") )
945 if not ( info or quiet ) \
946 and places.has_option(place, "description"):
950 places.get(place, "description")
954 for place in places.sections():
955 if places.has_option(place, "description") \
956 and places.has_option(place, "station") \
959 places.get(place, "description"),
962 possibilities.append(place)
963 for place in stations.sections():
964 if stations.has_option(place, "description") \
967 stations.get(place, "description"),
970 possibilities.append(place)
971 for place in zones.sections():
972 if zones.has_option(place, "description") \
973 and zones.has_option(place, "station") \
976 zones.get(place, "description"),
979 possibilities.append(place)
980 if len(possibilities) == 1:
981 place = possibilities[0]
982 if places.has_section(place):
983 station = eval( places.get(place, "station") )
984 description = places.get(place, "description")
985 if places.has_option(place, "zone"):
986 zone = eval( places.get(place, "zone" ) )
987 search = ( expression, "%s: %s" % (place, description) )
988 elif stations.has_section(place):
989 station = (place, 0.0)
990 description = stations.get(place, "description")
991 if stations.has_option(place, "zone"):
992 zone = eval( stations.get(place, "zone" ) )
993 search = ( expression, "ICAO station code %s" % place )
994 elif zones.has_section(place):
995 station = eval( zones.get(place, "station") )
996 description = zones.get(place, "description")
998 search = ( expression, "NWS/NOAA weather zone %s" % place )
999 if not ( info or quiet ):
1000 print( "[%s result %s]" % (action, description) )
1001 if not possibilities and not station[0]:
1002 message = "No FIPS code/census area match in the %s file.\n" % (
1003 datafiles["places"][0]
1005 sys.stderr.write(message)
1008 uris["metar"] = stations.get( station[0], "metar" )
1010 for key,value in zones.items( zone[0] ):
1011 if key not in ("centroid", "description", "station"):
1014 count = len(possibilities)
1015 if count <= max_results:
1016 print( "Your search is ambiguous, returning %s matches:" % count )
1017 for place in sorted(possibilities):
1018 if places.has_section(place):
1022 places.get(place, "description")
1025 elif stations.has_section(place):
1029 stations.get(place, "description")
1032 elif zones.has_section(place):
1036 zones.get(place, "description")
1041 "Your search is too ambiguous, returning %s matches." % count
1048 for section in dataset.sections():
1049 if dataset.has_option(section, "station"):
1051 eval( dataset.get(section, "station") )[1]
1053 if dataset.has_option(section, "zone"):
1054 zonelist.append( eval( dataset.get(section, "zone") )[1] )
1057 scount = len(stationlist)
1058 zcount = len(zonelist)
1061 for score in scores:
1063 sranks.append( stationlist[ int( (1-score[0]) * scount ) ] )
1065 zranks.append( zonelist[ int( (1-score[0]) * zcount ) ] )
1066 description = search[1]
1067 uris["description"] = description
1069 "%s\n%s" % ( description, "-" * len(description) )
1074 stations.get( station[0], "description" )
1077 km = radian_to_km*station[1]
1078 mi = radian_to_mi*station[1]
1079 if sranks and not description.startswith("ICAO station code "):
1080 for index in range(0, len(scores)):
1081 if station[1] >= sranks[index]:
1082 score = scores[index][1]
1085 " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1087 elif searchtype == "coordinates":
1088 print( " (%.3gkm, %.3gmi)" % (km, mi) )
1091 "%s: %s" % ( zone[0], zones.get( zone[0], "description" ) )
1093 km = radian_to_km*zone[1]
1094 mi = radian_to_mi*zone[1]
1095 if zranks and not description.startswith("NWS/NOAA weather zone "):
1096 for index in range(0, len(scores)):
1097 if zone[1] >= zranks[index]:
1098 score = scores[index][1]
1101 " (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
1103 elif searchtype == "coordinates" and zone[0]:
1104 print( " (%.3gkm, %.3gmi)" % (km, mi) )
1107 nowstamp = "%s (%s)" % (
1109 datetime.datetime.isoformat(
1110 datetime.datetime.fromtimestamp(now),
1114 search_cache = ["\n"]
1115 search_cache.append( "[%s]\n" % search[0] )
1116 search_cache.append( "cached = %s\n" % nowstamp )
1117 for uriname in sorted(uris.keys()):
1118 search_cache.append( "%s = %s\n" % ( uriname, uris[uriname] ) )
1119 real_cachedir = os.path.expanduser(cachedir)
1120 if not os.path.exists(real_cachedir):
1121 try: os.makedirs(real_cachedir)
1122 except (IOError, OSError): pass
1123 scache_fn = os.path.join(real_cachedir, "searches")
1124 if not os.path.exists(scache_fn):
1126 [ x[1] for x in datafiles.values() ],
1129 thenstamp = "%s (%s)" % (
1131 datetime.datetime.isoformat(
1132 datetime.datetime.fromtimestamp(then),
1136 search_cache.insert(
1138 "# based on data files from: %s\n" % thenstamp
1141 scache_existing = configparser.ConfigParser()
1142 scache_existing.read(scache_fn)
1143 if not scache_existing.has_section(search[0]):
1144 scache_fd = codecs.open(scache_fn, "a", "utf-8")
1145 scache_fd.writelines(search_cache)
1147 except (IOError, OSError): pass
1151 def closest(position, nodes, fieldname, angle=None):
1153 if not angle: angle = 2*math.pi
1156 if fieldname in nodes[name]:
1157 node = nodes[name][fieldname]
1158 if node and abs( position[0]-node[0] ) < angle:
1159 if abs( position[1]-node[1] ) < angle \
1160 or abs( abs( position[1]-node[1] ) - 2*math.pi ) < angle:
1161 if position == node:
1165 candidate = math.acos(
1166 math.sin( position[0] ) * math.sin( node[0] ) \
1167 + math.cos( position[0] ) \
1168 * math.cos( node[0] ) \
1169 * math.cos( position[1] - node[1] )
1171 if candidate < angle:
1174 if match: match = str(match)
1175 return (match, angle)
1177 def gecos(formatted):
1179 coordinates = formatted.split(",")
1180 for coordinate in range(0, 2):
1181 degrees, foo, minutes, bar, seconds, hemisphere = re.match(
1182 r"([\+-]?\d+\.?\d*)(-(\d+))?(-(\d+))?([ensw]?)$",
1183 coordinates[coordinate].strip().lower()
1185 value = float(degrees)
1186 if minutes: value += float(minutes)/60
1187 if seconds: value += float(seconds)/3600
1188 if hemisphere and hemisphere in "sw": value *= -1
1189 coordinates[coordinate] = math.radians(value)
1190 return tuple(coordinates)
1193 import codecs, csv, datetime, hashlib, os, re, sys, tarfile, time, zipfile
1194 if pyversion("3"): import configparser
1195 else: import ConfigParser as configparser
1196 for filename in os.listdir("."):
1197 if re.match("[0-9]{4}_Gaz_counties_national.zip$", filename):
1198 gcounties_an = filename
1199 gcounties_fn = filename[:-4] + ".txt"
1200 elif re.match("[0-9]{4}_Gaz_cousubs_national.zip$", filename):
1201 gcousubs_an = filename
1202 gcousubs_fn = filename[:-4] + ".txt"
1203 elif re.match("[0-9]{4}_Gaz_place_national.zip$", filename):
1204 gplace_an = filename
1205 gplace_fn = filename[:-4] + ".txt"
1206 elif re.match("[0-9]{4}_Gaz_zcta_national.zip$", filename):
1208 gzcta_fn = filename[:-4] + ".txt"
1209 elif re.match("bp[0-9]{2}[a-z]{2}[0-9]{2}.dbx$", filename):
1210 cpfzcf_fn = filename
1211 nsdcccc_fn = "nsd_cccc.txt"
1212 ourairports_fn = "airports.csv"
1213 overrides_fn = "overrides.conf"
1214 overrideslog_fn = "overrides.log"
1218 airports_fn = "airports"
1219 places_fn = "places"
1220 stations_fn = "stations"
1225 # generated by %s on %s from these public domain sources:
1227 # https://www.census.gov/geographies/reference-files/time-series/geo/gazetteer-files.html
1233 # https://www.weather.gov/gis/ZoneCounty/
1236 # https://tgftp.nws.noaa.gov/data/
1239 # https://ourairports.com/data/
1242 # ...and these manually-generated or hand-compiled adjustments:
1248 os.path.basename( sys.argv[0] ),
1249 datetime.date.isoformat(
1250 datetime.datetime.utcfromtimestamp( int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) )
1252 hashlib.md5( open(gcounties_an, "rb").read() ).hexdigest(),
1253 datetime.date.isoformat(
1254 datetime.datetime.utcfromtimestamp( os.path.getmtime(gcounties_an) )
1257 hashlib.md5( open(gcousubs_an, "rb").read() ).hexdigest(),
1258 datetime.date.isoformat(
1259 datetime.datetime.utcfromtimestamp( os.path.getmtime(gcousubs_an) )
1262 hashlib.md5( open(gplace_an, "rb").read() ).hexdigest(),
1263 datetime.date.isoformat(
1264 datetime.datetime.utcfromtimestamp( os.path.getmtime(gplace_an) )
1267 hashlib.md5( open(gzcta_an, "rb").read() ).hexdigest(),
1268 datetime.date.isoformat(
1269 datetime.datetime.utcfromtimestamp( os.path.getmtime(gzcta_an) )
1272 hashlib.md5( open(cpfzcf_fn, "rb").read() ).hexdigest(),
1273 datetime.date.isoformat(
1274 datetime.datetime.utcfromtimestamp( os.path.getmtime(cpfzcf_fn) )
1277 hashlib.md5( open(nsdcccc_fn, "rb").read() ).hexdigest(),
1278 datetime.date.isoformat(
1279 datetime.datetime.utcfromtimestamp( os.path.getmtime(nsdcccc_fn) )
1282 hashlib.md5( open(ourairports_fn, "rb").read() ).hexdigest(),
1283 datetime.date.isoformat(
1284 datetime.datetime.utcfromtimestamp( os.path.getmtime(ourairports_fn) )
1287 hashlib.md5( open(overrides_fn, "rb").read() ).hexdigest(),
1288 datetime.date.isoformat(
1289 datetime.datetime.utcfromtimestamp( os.path.getmtime(overrides_fn) )
1292 hashlib.md5( open(slist_fn, "rb").read() ).hexdigest(),
1293 datetime.date.isoformat(
1294 datetime.datetime.utcfromtimestamp( os.path.getmtime(slist_fn) )
1297 hashlib.md5( open(zlist_fn, "rb").read() ).hexdigest(),
1298 datetime.date.isoformat(
1299 datetime.datetime.utcfromtimestamp( os.path.getmtime(zlist_fn) )
1308 message = "Reading %s:%s..." % (gcounties_an, gcounties_fn)
1309 sys.stdout.write(message)
1312 gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "r")
1313 columns = gcounties.readline().decode("utf-8").strip().split("\t")
1314 for line in gcounties:
1315 fields = line.decode("utf-8").strip().split("\t")
1316 f_geoid = fields[ columns.index("GEOID") ].strip()
1317 f_name = fields[ columns.index("NAME") ].strip()
1318 f_usps = fields[ columns.index("USPS") ].strip()
1319 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1320 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1321 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1322 fips = "fips%s" % f_geoid
1323 if fips not in places: places[fips] = {}
1324 places[fips]["centroid"] = gecos(
1325 "%s,%s" % (f_intptlat, f_intptlong)
1327 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1330 print("done (%s lines)." % count)
1331 message = "Reading %s:%s..." % (gcousubs_an, gcousubs_fn)
1332 sys.stdout.write(message)
1335 gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "r")
1336 columns = gcousubs.readline().decode("utf-8").strip().split("\t")
1337 for line in gcousubs:
1338 fields = line.decode("utf-8").strip().split("\t")
1339 f_geoid = fields[ columns.index("GEOID") ].strip()
1340 f_name = fields[ columns.index("NAME") ].strip()
1341 f_usps = fields[ columns.index("USPS") ].strip()
1342 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1343 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1344 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1345 fips = "fips%s" % f_geoid
1346 if fips not in places: places[fips] = {}
1347 places[fips]["centroid"] = gecos(
1348 "%s,%s" % (f_intptlat, f_intptlong)
1350 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1353 print("done (%s lines)." % count)
1354 message = "Reading %s:%s..." % (gplace_an, gplace_fn)
1355 sys.stdout.write(message)
1358 gplace = zipfile.ZipFile(gplace_an).open(gplace_fn, "r")
1359 columns = gplace.readline().decode("utf-8").strip().split("\t")
1361 fields = line.decode("utf-8").strip().split("\t")
1362 f_geoid = fields[ columns.index("GEOID") ].strip()
1363 f_name = fields[ columns.index("NAME") ].strip()
1364 f_usps = fields[ columns.index("USPS") ].strip()
1365 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1366 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1367 if f_geoid and f_name and f_usps and f_intptlat and f_intptlong:
1368 fips = "fips%s" % f_geoid
1369 if fips not in places: places[fips] = {}
1370 places[fips]["centroid"] = gecos(
1371 "%s,%s" % (f_intptlat, f_intptlong)
1373 places[fips]["description"] = "%s, %s" % (f_name, f_usps)
1376 print("done (%s lines)." % count)
1377 message = "Reading %s..." % slist_fn
1378 sys.stdout.write(message)
1381 slist = codecs.open(slist_fn, "rU", "utf-8")
1383 icao = line.split("#")[0].strip()
1386 "metar": "https://tgftp.nws.noaa.gov/data/observations/"\
1387 + "metar/decoded/%s.TXT" % icao.upper()
1391 print("done (%s lines)." % count)
1392 message = "Reading %s..." % nsdcccc_fn
1393 sys.stdout.write(message)
1396 nsdcccc = codecs.open(nsdcccc_fn, "rU", "utf-8")
1397 for line in nsdcccc:
1399 fields = line.split(";")
1400 icao = fields[0].strip().lower()
1401 if icao in stations:
1403 name = " ".join( fields[3].strip().title().split() )
1404 if name: description.append(name)
1405 st = fields[4].strip()
1406 if st: description.append(st)
1407 country = " ".join( fields[5].strip().title().split() )
1408 if country: description.append(country)
1410 stations[icao]["description"] = ", ".join(description)
1411 lat, lon = fields[7:9]
1413 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1414 elif "location" not in stations[icao]:
1415 lat, lon = fields[5:7]
1417 stations[icao]["location"] = gecos( "%s,%s" % (lat, lon) )
1420 print("done (%s lines)." % count)
1421 message = "Reading %s..." % ourairports_fn
1422 sys.stdout.write(message)
1425 ourairports = open(ourairports_fn, "rU")
1426 for row in csv.reader(ourairports):
1427 icao = row[12].lower()
1428 if icao in stations:
1429 iata = row[13].lower()
1430 if len(iata) == 3: airports[iata] = { "station": icao }
1431 if "description" not in stations[icao]:
1434 if name: description.append(name)
1435 municipality = row[10]
1436 if municipality: description.append(municipality)
1441 c,r = region.split("-", 1)
1442 if c == country: region = r
1443 description.append(region)
1445 description.append(country)
1447 stations[icao]["description"] = ", ".join(description)
1448 if "location" not in stations[icao]:
1453 stations[icao]["location"] = gecos(
1454 "%s,%s" % (lat, lon)
1458 print("done (%s lines)." % count)
1459 message = "Reading %s..." % zlist_fn
1460 sys.stdout.write(message)
1463 zlist = codecs.open(zlist_fn, "rU", "utf-8")
1465 line = line.split("#")[0].strip()
1470 print("done (%s lines)." % count)
1471 message = "Reading %s..." % cpfzcf_fn
1472 sys.stdout.write(message)
1476 cpfzcf = codecs.open(cpfzcf_fn, "rU", "utf-8")
1478 fields = line.strip().split("|")
1479 if len(fields) == 11 \
1480 and fields[0] and fields[1] and fields[9] and fields[10]:
1481 zone = "z".join( fields[:2] ).lower()
1485 zones[zone]["coastal_flood_statement"] = (
1486 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1487 "flood/coastal/%s/%s.txt" % (state.lower(), zone))
1488 zones[zone]["flash_flood_statement"] = (
1489 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1490 "flash_flood/statement/%s/%s.txt"
1491 % (state.lower(), zone))
1492 zones[zone]["flash_flood_warning"] = (
1493 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1494 "flash_flood/warning/%s/%s.txt"
1495 % (state.lower(), zone))
1496 zones[zone]["flash_flood_watch"] = (
1497 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1498 "flash_flood/watch/%s/%s.txt" % (state.lower(), zone))
1499 zones[zone]["flood_statement"] = (
1500 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1501 "flood/statement/%s/%s.txt" % (state.lower(), zone))
1502 zones[zone]["flood_warning"] = (
1503 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1504 "flood/warning/%s/%s.txt" % (state.lower(), zone))
1505 zones[zone]["severe_thunderstorm_warning"] = (
1506 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1507 "thunderstorm/%s/%s.txt" % (state.lower(), zone))
1508 zones[zone]["severe_weather_statement"] = (
1509 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1510 "severe_weather_stmt/%s/%s.txt"
1511 % (state.lower(), zone))
1512 zones[zone]["short_term_forecast"] = (
1513 "https://tgftp.nws.noaa.gov/data/forecasts/nowcast/"
1514 "%s/%s.txt" % (state.lower(), zone))
1515 zones[zone]["special_weather_statement"] = (
1516 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1517 "special_weather_stmt/%s/%s.txt"
1518 % (state.lower(), zone))
1519 zones[zone]["state_forecast"] = (
1520 "https://tgftp.nws.noaa.gov/data/forecasts/state/"
1521 "%s/%s.txt" % (state.lower(), zone))
1522 zones[zone]["urgent_weather_message"] = (
1523 "https://tgftp.nws.noaa.gov/data/watches_warnings/"
1524 "non_precip/%s/%s.txt" % (state.lower(), zone))
1525 zones[zone]["zone_forecast"] = (
1526 "https://tgftp.nws.noaa.gov/data/forecasts/zone/"
1527 "%s/%s.txt" % (state.lower(), zone))
1528 description = fields[3].strip()
1529 fips = "fips%s"%fields[6]
1532 if description.endswith(county):
1533 description += " County"
1535 description += ", %s County" % county
1536 description += ", %s, US" % state
1537 zones[zone]["description"] = description
1538 zones[zone]["centroid"] = gecos( ",".join( fields[9:11] ) )
1539 if fips in places and not zones[zone]["centroid"]:
1540 zones[zone]["centroid"] = places[fips]["centroid"]
1543 print("done (%s lines)." % count)
1544 message = "Reading %s:%s..." % (gzcta_an, gzcta_fn)
1545 sys.stdout.write(message)
1548 gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "r")
1549 columns = gzcta.readline().decode("utf-8").strip().split("\t")
1551 fields = line.decode("utf-8").strip().split("\t")
1552 f_geoid = fields[ columns.index("GEOID") ].strip()
1553 f_intptlat = fields[ columns.index("INTPTLAT") ].strip()
1554 f_intptlong = fields[ columns.index("INTPTLONG") ].strip()
1555 if f_geoid and f_intptlat and f_intptlong:
1556 if f_geoid not in zctas: zctas[f_geoid] = {}
1557 zctas[f_geoid]["centroid"] = gecos(
1558 "%s,%s" % (f_intptlat, f_intptlong)
1562 print("done (%s lines)." % count)
1563 message = "Reading %s..." % overrides_fn
1564 sys.stdout.write(message)
1570 overrides = configparser.ConfigParser()
1571 overrides.readfp( codecs.open(overrides_fn, "r", "utf8") )
1573 for section in overrides.sections():
1576 if section.startswith("-"):
1577 section = section[1:]
1579 else: delete = False
1580 if re.match("[A-Za-z]{3}$", section):
1582 if section in airports:
1583 del( airports[section] )
1584 logact = "removed airport %s" % section
1587 logact = "tried to remove nonexistent airport %s" % section
1589 if section in airports:
1590 logact = "changed airport %s" % section
1593 airports[section] = {}
1594 logact = "added airport %s" % section
1596 for key,value in overrides.items(section):
1597 if key in airports[section]: chgopt += 1
1599 if key in ("centroid", "location"):
1600 airports[section][key] = eval(value)
1602 airports[section][key] = value
1603 if addopt and chgopt:
1604 logact += " (+%s/!%s options)" % (addopt, chgopt)
1605 elif addopt: logact += " (+%s options)" % addopt
1606 elif chgopt: logact += " (!%s options)" % chgopt
1607 elif re.match("[A-Za-z0-9]{4}$", section):
1609 if section in stations:
1610 del( stations[section] )
1611 logact = "removed station %s" % section
1614 logact = "tried to remove nonexistent station %s" % section
1616 if section in stations:
1617 logact = "changed station %s" % section
1620 stations[section] = {}
1621 logact = "added station %s" % section
1623 for key,value in overrides.items(section):
1624 if key in stations[section]: chgopt += 1
1626 if key in ("centroid", "location"):
1627 stations[section][key] = eval(value)
1629 stations[section][key] = value
1630 if addopt and chgopt:
1631 logact += " (+%s/!%s options)" % (addopt, chgopt)
1632 elif addopt: logact += " (+%s options)" % addopt
1633 elif chgopt: logact += " (!%s options)" % chgopt
1634 elif re.match("[0-9]{5}$", section):
1636 if section in zctas:
1637 del( zctas[section] )
1638 logact = "removed zcta %s" % section
1641 logact = "tried to remove nonexistent zcta %s" % section
1643 if section in zctas:
1644 logact = "changed zcta %s" % section
1648 logact = "added zcta %s" % section
1650 for key,value in overrides.items(section):
1651 if key in zctas[section]: chgopt += 1
1653 if key in ("centroid", "location"):
1654 zctas[section][key] = eval(value)
1656 zctas[section][key] = value
1657 if addopt and chgopt:
1658 logact += " (+%s/!%s options)" % (addopt, chgopt)
1659 elif addopt: logact += " (+%s options)" % addopt
1660 elif chgopt: logact += " (!%s options)" % chgopt
1661 elif re.match("[A-Za-z]{2}[Zz][0-9]{3}$", section):
1663 if section in zones:
1664 del( zones[section] )
1665 logact = "removed zone %s" % section
1668 logact = "tried to remove nonexistent zone %s" % section
1670 if section in zones:
1671 logact = "changed zone %s" % section
1675 logact = "added zone %s" % section
1677 for key,value in overrides.items(section):
1678 if key in zones[section]: chgopt += 1
1680 if key in ("centroid", "location"):
1681 zones[section][key] = eval(value)
1683 zones[section][key] = value
1684 if addopt and chgopt:
1685 logact += " (+%s/!%s options)" % (addopt, chgopt)
1686 elif addopt: logact += " (+%s options)" % addopt
1687 elif chgopt: logact += " (!%s options)" % chgopt
1688 elif re.match("fips[0-9]+$", section):
1690 if section in places:
1691 del( places[section] )
1692 logact = "removed place %s" % section
1695 logact = "tried to remove nonexistent place %s" % section
1697 if section in places:
1698 logact = "changed place %s" % section
1701 places[section] = {}
1702 logact = "added place %s" % section
1704 for key,value in overrides.items(section):
1705 if key in places[section]: chgopt += 1
1707 if key in ("centroid", "location"):
1708 places[section][key] = eval(value)
1710 places[section][key] = value
1711 if addopt and chgopt:
1712 logact += " (+%s/!%s options)" % (addopt, chgopt)
1713 elif addopt: logact += " (+%s options)" % addopt
1714 elif chgopt: logact += " (!%s options)" % chgopt
1716 overrideslog.append("%s\n" % logact)
1718 if os.path.exists(overrideslog_fn):
1719 os.rename(overrideslog_fn, "%s_old"%overrideslog_fn)
1720 overrideslog_fd = codecs.open(overrideslog_fn, "w", "utf8")
1722 overrideslog_fd.write(
1723 '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
1724 '# use, copy, modify, and distribute this software is granted under terms\n'
1725 '# provided in the LICENSE file distributed with this software.\n\n'
1726 % time.gmtime().tm_year)
1727 overrideslog_fd.writelines(overrideslog)
1728 overrideslog_fd.close()
1729 print("done (%s overridden sections: +%s/-%s/!%s)." % (
1735 estimate = 2*len(places) + len(stations) + 2*len(zctas) + len(zones)
1737 "Correlating places, stations, ZCTAs and zones (upper bound is %s):" % \
1741 milestones = list( range(51) )
1743 sys.stdout.write(message)
1746 centroid = places[fips]["centroid"]
1748 station = closest(centroid, stations, "location", 0.1)
1750 places[fips]["station"] = station
1753 level = int(50*count/estimate)
1754 if level in milestones:
1755 for remaining in milestones[:milestones.index(level)+1]:
1758 sys.stdout.write(message)
1761 message = "%s%%" % (remaining*2,)
1762 sys.stdout.write(message)
1764 milestones.remove(remaining)
1766 zone = closest(centroid, zones, "centroid", 0.1)
1768 places[fips]["zone"] = zone
1771 level = int(50*count/estimate)
1772 if level in milestones:
1773 for remaining in milestones[:milestones.index(level)+1]:
1776 sys.stdout.write(message)
1779 message = "%s%%" % (remaining*2,)
1780 sys.stdout.write(message)
1782 milestones.remove(remaining)
1783 for station in stations:
1784 if "location" in stations[station]:
1785 location = stations[station]["location"]
1787 zone = closest(location, zones, "centroid", 0.1)
1789 stations[station]["zone"] = zone
1792 level = int(50*count/estimate)
1793 if level in milestones:
1794 for remaining in milestones[:milestones.index(level)+1]:
1797 sys.stdout.write(message)
1800 message = "%s%%" % (remaining*2,)
1801 sys.stdout.write(message)
1803 milestones.remove(remaining)
1804 for zcta in zctas.keys():
1805 centroid = zctas[zcta]["centroid"]
1807 station = closest(centroid, stations, "location", 0.1)
1809 zctas[zcta]["station"] = station
1812 level = int(50*count/estimate)
1813 if level in milestones:
1814 for remaining in milestones[ : milestones.index(level)+1 ]:
1817 sys.stdout.write(message)
1820 message = "%s%%" % (remaining*2,)
1821 sys.stdout.write(message)
1823 milestones.remove(remaining)
1825 zone = closest(centroid, zones, "centroid", 0.1)
1827 zctas[zcta]["zone"] = zone
1830 level = int(50*count/estimate)
1831 if level in milestones:
1832 for remaining in milestones[:milestones.index(level)+1]:
1835 sys.stdout.write(message)
1838 message = "%s%%" % (remaining*2,)
1839 sys.stdout.write(message)
1841 milestones.remove(remaining)
1842 for zone in zones.keys():
1843 if "centroid" in zones[zone]:
1844 centroid = zones[zone]["centroid"]
1846 station = closest(centroid, stations, "location", 0.1)
1848 zones[zone]["station"] = station
1851 level = int(50*count/estimate)
1852 if level in milestones:
1853 for remaining in milestones[:milestones.index(level)+1]:
1856 sys.stdout.write(message)
1859 message = "%s%%" % (remaining*2,)
1860 sys.stdout.write(message)
1862 milestones.remove(remaining)
1863 for remaining in milestones:
1866 sys.stdout.write(message)
1869 message = "%s%%" % (remaining*2,)
1870 sys.stdout.write(message)
1872 print("\n done (%s correlations)." % count)
1873 message = "Writing %s..." % airports_fn
1874 sys.stdout.write(message)
1877 if os.path.exists(airports_fn):
1878 os.rename(airports_fn, "%s_old"%airports_fn)
1879 airports_fd = codecs.open(airports_fn, "w", "utf8")
1880 airports_fd.write(header)
1881 for airport in sorted( airports.keys() ):
1882 airports_fd.write("\n\n[%s]" % airport)
1883 for key, value in sorted( airports[airport].items() ):
1884 if type(value) is float: value = "%.7f"%value
1885 elif type(value) is tuple:
1887 for element in value:
1888 if type(element) is float: elements.append("%.7f"%element)
1889 else: elements.append( repr(element) )
1890 value = "(%s)"%", ".join(elements)
1891 airports_fd.write( "\n%s = %s" % (key, value) )
1893 airports_fd.write("\n")
1895 print("done (%s sections)." % count)
1896 message = "Writing %s..." % places_fn
1897 sys.stdout.write(message)
1900 if os.path.exists(places_fn):
1901 os.rename(places_fn, "%s_old"%places_fn)
1902 places_fd = codecs.open(places_fn, "w", "utf8")
1903 places_fd.write(header)
1904 for fips in sorted( places.keys() ):
1905 places_fd.write("\n\n[%s]" % fips)
1906 for key, value in sorted( places[fips].items() ):
1907 if type(value) is float: value = "%.7f"%value
1908 elif type(value) is tuple:
1910 for element in value:
1911 if type(element) is float: elements.append("%.7f"%element)
1912 else: elements.append( repr(element) )
1913 value = "(%s)"%", ".join(elements)
1914 places_fd.write( "\n%s = %s" % (key, value) )
1916 places_fd.write("\n")
1918 print("done (%s sections)." % count)
1919 message = "Writing %s..." % stations_fn
1920 sys.stdout.write(message)
1923 if os.path.exists(stations_fn):
1924 os.rename(stations_fn, "%s_old"%stations_fn)
1925 stations_fd = codecs.open(stations_fn, "w", "utf-8")
1926 stations_fd.write(header)
1927 for station in sorted( stations.keys() ):
1928 stations_fd.write("\n\n[%s]" % station)
1929 for key, value in sorted( stations[station].items() ):
1930 if type(value) is float: value = "%.7f"%value
1931 elif type(value) is tuple:
1933 for element in value:
1934 if type(element) is float: elements.append("%.7f"%element)
1935 else: elements.append( repr(element) )
1936 value = "(%s)"%", ".join(elements)
1937 if type(value) is bytes:
1938 value = value.decode("utf-8")
1939 stations_fd.write( "\n%s = %s" % (key, value) )
1941 stations_fd.write("\n")
1943 print("done (%s sections)." % count)
1944 message = "Writing %s..." % zctas_fn
1945 sys.stdout.write(message)
1948 if os.path.exists(zctas_fn):
1949 os.rename(zctas_fn, "%s_old"%zctas_fn)
1950 zctas_fd = codecs.open(zctas_fn, "w", "utf8")
1951 zctas_fd.write(header)
1952 for zcta in sorted( zctas.keys() ):
1953 zctas_fd.write("\n\n[%s]" % zcta)
1954 for key, value in sorted( zctas[zcta].items() ):
1955 if type(value) is float: value = "%.7f"%value
1956 elif type(value) is tuple:
1958 for element in value:
1959 if type(element) is float: elements.append("%.7f"%element)
1960 else: elements.append( repr(element) )
1961 value = "(%s)"%", ".join(elements)
1962 zctas_fd.write( "\n%s = %s" % (key, value) )
1964 zctas_fd.write("\n")
1966 print("done (%s sections)." % count)
1967 message = "Writing %s..." % zones_fn
1968 sys.stdout.write(message)
1971 if os.path.exists(zones_fn):
1972 os.rename(zones_fn, "%s_old"%zones_fn)
1973 zones_fd = codecs.open(zones_fn, "w", "utf8")
1974 zones_fd.write(header)
1975 for zone in sorted( zones.keys() ):
1976 zones_fd.write("\n\n[%s]" % zone)
1977 for key, value in sorted( zones[zone].items() ):
1978 if type(value) is float: value = "%.7f"%value
1979 elif type(value) is tuple:
1981 for element in value:
1982 if type(element) is float: elements.append("%.7f"%element)
1983 else: elements.append( repr(element) )
1984 value = "(%s)"%", ".join(elements)
1985 zones_fd.write( "\n%s = %s" % (key, value) )
1987 zones_fd.write("\n")
1989 print("done (%s sections)." % count)
1990 message = "Starting QA check..."
1991 sys.stdout.write(message)
1993 airports = configparser.ConfigParser()
1994 airports.read(airports_fn)
1995 places = configparser.ConfigParser()
1996 places.read(places_fn)
1997 stations = configparser.ConfigParser()
1998 stations.read(stations_fn)
1999 zctas = configparser.ConfigParser()
2000 zctas.read(zctas_fn)
2001 zones = configparser.ConfigParser()
2002 zones.read(zones_fn)
2004 places_nocentroid = 0
2005 places_nodescription = 0
2006 for place in sorted( places.sections() ):
2007 if not places.has_option(place, "centroid"):
2008 qalog.append("%s: no centroid\n" % place)
2009 places_nocentroid += 1
2010 if not places.has_option(place, "description"):
2011 qalog.append("%s: no description\n" % place)
2012 places_nodescription += 1
2013 stations_nodescription = 0
2014 stations_nolocation = 0
2015 stations_nometar = 0
2016 for station in sorted( stations.sections() ):
2017 if not stations.has_option(station, "description"):
2018 qalog.append("%s: no description\n" % station)
2019 stations_nodescription += 1
2020 if not stations.has_option(station, "location"):
2021 qalog.append("%s: no location\n" % station)
2022 stations_nolocation += 1
2023 if not stations.has_option(station, "metar"):
2024 qalog.append("%s: no metar\n" % station)
2025 stations_nometar += 1
2026 airports_badstation = 0
2027 airports_nostation = 0
2028 for airport in sorted( airports.sections() ):
2029 if not airports.has_option(airport, "station"):
2030 qalog.append("%s: no station\n" % airport)
2031 airports_nostation += 1
2033 station = airports.get(airport, "station")
2034 if station not in stations.sections():
2035 qalog.append( "%s: bad station %s\n" % (airport, station) )
2036 airports_badstation += 1
2037 zctas_nocentroid = 0
2038 for zcta in sorted( zctas.sections() ):
2039 if not zctas.has_option(zcta, "centroid"):
2040 qalog.append("%s: no centroid\n" % zcta)
2041 zctas_nocentroid += 1
2042 zones_nocentroid = 0
2043 zones_nodescription = 0
2044 zones_noforecast = 0
2045 zones_overlapping = 0
2047 for zone in zones.sections():
2048 if zones.has_option(zone, "centroid"):
2050 "centroid": eval( zones.get(zone, "centroid") )
2052 for zone in sorted( zones.sections() ):
2053 if zones.has_option(zone, "centroid"):
2054 zonetable_local = zonetable.copy()
2055 del( zonetable_local[zone] )
2056 centroid = eval( zones.get(zone, "centroid") )
2058 nearest = closest(centroid, zonetable_local, "centroid", 0.1)
2059 if nearest[1]*radian_to_km < 1:
2060 qalog.append( "%s: within one km of %s\n" % (
2064 zones_overlapping += 1
2066 qalog.append("%s: no centroid\n" % zone)
2067 zones_nocentroid += 1
2068 if not zones.has_option(zone, "description"):
2069 qalog.append("%s: no description\n" % zone)
2070 zones_nodescription += 1
2071 if not zones.has_option(zone, "zone_forecast"):
2072 qalog.append("%s: no forecast\n" % zone)
2073 zones_noforecast += 1
2074 if os.path.exists(qalog_fn):
2075 os.rename(qalog_fn, "%s_old"%qalog_fn)
2076 qalog_fd = codecs.open(qalog_fn, "w", "utf8")
2079 '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
2080 '# use, copy, modify, and distribute this software is granted under terms\n'
2081 '# provided in the LICENSE file distributed with this software.\n\n'
2082 % time.gmtime().tm_year)
2083 qalog_fd.writelines(qalog)
2086 print("issues found (see %s for details):"%qalog_fn)
2087 if airports_badstation:
2088 print(" %s airports with invalid station"%airports_badstation)
2089 if airports_nostation:
2090 print(" %s airports with no station"%airports_nostation)
2091 if places_nocentroid:
2092 print(" %s places with no centroid"%places_nocentroid)
2093 if places_nodescription:
2094 print(" %s places with no description"%places_nodescription)
2095 if stations_nodescription:
2096 print(" %s stations with no description"%stations_nodescription)
2097 if stations_nolocation:
2098 print(" %s stations with no location"%stations_nolocation)
2099 if stations_nometar:
2100 print(" %s stations with no METAR"%stations_nometar)
2101 if zctas_nocentroid:
2102 print(" %s ZCTAs with no centroid"%zctas_nocentroid)
2103 if zones_nocentroid:
2104 print(" %s zones with no centroid"%zones_nocentroid)
2105 if zones_nodescription:
2106 print(" %s zones with no description"%zones_nodescription)
2107 if zones_noforecast:
2108 print(" %s zones with no forecast"%zones_noforecast)
2109 if zones_overlapping:
2110 print(" %s zones within one km of another"%zones_overlapping)
2111 else: print("no issues found.")
2112 print("Indexing complete!")