Include time zones in WX weather zone data

diff --git a/weather.py b/weather.py
index fa94727..17a0e12 100644
--- a/weather.py
+++ b/weather.py
@@ -1,12 +1,12 @@
 """Contains various object definitions needed by the weather utility."""
 
 weather_copyright = """\
 """Contains various object definitions needed by the weather utility."""
 
 weather_copyright = """\
-# Copyright (c) 2006-2020 Jeremy Stanley <fungi@yuggoth.org>. Permission to
+# Copyright (c) 2006-2024 Jeremy Stanley <fungi@yuggoth.org>. Permission to
 # use, copy, modify, and distribute this software is granted under terms
 # provided in the LICENSE file distributed with this software.
 #"""
 
-weather_version = "2.4"
+weather_version = "2.4.4"
 
 radian_to_km = 6372.795484
 radian_to_mi = 3959.871528
@@ -89,17 +89,25 @@ class Selections:
                 return self.config.get(argument, option)
         if option in self.options.__dict__:
             return self.options.__dict__[option]
-        else:
-            import os, sys
-            message = "%s error: no URI defined for %s\n" % (
-                os.path.basename( sys.argv[0] ),
-                option
-            )
-            sys.stderr.write(message)
-            exit(1)
+        import sys
+        message = "WARNING: no URI defined for %s\n" % option
+        sys.stderr.write(message)
+        return None
     def get_bool(self, option, argument=None):
         """Get data and coerce to a boolean if necessary."""
-        return bool(self.get(option, argument))
+        # Mimic configparser's getboolean() method by treating
+        # false/no/off/0 as False and true/yes/on/1 as True values,
+        # case-insensitively
+        value = self.get(option, argument)
+        if isinstance(value, bool):
+            return value
+        if isinstance(value, str):
+            vlower = value.lower()
+            if vlower in ('false', 'no', 'off', '0'):
+                return False
+            elif vlower in ('true', 'yes', 'on', '1'):
+                return True
+        raise ValueError("Not a boolean: %s" % value)
     def getint(self, option, argument=None):
         """Get data and coerce to an integer if necessary."""
         value = self.get(option, argument)
@@ -122,7 +130,7 @@ def filter_units(line, units="imperial"):
     # filter lines with both pressures in the form of "X inches (Y hPa)" or
     # "X in. Hg (Y hPa)"
     dual_p = re.match(
-        "(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
+        r"(.* )(\d*(\.\d+)? (inches|in\. Hg)) \((\d*(\.\d+)? hPa)\)(.*)",
         line
     )
     if dual_p:
@@ -131,7 +139,7 @@ def filter_units(line, units="imperial"):
         elif units == "metric": line = preamble + hpa + trailer
     # filter lines with both temperatures in the form of "X F (Y C)"
     dual_t = re.match(
-        "(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
+        r"(.* )(-?\d*(\.\d+)? F) \((-?\d*(\.\d+)? C)\)(.*)",
         line
     )
     if dual_t:
@@ -142,7 +150,7 @@ def filter_units(line, units="imperial"):
     # "Y kilometer(s)"
     if units == "metric":
         imperial_d = re.match(
     # "Y kilometer(s)"
     if units == "metric":
         imperial_d = re.match(
-            "(.* )(\d+)( mile\(s\))(.*)",
+            r"(.* )(\d+)( mile\(s\))(.*)",
             line
         )
         if imperial_d:
@@ -152,7 +160,7 @@ def filter_units(line, units="imperial"):
     # filter speeds in the form of "X MPH (Y KT)" to just "X MPH"; if metric is
     # desired, convert to "Z KPH"
     imperial_s = re.match(
-        "(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
+        r"(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
         line
     )
     if imperial_s:
@@ -162,7 +170,7 @@ def filter_units(line, units="imperial"):
             line = preamble + str(int(round(int(mph)*1.609344))) + " KPH" + \
                 trailer
     imperial_s = re.match(
-        "(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
+        r"(.* )(\d+)( MPH)( \(\d+ KT\))(.*)",
         line
     )
     if imperial_s:
@@ -174,7 +182,7 @@ def filter_units(line, units="imperial"):
     # if imperial is desired, qualify given forcast temperatures like "X F"; if
     # metric is desired, convert to "Y C"
     imperial_t = re.match(
-        "(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
+        r"(.* )(High |high |Low |low )(\d+)(\.|,)(.*)",
         line
     )
     if imperial_t:
@@ -212,7 +220,7 @@ def get_uri(
             except (IOError, OSError): pass
         dcache_fn = os.path.join(
             dcachedir,
-            uri.split(":")[1].replace("/","_")
+            uri.split(":",1)[1].replace("/","_")
         )
     now = time.time()
     if cache_data and os.access(dcache_fn, os.R_OK) \
@@ -225,18 +233,10 @@ def get_uri(
             data = urlopen(uri).read().decode("utf-8")
         except URLError:
             if ignore_fail: return ""
-            else:
-                import os, sys, traceback
-                message = "%s error: failed to retrieve\n   %s\n   %s" % (
-                        os.path.basename( sys.argv[0] ),
-                        uri,
-                        traceback.format_exception_only(
-                            sys.exc_type,
-                            sys.exc_value
-                        )[0]
-                    )
-                sys.stderr.write(message)
-                sys.exit(1)
+            import os, sys
+            sys.stderr.write("%s error: failed to retrieve\n   %s\n\n" % (
+                os.path.basename( sys.argv[0] ), uri))
+            raise
         # Some data sources are HTML with the plain text wrapped in pre tags
         if "<pre>" in data:
             data = data[data.find("<pre>")+5:data.find("</pre>")]
@@ -324,11 +324,7 @@ def get_alert(
 ):
     """Return alert notice for the specified URI."""
     if not uri:
-        import os, sys
-        message = "%s error: Alert URI required for alerts\n" % \
-            os.path.basename( sys.argv[0] )
-        sys.stderr.write(message)
-        sys.exit(1)
+        return ""
     alert = get_uri(
         uri,
         ignore_fail=True,
@@ -346,7 +342,19 @@ def get_alert(
                 muted = True
             lines = alert.split("\n")
             import time
-            valid_time = time.strftime("%Y%m%d%H%M")
+            # TODO: make this offset configurable
+            # TODO: adjust offset relative to the difference between the user's
+            #       local time and the zone's local time (will need to extend
+            #       the schema in the zones file to store each tz
+            offset = 86400  # one day
+
+            # report alerts and forecasts that expired less than offset ago;
+            # this is a cheap hack since expiration times seem to be relative
+            # to the zone's local time zone, and converting from the user's
+            # would get complicated, but also there can sometimes be a lag
+            # between expiration and the next update
+            valid_time = time.strftime(
+                "%Y%m%d%H%M", time.localtime(time.time() - offset))
             output = []
             for line in lines:
                 if line.startswith("Expires:") \
@@ -381,7 +389,7 @@ def get_options(config):
 
     # the -a/--alert option
     if config.has_option("default", "alert"):
-        default_alert = bool(config.get("default", "alert"))
+        default_alert = config.getboolean("default", "alert")
     else: default_alert = False
     option_parser.add_option("-a", "--alert",
         dest="alert",
@@ -398,16 +406,11 @@ def get_options(config):
             + "flash_flood_statement," \
             + "flash_flood_warning," \
             + "flash_flood_watch," \
             + "flash_flood_statement," \
             + "flash_flood_warning," \
             + "flash_flood_watch," \
-            + "flood_statement," \
             + "flood_warning," \
             + "flood_warning," \
-            + "marine_weather_statement," \
-            + "river_statement," \
             + "severe_thunderstorm_warning," \
             + "severe_weather_statement," \
             + "severe_thunderstorm_warning," \
             + "severe_weather_statement," \
-            + "short_term_forecast," \
-            + "special_marine_warning," \
             + "special_weather_statement," \
             + "special_weather_statement," \
-            + "tornado_warning," \
+            + "tornado," \
             + "urgent_weather_message"
     option_parser.add_option("--atypes",
         dest="atypes",
             + "urgent_weather_message"
     option_parser.add_option("--atypes",
         dest="atypes",
@@ -441,7 +444,7 @@ def get_options(config):
 
     # the -f/--forecast option
     if config.has_option("default", "forecast"):
-        default_forecast = bool(config.get("default", "forecast"))
+        default_forecast = config.getboolean("default", "forecast")
     else: default_forecast = False
     option_parser.add_option("-f", "--forecast",
         dest="forecast",
@@ -469,7 +472,7 @@ def get_options(config):
 
     # the --imperial option
     if config.has_option("default", "imperial"):
-        default_imperial = bool(config.get("default", "imperial"))
+        default_imperial = config.getboolean("default", "imperial")
     else: default_imperial = False
     option_parser.add_option("--imperial",
         dest="imperial",
@@ -500,7 +503,7 @@ def get_options(config):
 
     # the -m/--metric option
     if config.has_option("default", "metric"):
-        default_metric = bool(config.get("default", "metric"))
+        default_metric = config.getboolean("default", "metric")
     else: default_metric = False
     option_parser.add_option("-m", "--metric",
         dest="metric",
@@ -510,7 +513,7 @@ def get_options(config):
 
     # the -n/--no-conditions option
     if config.has_option("default", "conditions"):
-        default_conditions = bool(config.get("default", "conditions"))
+        default_conditions = config.getboolean("default", "conditions")
     else: default_conditions = True
     option_parser.add_option("-n", "--no-conditions",
         dest="conditions",
@@ -520,7 +523,7 @@ def get_options(config):
 
     # the --no-cache option
     if config.has_option("default", "cache"):
-        default_cache = bool(config.get("default", "cache"))
+        default_cache = config.getboolean("default", "cache")
     else: default_cache = True
     option_parser.add_option("--no-cache",
         dest="cache",
@@ -530,7 +533,7 @@ def get_options(config):
 
     # the --no-cache-data option
     if config.has_option("default", "cache_data"):
-        default_cache_data = bool(config.get("default", "cache_data"))
+        default_cache_data = config.getboolean("default", "cache_data")
     else: default_cache_data = True
     option_parser.add_option("--no-cache-data",
         dest="cache_data",
@@ -540,7 +543,7 @@ def get_options(config):
 
     # the --no-cache-search option
     if config.has_option("default", "cache_search"):
-        default_cache_search = bool(config.get("default", "cache_search"))
+        default_cache_search = config.getboolean("default", "cache_search")
     else: default_cache_search = True
     option_parser.add_option("--no-cache-search",
         dest="cache_search",
@@ -550,7 +553,7 @@ def get_options(config):
 
     # the -q/--quiet option
     if config.has_option("default", "quiet"):
-        default_quiet = bool(config.get("default", "quiet"))
+        default_quiet = config.getboolean("default", "quiet")
     else: default_quiet = False
     option_parser.add_option("-q", "--quiet",
         dest="quiet",
@@ -569,7 +572,7 @@ def get_options(config):
 
     # the -v/--verbose option
     if config.has_option("default", "verbose"):
-        default_verbose = bool(config.get("default", "verbose"))
+        default_verbose = config.getboolean("default", "verbose")
     else: default_verbose = False
     option_parser.add_option("-v", "--verbose",
         dest="verbose",
@@ -617,7 +620,11 @@ def get_config():
         "weatherrc"
         ]
     for rcfile in rcfiles:
         "weatherrc"
         ]
     for rcfile in rcfiles:
-        if os.access(rcfile, os.R_OK): config.read(rcfile)
+        if os.access(rcfile, os.R_OK):
+            if pyversion("3"):
+                config.read(rcfile, encoding="utf-8")
+            else:
+                config.read(rcfile)
     for section in config.sections():
         if section != section.lower():
             if config.has_section(section.lower()):
@@ -653,7 +660,10 @@ def integrate_search_cache(config, cachedir, setpath):
             pass
         return config
     scache = configparser.ConfigParser()
-    scache.read(scache_fn)
+    if pyversion("3"):
+        scache.read(scache_fn, encoding="utf-8")
+    else:
+        scache.read(scache_fn)
     for section in scache.sections():
         if not config.has_section(section):
             config.add_section(section)
@@ -749,9 +759,12 @@ def guess(
             if pyversion("3"):
                 stations.read_string(
                     gzip.open(datafile).read().decode("utf-8") )
             if pyversion("3"):
                 stations.read_string(
                     gzip.open(datafile).read().decode("utf-8") )
-            else: stations.readfp( gzip.open(datafile) )
+            else: stations.read_file( gzip.open(datafile) )
         else:
-            stations.read(datafile)
+            if pyversion("3"):
+                stations.read(datafile, encoding="utf-8")
+            else:
+                stations.read(datafile)
     else:
         message = "%s error: can't find \"%s\" data file\n" % (
             os.path.basename( sys.argv[0] ),
@@ -767,9 +780,12 @@ def guess(
             import gzip
             if pyversion("3"):
                 zones.read_string( gzip.open(datafile).read().decode("utf-8") )
-            else: zones.readfp( gzip.open(datafile) )
+            else: zones.read_file( gzip.open(datafile) )
         else:
-            zones.read(datafile)
+            if pyversion("3"):
+                zones.read(datafile, encoding="utf-8")
+            else:
+                zones.read(datafile)
     else:
         message = "%s error: can't find \"%s\" data file\n" % (
             os.path.basename( sys.argv[0] ),
@@ -794,9 +810,12 @@ def guess(
                 if pyversion("3"):
                     airports.read_string(
                         gzip.open(datafile).read().decode("utf-8") )
                 if pyversion("3"):
                     airports.read_string(
                         gzip.open(datafile).read().decode("utf-8") )
-                else: airports.readfp( gzip.open(datafile) )
+                else: airports.read_file( gzip.open(datafile) )
             else:
-                airports.read(datafile)
+                if pyversion("3"):
+                    airports.read(datafile, encoding="utf-8")
+                else:
+                    airports.read(datafile)
         else:
             message = "%s error: can't find \"%s\" data file\n" % (
                 os.path.basename( sys.argv[0] ),
@@ -883,9 +902,12 @@ def guess(
                 if pyversion("3"):
                     zctas.read_string(
                         gzip.open(datafile).read().decode("utf-8") )
                 if pyversion("3"):
                     zctas.read_string(
                         gzip.open(datafile).read().decode("utf-8") )
-                else: zctas.readfp( gzip.open(datafile) )
+                else: zctas.read_file( gzip.open(datafile) )
             else:
-                zctas.read(datafile)
+                if pyversion("3"):
+                    zctas.read(datafile, encoding="utf-8")
+                else:
+                    zctas.read(datafile)
         else:
             message = "%s error: can't find \"%s\" data file\n" % (
                 os.path.basename( sys.argv[0] ),
@@ -941,9 +963,12 @@ def guess(
                 if pyversion("3"):
                     places.read_string(
                         gzip.open(datafile).read().decode("utf-8") )
                 if pyversion("3"):
                     places.read_string(
                         gzip.open(datafile).read().decode("utf-8") )
-                else: places.readfp( gzip.open(datafile) )
+                else: places.read_file( gzip.open(datafile) )
             else:
-                places.read(datafile)
+                if pyversion("3"):
+                    places.read(datafile, encoding="utf-8")
+                else:
+                    places.read(datafile)
         else:
             message = "%s error: can't find \"%s\" data file\n" % (
                 os.path.basename( sys.argv[0] ),
@@ -1105,7 +1130,7 @@ def guess(
             print(
                 "   (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
             )
-        elif searchtype is "coordinates":
+        elif searchtype == "coordinates":
             print( "   (%.3gkm, %.3gmi)" % (km, mi) )
         if zone[0]:
             print(
             print( "   (%.3gkm, %.3gmi)" % (km, mi) )
         if zone[0]:
             print(
@@ -1121,7 +1146,7 @@ def guess(
             print(
                 "   (proximity %s, %.3gkm, %.3gmi)" % ( score, km, mi )
             )
-        elif searchtype is "coordinates" and zone[0]:
+        elif searchtype == "coordinates" and zone[0]:
             print( "   (%.3gkm, %.3gmi)" % (km, mi) )
     if cache_search:
         now = time.time()
             print( "   (%.3gkm, %.3gmi)" % (km, mi) )
     if cache_search:
         now = time.time()
@@ -1134,7 +1159,7 @@ def guess(
         )
         search_cache = ["\n"]
         search_cache.append( "[%s]\n" % search[0] ) 
-        search_cache.append( "description = cached %s\n" % nowstamp )
+        search_cache.append( "cached = %s\n" % nowstamp )
         for uriname in sorted(uris.keys()):
             search_cache.append( "%s = %s\n" % ( uriname, uris[uriname] ) )
         real_cachedir = os.path.expanduser(cachedir)
@@ -1160,7 +1185,10 @@ def guess(
             )
         try:
             scache_existing = configparser.ConfigParser()
-            scache_existing.read(scache_fn)
+            if pyversion("3"):
+                scache_existing.read(scache_fn, encoding="utf-8")
+            else:
+                scache_existing.read(scache_fn)
             if not scache_existing.has_section(search[0]):
                 scache_fd = codecs.open(scache_fn, "a", "utf-8")
                 scache_fd.writelines(search_cache)
@@ -1211,7 +1239,7 @@ def gecos(formatted):
     return tuple(coordinates)
 
 def correlate():
-    import codecs, csv, datetime, hashlib, os, re, sys, tarfile, time, zipfile
+    import codecs, csv, datetime, hashlib, os, re, sys, time, zipfile
     if pyversion("3"): import configparser
     else: import ConfigParser as configparser
     for filename in os.listdir("."):
     if pyversion("3"): import configparser
     else: import ConfigParser as configparser
     for filename in os.listdir("."):
@@ -1268,56 +1296,56 @@ def correlate():
         weather_copyright,
         os.path.basename( sys.argv[0] ),
         datetime.date.isoformat(
-            datetime.datetime.fromtimestamp( time.time() )
+            datetime.datetime.utcfromtimestamp( int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) )
         ),
         hashlib.md5( open(gcounties_an, "rb").read() ).hexdigest(),
         datetime.date.isoformat(
-            datetime.datetime.fromtimestamp( os.path.getmtime(gcounties_an) )
+            datetime.datetime.utcfromtimestamp( os.path.getmtime(gcounties_an) )
         ),
         gcounties_an,
         hashlib.md5( open(gcousubs_an, "rb").read() ).hexdigest(),
         datetime.date.isoformat(
-            datetime.datetime.fromtimestamp( os.path.getmtime(gcousubs_an) )
+            datetime.datetime.utcfromtimestamp( os.path.getmtime(gcousubs_an) )
         ),
         gcousubs_an,
         hashlib.md5( open(gplace_an, "rb").read() ).hexdigest(),
         datetime.date.isoformat(
-            datetime.datetime.fromtimestamp( os.path.getmtime(gplace_an) )
+            datetime.datetime.utcfromtimestamp( os.path.getmtime(gplace_an) )
         ),
         gplace_an,
         hashlib.md5( open(gzcta_an, "rb").read() ).hexdigest(),
         datetime.date.isoformat(
-            datetime.datetime.fromtimestamp( os.path.getmtime(gzcta_an) )
+            datetime.datetime.utcfromtimestamp( os.path.getmtime(gzcta_an) )
         ),
         gzcta_an,
         hashlib.md5( open(cpfzcf_fn, "rb").read() ).hexdigest(),
         datetime.date.isoformat(
-            datetime.datetime.fromtimestamp( os.path.getmtime(cpfzcf_fn) )
+            datetime.datetime.utcfromtimestamp( os.path.getmtime(cpfzcf_fn) )
         ),
         cpfzcf_fn,
         hashlib.md5( open(nsdcccc_fn, "rb").read() ).hexdigest(),
         datetime.date.isoformat(
-            datetime.datetime.fromtimestamp( os.path.getmtime(nsdcccc_fn) )
+            datetime.datetime.utcfromtimestamp( os.path.getmtime(nsdcccc_fn) )
         ),
         nsdcccc_fn,
         hashlib.md5( open(ourairports_fn, "rb").read() ).hexdigest(),
         datetime.date.isoformat(
-            datetime.datetime.fromtimestamp( os.path.getmtime(ourairports_fn) )
+            datetime.datetime.utcfromtimestamp( os.path.getmtime(ourairports_fn) )
         ),
         ourairports_fn,
         hashlib.md5( open(overrides_fn, "rb").read() ).hexdigest(),
         datetime.date.isoformat(
-            datetime.datetime.fromtimestamp( os.path.getmtime(overrides_fn) )
+            datetime.datetime.utcfromtimestamp( os.path.getmtime(overrides_fn) )
         ),
         overrides_fn,
         hashlib.md5( open(slist_fn, "rb").read() ).hexdigest(),
         datetime.date.isoformat(
-            datetime.datetime.fromtimestamp( os.path.getmtime(slist_fn) )
+            datetime.datetime.utcfromtimestamp( os.path.getmtime(slist_fn) )
         ),
         slist_fn,
         hashlib.md5( open(zlist_fn, "rb").read() ).hexdigest(),
         datetime.date.isoformat(
-            datetime.datetime.fromtimestamp( os.path.getmtime(zlist_fn) )
+            datetime.datetime.utcfromtimestamp( os.path.getmtime(zlist_fn) )
         ),
         zlist_fn
     )
@@ -1330,7 +1358,7 @@ def correlate():
     sys.stdout.write(message)
     sys.stdout.flush()
     count = 0
-    gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "rU")
+    gcounties = zipfile.ZipFile(gcounties_an).open(gcounties_fn, "r")
     columns = gcounties.readline().decode("utf-8").strip().split("\t")
     for line in gcounties:
         fields = line.decode("utf-8").strip().split("\t")
@@ -1353,7 +1381,7 @@ def correlate():
     sys.stdout.write(message)
     sys.stdout.flush()
     count = 0
-    gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "rU")
+    gcousubs = zipfile.ZipFile(gcousubs_an).open(gcousubs_fn, "r")
     columns = gcousubs.readline().decode("utf-8").strip().split("\t")
     for line in gcousubs:
         fields = line.decode("utf-8").strip().split("\t")
@@ -1376,7 +1404,7 @@ def correlate():
     sys.stdout.write(message)
     sys.stdout.flush()
     count = 0
-    gplace = zipfile.ZipFile(gplace_an).open(gplace_fn, "rU")
+    gplace = zipfile.ZipFile(gplace_an).open(gplace_fn, "r")
     columns = gplace.readline().decode("utf-8").strip().split("\t")
     for line in gplace:
         fields = line.decode("utf-8").strip().split("\t")
@@ -1399,7 +1427,7 @@ def correlate():
     sys.stdout.write(message)
     sys.stdout.flush()
     count = 0
-    slist = codecs.open(slist_fn, "rU", "utf-8")
+    slist = codecs.open(slist_fn, "r", "utf-8")
     for line in slist:
         icao = line.split("#")[0].strip()
         if icao:
@@ -1414,7 +1442,7 @@ def correlate():
     sys.stdout.write(message)
     sys.stdout.flush()
     count = 0
-    nsdcccc = codecs.open(nsdcccc_fn, "rU", "utf-8")
+    nsdcccc = codecs.open(nsdcccc_fn, "r", "utf-8")
     for line in nsdcccc:
         line = str(line)
         fields = line.split(";")
@@ -1443,20 +1471,20 @@ def correlate():
     sys.stdout.write(message)
     sys.stdout.flush()
     count = 0
-    ourairports = open(ourairports_fn, "rU")
+    ourairports = open(ourairports_fn, "r")
     for row in csv.reader(ourairports):
-        icao = row[12].decode('utf-8').lower()
+        icao = row[12].lower()
         if icao in stations:
-            iata = row[13].decode('utf-8').lower()
+            iata = row[13].lower()
             if len(iata) == 3: airports[iata] = { "station": icao }
             if "description" not in stations[icao]:
                 description = []
-                name = row[3].decode('utf-8')
+                name = row[3]
                 if name: description.append(name)
-                municipality = row[10].decode('utf-8')
+                municipality = row[10]
                 if municipality: description.append(municipality)
-                region = row[9].decode('utf-8')
-                country = row[8].decode('utf-8')
+                region = row[9]
+                country = row[8]
                 if region:
                     if "-" in region:
                         c,r = region.split("-", 1)
@@ -1467,9 +1495,9 @@ def correlate():
                 if description:
                     stations[icao]["description"] = ", ".join(description)
             if "location" not in stations[icao]:
-                lat = row[4].decode('utf-8')
+                lat = row[4]
                 if lat:
-                    lon = row[5].decode('utf-8')
+                    lon = row[5]
                     if lon:
                         stations[icao]["location"] = gecos(
                             "%s,%s" % (lat, lon)
@@ -1481,7 +1509,7 @@ def correlate():
     sys.stdout.write(message)
     sys.stdout.flush()
     count = 0
-    zlist = codecs.open(zlist_fn, "rU", "utf-8")
+    zlist = codecs.open(zlist_fn, "r", "utf-8")
     for line in zlist:
         line = line.split("#")[0].strip()
         if line:
@@ -1494,7 +1522,7 @@ def correlate():
     sys.stdout.flush()
     count = 0
     cpfz = {}
-    cpfzcf = codecs.open(cpfzcf_fn, "rU", "utf-8")
+    cpfzcf = codecs.open(cpfzcf_fn, "r", "utf-8")
     for line in cpfzcf:
         fields = line.strip().split("|")
         if len(fields) == 11 \
@@ -1502,6 +1530,9 @@ def correlate():
             zone = "z".join( fields[:2] ).lower()
             if zone in zones:
                 state = fields[0]
             zone = "z".join( fields[:2] ).lower()
             if zone in zones:
                 state = fields[0]
+                description = fields[3].strip()
+                fips = "fips%s"%fields[6]
+                countycode = "%sc%s" % (state.lower(), fips[-3:])
                 if state:
                     zones[zone]["coastal_flood_statement"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
@@ -1509,27 +1540,25 @@ def correlate():
                     zones[zone]["flash_flood_statement"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                         "flash_flood/statement/%s/%s.txt"
                     zones[zone]["flash_flood_statement"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                         "flash_flood/statement/%s/%s.txt"
-                        % (state.lower(), zone))
+                        % (state.lower(), countycode))
                     zones[zone]["flash_flood_warning"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                         "flash_flood/warning/%s/%s.txt"
                     zones[zone]["flash_flood_warning"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                         "flash_flood/warning/%s/%s.txt"
-                        % (state.lower(), zone))
+                        % (state.lower(), countycode))
                     zones[zone]["flash_flood_watch"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                         "flash_flood/watch/%s/%s.txt" % (state.lower(), zone))
                     zones[zone]["flash_flood_watch"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                         "flash_flood/watch/%s/%s.txt" % (state.lower(), zone))
-                    zones[zone]["flood_statement"] = (
-                        "https://tgftp.nws.noaa.gov/data/watches_warnings/"
-                        "flood/statement/%s/%s.txt" % (state.lower(), zone))
                     zones[zone]["flood_warning"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                     zones[zone]["flood_warning"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
-                        "flood/warning/%s/%s.txt" % (state.lower(), zone))
+                        "flood/warning/%s/%s.txt"
+                        % (state.lower(), countycode))
                     zones[zone]["severe_thunderstorm_warning"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                     zones[zone]["severe_thunderstorm_warning"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
-                        "thunderstorm/%s/%s.txt" % (state.lower(), zone))
+                        "thunderstorm/%s/%s.txt" % (state.lower(), countycode))
                     zones[zone]["severe_weather_statement"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                         "severe_weather_stmt/%s/%s.txt"
                     zones[zone]["severe_weather_statement"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                         "severe_weather_stmt/%s/%s.txt"
-                        % (state.lower(), zone))
+                        % (state.lower(), countycode))
                     zones[zone]["short_term_forecast"] = (
                         "https://tgftp.nws.noaa.gov/data/forecasts/nowcast/"
                         "%s/%s.txt" % (state.lower(), zone))
                     zones[zone]["short_term_forecast"] = (
                         "https://tgftp.nws.noaa.gov/data/forecasts/nowcast/"
                         "%s/%s.txt" % (state.lower(), zone))
@@ -1540,14 +1569,46 @@ def correlate():
                     zones[zone]["state_forecast"] = (
                         "https://tgftp.nws.noaa.gov/data/forecasts/state/"
                         "%s/%s.txt" % (state.lower(), zone))
                     zones[zone]["state_forecast"] = (
                         "https://tgftp.nws.noaa.gov/data/forecasts/state/"
                         "%s/%s.txt" % (state.lower(), zone))
+                    zones[zone]["tornado"] = (
+                        "https://tgftp.nws.noaa.gov/data/watches_warnings/"
+                        "tornado/%s/%s.txt" % (state.lower(), countycode))
                     zones[zone]["urgent_weather_message"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                         "non_precip/%s/%s.txt" % (state.lower(), zone))
                     zones[zone]["zone_forecast"] = (
                         "https://tgftp.nws.noaa.gov/data/forecasts/zone/"
                         "%s/%s.txt" % (state.lower(), zone))
                     zones[zone]["urgent_weather_message"] = (
                         "https://tgftp.nws.noaa.gov/data/watches_warnings/"
                         "non_precip/%s/%s.txt" % (state.lower(), zone))
                     zones[zone]["zone_forecast"] = (
                         "https://tgftp.nws.noaa.gov/data/forecasts/zone/"
                         "%s/%s.txt" % (state.lower(), zone))
-                description = fields[3].strip()
-                fips = "fips%s"%fields[6]
+                tzcode = fields[7]
+                if tzcode == "A":
+                    zones[zone]["tz"] = "US/Alaska"
+                elif tzcode == "AH":
+                    zones[zone]["tz"] = "US/Aleutian"
+                elif tzcode in ("C", "CE", "CM"):
+                    zones[zone]["tz"] = "US/Central"
+                elif tzcode in ("E", "e"):
+                    zones[zone]["tz"] = "US/Eastern"
+                elif tzcode == "F":
+                    zones[zone]["tz"] = "Pacific/Guadalcanal"
+                elif tzcode == "G":
+                    zones[zone]["tz"] = "Pacific/Guam"
+                elif tzcode == "H":
+                    zones[zone]["tz"] = "US/Hawaii"
+                elif tzcode == "J":
+                    zones[zone]["tz"] = "Japan"
+                elif tzcode == "K":
+                    zones[zone]["tz"] = "Pacific/Kwajalein"
+                elif tzcode in ("M", "MC", "MP"):
+                    zones[zone]["tz"] = "US/Mountain"
+                elif tzcode == "m":
+                    zones[zone]["tz"] = "US/Arizona"
+                elif tzcode == "P":
+                    zones[zone]["tz"] = "US/Pacific"
+                elif tzcode == "S":
+                    zones[zone]["tz"] = "US/Samoa"
+                elif tzcode == "V":
+                    zones[zone]["tz"] = "America/Virgin"
+                else:
+                    zones[zone]["tz"] = ""
                 county = fields[5]
                 if county:
                     if description.endswith(county):
@@ -1566,7 +1627,7 @@ def correlate():
     sys.stdout.write(message)
     sys.stdout.flush()
     count = 0
-    gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "rU")
+    gzcta = zipfile.ZipFile(gzcta_an).open(gzcta_fn, "r")
     columns = gzcta.readline().decode("utf-8").strip().split("\t")
     for line in gzcta:
         fields = line.decode("utf-8").strip().split("\t")
@@ -1589,7 +1650,7 @@ def correlate():
     removed = 0
     changed = 0
     overrides = configparser.ConfigParser()
-    overrides.readfp( codecs.open(overrides_fn, "r", "utf8") )
+    overrides.read_file( codecs.open(overrides_fn, "r", "utf8") )
     overrideslog = []
     for section in overrides.sections():
         addopt = 0
@@ -1739,6 +1800,12 @@ def correlate():
     if os.path.exists(overrideslog_fn):
         os.rename(overrideslog_fn, "%s_old"%overrideslog_fn)
     overrideslog_fd = codecs.open(overrideslog_fn, "w", "utf8")
+    import time
+    overrideslog_fd.write(
+        '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
+        '# use, copy, modify, and distribute this software is granted under terms\n'
+        '# provided in the LICENSE file distributed with this software.\n\n'
+        % time.gmtime().tm_year)
     overrideslog_fd.writelines(overrideslog)
     overrideslog_fd.close()
     print("done (%s overridden sections: +%s/-%s/!%s)." % (
@@ -1949,6 +2016,8 @@ def correlate():
                     if type(element) is float: elements.append("%.7f"%element)
                     else: elements.append( repr(element) )
                 value = "(%s)"%", ".join(elements)
+            if type(value) is bytes:
+                value = value.decode("utf-8")
             stations_fd.write( "\n%s = %s" % (key, value) )
         count += 1
     stations_fd.write("\n")
@@ -2004,15 +2073,30 @@ def correlate():
     sys.stdout.write(message)
     sys.stdout.flush()
     airports = configparser.ConfigParser()
-    airports.read(airports_fn)
+    if pyversion("3"):
+        airports.read(airports_fn, encoding="utf-8")
+    else:
+        airports.read(airports_fn)
     places = configparser.ConfigParser()
-    places.read(places_fn)
+    if pyversion("3"):
+        places.read(places_fn, encoding="utf-8")
+    else:
+        places.read(places_fn)
     stations = configparser.ConfigParser()
-    stations.read(stations_fn)
+    if pyversion("3"):
+        stations.read(stations_fn, encoding="utf-8")
+    else:
+        stations.read(stations_fn)
     zctas = configparser.ConfigParser()
-    zctas.read(zctas_fn)
+    if pyversion("3"):
+        zctas.read(zctas_fn, encoding="utf-8")
+    else:
+        zctas.read(zctas_fn)
     zones = configparser.ConfigParser()
-    zones.read(zones_fn)
+    if pyversion("3"):
+        zones.read(zones_fn, encoding="utf-8")
+    else:
+        zones.read(zones_fn)
     qalog = []
     places_nocentroid = 0
     places_nodescription = 0
@@ -2087,6 +2171,12 @@ def correlate():
     if os.path.exists(qalog_fn):
         os.rename(qalog_fn, "%s_old"%qalog_fn)
     qalog_fd = codecs.open(qalog_fn, "w", "utf8")
+    import time
+    qalog_fd.write(
+        '# Copyright (c) %s Jeremy Stanley <fungi@yuggoth.org>. Permission to\n'
+        '# use, copy, modify, and distribute this software is granted under terms\n'
+        '# provided in the LICENSE file distributed with this software.\n\n'
+        % time.gmtime().tm_year)
     qalog_fd.writelines(qalog)
     qalog_fd.close()
     if qalog:
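
Note on the new per-zone tz values: the TODO in get_alert() above keeps a fixed one-day grace offset because alert expiration stamps appear to be written in the zone's local time. With each zone record now carrying a tz name, a consumer of the generated zones file could evaluate an "Expires:" stamp directly in that zone's time zone. A minimal sketch, assuming Python 3.9+ zoneinfo and a zones file produced by this commit (the function name and arguments here are illustrative, not part of the commit):

# Sketch only: evaluate an alert "Expires: YYYYMMDDHHMM" stamp in the
# zone's own time zone using the new "tz" value from the zones file.
import configparser
import datetime
from zoneinfo import ZoneInfo  # Python 3.9+

def alert_expired(zones_fn, zone, expires_stamp):
    """Return True if the stamp is already in the past for this zone."""
    zones = configparser.ConfigParser()
    zones.read(zones_fn, encoding="utf-8")
    tzname = zones.get(zone, "tz", fallback="")
    expires = datetime.datetime.strptime(expires_stamp, "%Y%m%d%H%M")
    if tzname:
        tz = ZoneInfo(tzname)
        return expires.replace(tzinfo=tz) < datetime.datetime.now(tz)
    # zones with an empty tz fall back to the user's local time
    return expires < datetime.datetime.now()

# e.g. alert_expired("zones", "paz021", "202403151800")  # hypothetical arguments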
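
The county-keyed alert URLs (flash flood statement/warning, flood warning, severe thunderstorm warning, severe weather statement, tornado) are now built from a derived county code rather than the forecast zone. A worked example of that derivation, using hypothetical values for the two correlation-file fields involved:

# Illustration only: hypothetical correlation-file values.
state = "PA"              # fields[0]
county_fips = "042101"    # fields[6] (hypothetical)
fips = "fips%s" % county_fips                       # "fips042101"
countycode = "%sc%s" % (state.lower(), fips[-3:])   # "pac101"
url = ("https://tgftp.nws.noaa.gov/data/watches_warnings/"
       "tornado/%s/%s.txt" % (state.lower(), countycode))
# -> https://tgftp.nws.noaa.gov/data/watches_warnings/tornado/pa/pac101.txt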