Skip to content

Commit 0733807

Browse files
committed
Reverts "filename" parameter rename for consistency
1 parent a110a69 commit 0733807

File tree

1 file changed

+76
-59
lines changed

1 file changed

+76
-59
lines changed

pvlib/iotools/tmy.py

Lines changed: 76 additions & 59 deletions
Original file line number | Diff line number | Diff line change
@@ -8,23 +8,23 @@
88

99
# Dictionary mapping TMY3 names to pvlib names
1010
VARIABLE_MAP = {
11-
'GHI (W/m^2)': 'ghi',
12-
'ETR (W/m^2)': 'ghi_extra',
13-
'DNI (W/m^2)': 'dni',
14-
'ETRN (W/m^2)': 'dni_extra',
15-
'DHI (W/m^2)': 'dhi',
16-
'Pressure (mbar)': 'pressure',
17-
'Wdir (degrees)': 'wind_direction',
18-
'Wspd (m/s)': 'wind_speed',
19-
'Dry-bulb (C)': 'temp_air',
20-
'Dew-point (C)': 'temp_dew',
21-
'RHum (%)': 'relative_humidity',
22-
'Alb (unitless)': 'albedo',
23-
'Pwat (cm)': 'precipitable_water'
11+
"GHI (W/m^2)": "ghi",
12+
"ETR (W/m^2)": "ghi_extra",
13+
"DNI (W/m^2)": "dni",
14+
"ETRN (W/m^2)": "dni_extra",
15+
"DHI (W/m^2)": "dhi",
16+
"Pressure (mbar)": "pressure",
17+
"Wdir (degrees)": "wind_direction",
18+
"Wspd (m/s)": "wind_speed",
19+
"Dry-bulb (C)": "temp_air",
20+
"Dew-point (C)": "temp_dew",
21+
"RHum (%)": "relative_humidity",
22+
"Alb (unitless)": "albedo",
23+
"Pwat (cm)": "precipitable_water",
2424
}
2525

2626

27-
def read_tmy3(filename_or_obj, coerce_year=None, map_variables=True, encoding=None):
27+
def read_tmy3(filename, coerce_year=None, map_variables=True, encoding=None):
2828
"""Read a TMY3 file into a pandas dataframe.
2929
3030
Note that values contained in the metadata dictionary are unchanged
@@ -37,7 +37,7 @@ def read_tmy3(filename_or_obj, coerce_year=None, map_variables=True, encoding=No
3737
3838
Parameters
3939
----------
40-
filename_or_obj : str, Path, or file-like object
40+
filename : str, Path, or file-like object
4141
A relative file path or absolute file path.
4242
coerce_year : int, optional
4343
If supplied, the year of the index will be set to ``coerce_year``, except
@@ -186,32 +186,34 @@ def read_tmy3(filename_or_obj, coerce_year=None, map_variables=True, encoding=No
186186
.. [3] `SolarAnywhere file formats
187187
<https://www.solaranywhere.com/support/historical-data/file-formats/>`_
188188
""" # noqa: E501
189-
head = ['USAF', 'Name', 'State', 'TZ', 'latitude', 'longitude', 'altitude']
189+
head = ["USAF", "Name", "State", "TZ", "latitude", "longitude", "altitude"]
190190

191-
with _file_context_manager(filename_or_obj, mode="r", encoding=encoding) as fbuf:
191+
with _file_context_manager(filename, mode="r", encoding=encoding) as fbuf:
192192
# header information on the 1st line (0 indexing)
193193
firstline = fbuf.readline()
194194
# use pandas to read the csv file buffer
195195
# header is actually the second line, but tell pandas to look for
196196
data = pd.read_csv(fbuf, header=0)
197197

198-
meta = dict(zip(head, firstline.rstrip('\n').split(",")))
198+
meta = dict(zip(head, firstline.rstrip("\n").split(",")))
199199
# convert metadata strings to numeric types
200-
meta['altitude'] = float(meta['altitude'])
201-
meta['latitude'] = float(meta['latitude'])
202-
meta['longitude'] = float(meta['longitude'])
203-
meta['TZ'] = float(meta['TZ'])
204-
meta['USAF'] = int(meta['USAF'])
200+
meta["altitude"] = float(meta["altitude"])
201+
meta["latitude"] = float(meta["latitude"])
202+
meta["longitude"] = float(meta["longitude"])
203+
meta["TZ"] = float(meta["TZ"])
204+
meta["USAF"] = int(meta["USAF"])
205205

206206
# get the date column as a pd.Series of numpy datetime64
207-
data_ymd = pd.to_datetime(data['Date (MM/DD/YYYY)'], format='%m/%d/%Y')
207+
data_ymd = pd.to_datetime(data["Date (MM/DD/YYYY)"], format="%m/%d/%Y")
208208
# extract minutes
209-
minutes = data['Time (HH:MM)'].str.split(':').str[1].astype(int)
209+
minutes = data["Time (HH:MM)"].str.split(":").str[1].astype(int)
210210
# shift the time column so that midnite is 00:00 instead of 24:00
211-
shifted_hour = data['Time (HH:MM)'].str.split(':').str[0].astype(int) % 24
211+
shifted_hour = data["Time (HH:MM)"].str.split(":").str[0].astype(int) % 24
212212
# shift the dates at midnight (24:00) so they correspond to the next day.
213213
# If midnight is specified as 00:00 do not shift date.
214-
data_ymd[data['Time (HH:MM)'].str[:2] == '24'] += datetime.timedelta(days=1) # noqa: E501
214+
data_ymd[data["Time (HH:MM)"].str[:2] == "24"] += datetime.timedelta(
215+
days=1
216+
) # noqa: E501
215217
# NOTE: as of pandas>=0.24 the pd.Series.array has a month attribute, but
216218
# in pandas-0.18.1, only DatetimeIndex has month, but indices are immutable
217219
# so we need to continue to work with the panda series of dates `data_ymd`
@@ -223,16 +225,19 @@ def read_tmy3(filename_or_obj, coerce_year=None, map_variables=True, encoding=No
223225
# timedeltas
224226
if coerce_year is not None:
225227
data_ymd = data_ymd.map(lambda dt: dt.replace(year=coerce_year))
226-
data_ymd.iloc[-1] = data_ymd.iloc[-1].replace(year=coerce_year+1)
228+
data_ymd.iloc[-1] = data_ymd.iloc[-1].replace(year=coerce_year + 1)
227229
# NOTE: as of pvlib-0.6.3, min req is pandas-0.18.1, so pd.to_timedelta
228230
# unit must be in (D,h,m,s,ms,us,ns), but pandas>=0.24 allows unit='hour'
229-
data.index = data_ymd + pd.to_timedelta(shifted_hour, unit='h') \
230-
+ pd.to_timedelta(minutes, unit='min')
231+
data.index = (
232+
data_ymd
233+
+ pd.to_timedelta(shifted_hour, unit="h")
234+
+ pd.to_timedelta(minutes, unit="min")
235+
)
231236

232237
if map_variables:
233238
data = data.rename(columns=VARIABLE_MAP)
234239

235-
data = data.tz_localize(int(meta['TZ'] * 3600))
240+
data = data.tz_localize(int(meta["TZ"] * 3600))
236241

237242
return data, meta
238243

@@ -367,9 +372,9 @@ def read_tmy2(filename):
367372
:doi:`10.2172/87130`
368373
""" # noqa: E501
369374
# paste in the column info as one long line
370-
string = '%2d%2d%2d%2d%4d%4d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%2d%1s%1d%2d%1s%1d%4d%1s%1d%4d%1s%1d%3d%1s%1d%4d%1s%1d%3d%1s%1d%3d%1s%1d%4d%1s%1d%5d%1s%1d%10d%3d%1s%1d%3d%1s%1d%3d%1s%1d%2d%1s%1d' # noqa: E501
371-
columns = 'year,month,day,hour,ETR,ETRN,GHI,GHISource,GHIUncertainty,DNI,DNISource,DNIUncertainty,DHI,DHISource,DHIUncertainty,GHillum,GHillumSource,GHillumUncertainty,DNillum,DNillumSource,DNillumUncertainty,DHillum,DHillumSource,DHillumUncertainty,Zenithlum,ZenithlumSource,ZenithlumUncertainty,TotCld,TotCldSource,TotCldUncertainty,OpqCld,OpqCldSource,OpqCldUncertainty,DryBulb,DryBulbSource,DryBulbUncertainty,DewPoint,DewPointSource,DewPointUncertainty,RHum,RHumSource,RHumUncertainty,Pressure,PressureSource,PressureUncertainty,Wdir,WdirSource,WdirUncertainty,Wspd,WspdSource,WspdUncertainty,Hvis,HvisSource,HvisUncertainty,CeilHgt,CeilHgtSource,CeilHgtUncertainty,PresentWeather,Pwat,PwatSource,PwatUncertainty,AOD,AODSource,AODUncertainty,SnowDepth,SnowDepthSource,SnowDepthUncertainty,LastSnowfall,LastSnowfallSource,LastSnowfallUncertaint' # noqa: E501
372-
hdr_columns = 'WBAN,City,State,TZ,latitude,longitude,altitude'
375+
string = "%2d%2d%2d%2d%4d%4d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%4d%1s%1d%2d%1s%1d%2d%1s%1d%4d%1s%1d%4d%1s%1d%3d%1s%1d%4d%1s%1d%3d%1s%1d%3d%1s%1d%4d%1s%1d%5d%1s%1d%10d%3d%1s%1d%3d%1s%1d%3d%1s%1d%2d%1s%1d" # noqa: E501
376+
columns = "year,month,day,hour,ETR,ETRN,GHI,GHISource,GHIUncertainty,DNI,DNISource,DNIUncertainty,DHI,DHISource,DHIUncertainty,GHillum,GHillumSource,GHillumUncertainty,DNillum,DNillumSource,DNillumUncertainty,DHillum,DHillumSource,DHillumUncertainty,Zenithlum,ZenithlumSource,ZenithlumUncertainty,TotCld,TotCldSource,TotCldUncertainty,OpqCld,OpqCldSource,OpqCldUncertainty,DryBulb,DryBulbSource,DryBulbUncertainty,DewPoint,DewPointSource,DewPointUncertainty,RHum,RHumSource,RHumUncertainty,Pressure,PressureSource,PressureUncertainty,Wdir,WdirSource,WdirUncertainty,Wspd,WspdSource,WspdUncertainty,Hvis,HvisSource,HvisUncertainty,CeilHgt,CeilHgtSource,CeilHgtUncertainty,PresentWeather,Pwat,PwatSource,PwatUncertainty,AOD,AODSource,AODUncertainty,SnowDepth,SnowDepthSource,SnowDepthUncertainty,LastSnowfall,LastSnowfallSource,LastSnowfallUncertaint" # noqa: E501
377+
hdr_columns = "WBAN,City,State,TZ,latitude,longitude,altitude"
373378

374379
tmy2, tmy2_meta = _read_tmy2(string, columns, hdr_columns, str(filename))
375380

@@ -396,17 +401,19 @@ def _parsemeta_tmy2(columns, line):
396401
meta = rawmeta[:3] # take the first string entries
397402
meta.append(int(rawmeta[3]))
398403
# Convert to decimal notation with S negative
399-
longitude = (
400-
float(rawmeta[5]) + float(rawmeta[6])/60) * (2*(rawmeta[4] == 'N') - 1)
404+
longitude = (float(rawmeta[5]) + float(rawmeta[6]) / 60) * (
405+
2 * (rawmeta[4] == "N") - 1
406+
)
401407
# Convert to decimal notation with W negative
402-
latitude = (
403-
float(rawmeta[8]) + float(rawmeta[9])/60) * (2*(rawmeta[7] == 'E') - 1)
408+
latitude = (float(rawmeta[8]) + float(rawmeta[9]) / 60) * (
409+
2 * (rawmeta[7] == "E") - 1
410+
)
404411
meta.append(longitude)
405412
meta.append(latitude)
406413
meta.append(float(rawmeta[10]))
407414

408415
# Creates a dictionary of metadata
409-
meta_dict = dict(zip(columns.split(','), meta))
416+
meta_dict = dict(zip(columns.split(","), meta))
410417
return meta_dict
411418

412419

@@ -424,36 +431,42 @@ def _read_tmy2(string, columns, hdr_columns, fname):
424431
# Reset the cursor and array for each line
425432
cursor = 1
426433
part = []
427-
for marker in string.split('%'):
434+
for marker in string.split("%"):
428435
# Skip the first line of markers
429-
if marker == '':
436+
if marker == "":
430437
continue
431438

432439
# Read the next increment from the marker list
433-
increment = int(re.findall(r'\d+', marker)[0])
440+
increment = int(re.findall(r"\d+", marker)[0])
434441
next_cursor = cursor + increment
435442

436443
# Extract the value from the line in the file
437-
val = (line[cursor:next_cursor])
444+
val = line[cursor:next_cursor]
438445
# increment the cursor by the length of the read value
439446
cursor = next_cursor
440447

441448
# Determine the datatype from the marker string
442-
if marker[-1] == 'd':
449+
if marker[-1] == "d":
443450
try:
444451
val = float(val)
445452
except ValueError:
446-
raise ValueError('WARNING: In {} Read value is not an '
447-
'integer " {} " '.format(fname, val))
448-
elif marker[-1] == 's':
453+
raise ValueError(
454+
"WARNING: In {} Read value is not an "
455+
'integer " {} " '.format(fname, val)
456+
)
457+
elif marker[-1] == "s":
449458
try:
450459
val = str(val)
451460
except ValueError:
452-
raise ValueError('WARNING: In {} Read value is not a '
453-
'string " {} " '.format(fname, val))
461+
raise ValueError(
462+
"WARNING: In {} Read value is not a "
463+
'string " {} " '.format(fname, val)
464+
)
454465
else:
455-
raise Exception('WARNING: In {} Improper column DataFrame '
456-
'" %{} " '.format(__name__, marker))
466+
raise Exception(
467+
"WARNING: In {} Improper column DataFrame "
468+
'" %{} " '.format(__name__, marker)
469+
)
457470

458471
part.append(val)
459472

@@ -465,13 +478,17 @@ def _read_tmy2(string, columns, hdr_columns, fname):
465478
axes.append(part)
466479

467480
# Create datetime objects from read data
468-
date.append(datetime.datetime(year=int(year),
469-
month=int(part[1]),
470-
day=int(part[2]),
471-
hour=(int(part[3]) - 1)))
472-
473-
data = pd.DataFrame(
474-
axes, index=date,
475-
columns=columns.split(',')).tz_localize(int(meta['TZ'] * 3600))
481+
date.append(
482+
datetime.datetime(
483+
year=int(year),
484+
month=int(part[1]),
485+
day=int(part[2]),
486+
hour=(int(part[3]) - 1),
487+
)
488+
)
489+
490+
data = pd.DataFrame(axes, index=date, columns=columns.split(",")).tz_localize(
491+
int(meta["TZ"] * 3600)
492+
)
476493

477494
return data, meta

0 commit comments

Comments
 (0)