Solar Radiation Monitoring Laboratory (SRML)
Contents
Solar Radiation Monitoring Laboratory (SRML)#
The Solar Radiation Monitoring Laboratory (SRML) at the University of Oregon has been providing solar radiation data for the Northwestern United States since 1975. The SRML monitoring station network consists of both high-quality stations (Tier 1) that measure all three irradiance components at a 1-minute resolution, as well as stations with lower-quality instruments (Tier 2) that only log measurements hourly. A full list of the 42 stations (including discontinued stations) can be found at the SRML website.
Only the high-quality SRML stations are included in the SolarStations’ station listing; however, all active stations are shown below.
Station full name | Abbreviation | State | Country | Latitude | Longitude | Elevation | Time period | Network | Owner | Comment | Data availability | Tier | Instrument | Components |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
Loading... (need help?) |
Data retrieval#
Data from the SRML stations are stored in monthly files for each station and can be freely downloaded from their website. The data can also be downloaded programmatically using the pvlib-python library, specifically the read_srml_month_from_solardat
function. A list of the station acronyms can be found here.
Help support the SRML
If you find the data useful, please consider donating to support the SRML.
An example of how to use pvlib to download data from the Hermiston station for June 2020 is shown here:
import pvlib

# Download one month of SRML data for the Hermiston (HE) station.
# Data files are hosted on the University of Oregon solardat archive and
# retrieved over HTTP by pvlib.
df = pvlib.iotools.read_srml_month_from_solardat(
    station='HE',  # SRML two-letter station abbreviation
    year=2020,
    month=6)

# print the first 5 rows of data (df.head() defaults to 5 rows)
show(df.head(), scrollX=True, scrollCollapse=True, paging=False, maxColumns=100, dom="tpr")
---------------------------------------------------------------------------
TimeoutError Traceback (most recent call last)
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/urllib/request.py:1348, in AbstractHTTPHandler.do_open(self, http_class, req, **http_conn_args)
1347 try:
-> 1348 h.request(req.get_method(), req.selector, req.data, headers,
1349 encode_chunked=req.has_header('Transfer-encoding'))
1350 except OSError as err: # timeout error
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:1283, in HTTPConnection.request(self, method, url, body, headers, encode_chunked)
1282 """Send a complete request to the server."""
-> 1283 self._send_request(method, url, body, headers, encode_chunked)
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:1329, in HTTPConnection._send_request(self, method, url, body, headers, encode_chunked)
1328 body = _encode(body, 'body')
-> 1329 self.endheaders(body, encode_chunked=encode_chunked)
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:1278, in HTTPConnection.endheaders(self, message_body, encode_chunked)
1277 raise CannotSendHeader()
-> 1278 self._send_output(message_body, encode_chunked=encode_chunked)
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:1038, in HTTPConnection._send_output(self, message_body, encode_chunked)
1037 del self._buffer[:]
-> 1038 self.send(msg)
1040 if message_body is not None:
1041
1042 # create a consistent interface to message_body
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:976, in HTTPConnection.send(self, data)
975 if self.auto_open:
--> 976 self.connect()
977 else:
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/http/client.py:942, in HTTPConnection.connect(self)
941 sys.audit("http.client.connect", self, self.host, self.port)
--> 942 self.sock = self._create_connection(
943 (self.host,self.port), self.timeout, self.source_address)
944 # Might fail in OSs that don't implement TCP_NODELAY
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/socket.py:845, in create_connection(address, timeout, source_address)
844 try:
--> 845 raise err
846 finally:
847 # Break explicitly a reference cycle
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/socket.py:833, in create_connection(address, timeout, source_address)
832 sock.bind(source_address)
--> 833 sock.connect(sa)
834 # Break explicitly a reference cycle
TimeoutError: [Errno 110] Connection timed out
During handling of the above exception, another exception occurred:
URLError Traceback (most recent call last)
Cell In[3], line 3
1 import pvlib
----> 3 df = pvlib.iotools.read_srml_month_from_solardat(
4 station='HE',
5 year=2020,
6 month=6)
8 # print the first 12 rows of data
9 show(df.head(), scrollX=True, scrollCollapse=True, paging=False, maxColumns=100, dom="tpr")
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pvlib/iotools/srml.py:217, in read_srml_month_from_solardat(station, year, month, filetype)
211 file_name = "{station}{filetype}{year:02d}{month:02d}.txt".format(
212 station=station,
213 filetype=filetype,
214 year=year % 100,
215 month=month)
216 url = "http://solardat.uoregon.edu/download/Archive/"
--> 217 data = read_srml(url + file_name)
218 return data
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pvlib/iotools/srml.py:62, in read_srml(filename)
27 def read_srml(filename):
28 """
29 Read University of Oregon SRML 1min .tsv file into pandas dataframe. The
30 SRML is described in [1]_.
(...)
60 `http://solardat.uoregon.edu/ <http://solardat.uoregon.edu/>`_
61 """
---> 62 tsv_data = pd.read_csv(filename, delimiter='\t')
63 data = format_index(tsv_data)
64 # Drop day of year and time columns
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pandas/util/_decorators.py:311, in deprecate_nonkeyword_arguments.<locals>.decorate.<locals>.wrapper(*args, **kwargs)
305 if len(args) > num_allow_args:
306 warnings.warn(
307 msg.format(arguments=arguments),
308 FutureWarning,
309 stacklevel=stacklevel,
310 )
--> 311 return func(*args, **kwargs)
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pandas/io/parsers/readers.py:680, in read_csv(filepath_or_buffer, sep, delimiter, header, names, index_col, usecols, squeeze, prefix, mangle_dupe_cols, dtype, engine, converters, true_values, false_values, skipinitialspace, skiprows, skipfooter, nrows, na_values, keep_default_na, na_filter, verbose, skip_blank_lines, parse_dates, infer_datetime_format, keep_date_col, date_parser, dayfirst, cache_dates, iterator, chunksize, compression, thousands, decimal, lineterminator, quotechar, quoting, doublequote, escapechar, comment, encoding, encoding_errors, dialect, error_bad_lines, warn_bad_lines, on_bad_lines, delim_whitespace, low_memory, memory_map, float_precision, storage_options)
665 kwds_defaults = _refine_defaults_read(
666 dialect,
667 delimiter,
(...)
676 defaults={"delimiter": ","},
677 )
678 kwds.update(kwds_defaults)
--> 680 return _read(filepath_or_buffer, kwds)
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pandas/io/parsers/readers.py:575, in _read(filepath_or_buffer, kwds)
572 _validate_names(kwds.get("names", None))
574 # Create the parser.
--> 575 parser = TextFileReader(filepath_or_buffer, **kwds)
577 if chunksize or iterator:
578 return parser
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pandas/io/parsers/readers.py:934, in TextFileReader.__init__(self, f, engine, **kwds)
931 self.options["has_index_names"] = kwds["has_index_names"]
933 self.handles: IOHandles | None = None
--> 934 self._engine = self._make_engine(f, self.engine)
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pandas/io/parsers/readers.py:1218, in TextFileReader._make_engine(self, f, engine)
1214 mode = "rb"
1215 # error: No overload variant of "get_handle" matches argument types
1216 # "Union[str, PathLike[str], ReadCsvBuffer[bytes], ReadCsvBuffer[str]]"
1217 # , "str", "bool", "Any", "Any", "Any", "Any", "Any"
-> 1218 self.handles = get_handle( # type: ignore[call-overload]
1219 f,
1220 mode,
1221 encoding=self.options.get("encoding", None),
1222 compression=self.options.get("compression", None),
1223 memory_map=self.options.get("memory_map", False),
1224 is_text=is_text,
1225 errors=self.options.get("encoding_errors", "strict"),
1226 storage_options=self.options.get("storage_options", None),
1227 )
1228 assert self.handles is not None
1229 f = self.handles.handle
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pandas/io/common.py:667, in get_handle(path_or_buf, mode, encoding, compression, memory_map, is_text, errors, storage_options)
664 codecs.lookup_error(errors)
666 # open URLs
--> 667 ioargs = _get_filepath_or_buffer(
668 path_or_buf,
669 encoding=encoding,
670 compression=compression,
671 mode=mode,
672 storage_options=storage_options,
673 )
675 handle = ioargs.filepath_or_buffer
676 handles: list[BaseBuffer]
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pandas/io/common.py:336, in _get_filepath_or_buffer(filepath_or_buffer, encoding, compression, mode, storage_options)
334 # assuming storage_options is to be interpreted as headers
335 req_info = urllib.request.Request(filepath_or_buffer, headers=storage_options)
--> 336 with urlopen(req_info) as req:
337 content_encoding = req.headers.get("Content-Encoding", None)
338 if content_encoding == "gzip":
339 # Override compression based on Content-Encoding header
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/site-packages/pandas/io/common.py:236, in urlopen(*args, **kwargs)
230 """
231 Lazy-import wrapper for stdlib urlopen, as that imports a big chunk of
232 the stdlib.
233 """
234 import urllib.request
--> 236 return urllib.request.urlopen(*args, **kwargs)
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/urllib/request.py:216, in urlopen(url, data, timeout, cafile, capath, cadefault, context)
214 else:
215 opener = _opener
--> 216 return opener.open(url, data, timeout)
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/urllib/request.py:519, in OpenerDirector.open(self, fullurl, data, timeout)
516 req = meth(req)
518 sys.audit('urllib.Request', req.full_url, req.data, req.headers, req.get_method())
--> 519 response = self._open(req, data)
521 # post-process response
522 meth_name = protocol+"_response"
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/urllib/request.py:536, in OpenerDirector._open(self, req, data)
533 return result
535 protocol = req.type
--> 536 result = self._call_chain(self.handle_open, protocol, protocol +
537 '_open', req)
538 if result:
539 return result
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/urllib/request.py:496, in OpenerDirector._call_chain(self, chain, kind, meth_name, *args)
494 for handler in handlers:
495 func = getattr(handler, meth_name)
--> 496 result = func(*args)
497 if result is not None:
498 return result
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/urllib/request.py:1377, in HTTPHandler.http_open(self, req)
1376 def http_open(self, req):
-> 1377 return self.do_open(http.client.HTTPConnection, req)
File /opt/hostedtoolcache/Python/3.10.13/x64/lib/python3.10/urllib/request.py:1351, in AbstractHTTPHandler.do_open(self, http_class, req, **http_conn_args)
1348 h.request(req.get_method(), req.selector, req.data, headers,
1349 encode_chunked=req.has_header('Transfer-encoding'))
1350 except OSError as err: # timeout error
-> 1351 raise URLError(err)
1352 r = h.getresponse()
1353 except:
URLError: <urlopen error [Errno 110] Connection timed out>
The data retrieved from the Hermiston station include measurements of the three irradiance components and additional weather parameters including temperature and humidity. A few of the parameters in the downloaded datasets are visualized below.
# Plot the three irradiance components plus air temperature and wind speed
# from the downloaded SRML dataset (one subplot per column).
axes = df[['ghi_0', 'dni_0', 'dhi_3', 'temp_air_1', 'wind_speed_1']].plot(
    subplots=True, legend=False, rot=0, figsize=(8, 8), sharex=True)
# Set y-labels and y-limits for each subplot
axes[0].set_ylabel('GHI [W/m$^2$]'), axes[0].set_ylim(-10, 1400)
axes[1].set_ylabel('DNI [W/m$^2$]'), axes[1].set_ylim(-10, 1400)
axes[2].set_ylabel('DHI [W/m$^2$]'), axes[2].set_ylim(-10, 1400)
# Fixed: label previously read 'Temperature [°]' with the unit missing;
# SRML air temperature (pvlib's temp_air) is reported in degrees Celsius.
axes[3].set_ylabel('Temperature [°C]'), axes[3].set_ylim(0, 40)
_ = axes[4].set_ylabel('Wind\nspeed [m/s]'), axes[4].set_ylim(0, 15)
