mirror of
https://github.com/sensebox/opensensmapr
synced 2025-04-05 21:00:27 +02:00
Compare commits
88 commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
8ef52f8f59 | ||
![]() |
8d0746b263 | ||
![]() |
fc2ee05f77 | ||
![]() |
e3099ca35c | ||
![]() |
19351bd487 | ||
![]() |
1732084856 | ||
![]() |
334a49a309 | ||
![]() |
95f4f889da | ||
![]() |
ddf911e6a7 | ||
![]() |
6ebfc7f50a | ||
![]() |
0e1d9e3cad | ||
![]() |
9911226a76 | ||
![]() |
97475dbbff | ||
![]() |
eec6f84806 | ||
![]() |
86e80d52c9 | ||
![]() |
86efd52bf2 | ||
![]() |
60445d70c5 | ||
![]() |
b1001b174e | ||
![]() |
e2e9e3dbb3 | ||
![]() |
7e8eb46c8e | ||
![]() |
262141751f | ||
![]() |
a22c46ba14 | ||
![]() |
37d4dde1d6 | ||
![]() |
62667ef139 | ||
![]() |
b26ca150a9 | ||
![]() |
d919f89082 | ||
![]() |
64db38c291 | ||
![]() |
e4216b3572 | ||
![]() |
a4b878fc8f | ||
![]() |
7e1f42b8b9 | ||
![]() |
ebb9f5bd1f | ||
![]() |
35c9e84302 | ||
![]() |
c8925df68d | ||
![]() |
4e1b5d6389 | ||
![]() |
8393048957 | ||
![]() |
34a5dfae19 | ||
![]() |
438eda09cd | ||
![]() |
36f4701557 | ||
![]() |
0292779ca5 | ||
![]() |
4a81cab11c | ||
![]() |
4b24aa9582 | ||
![]() |
35c3014dee | ||
![]() |
35d9ee697b | ||
![]() |
ca7c32ee64 | ||
![]() |
6d5c821d3a | ||
![]() |
b4ee1b9ff6 | ||
![]() |
fda76ed670 | ||
![]() |
0529dd9a29 | ||
![]() |
379b38046d | ||
![]() |
24851046f2 | ||
![]() |
e4825ca14c | ||
![]() |
92672ae74c | ||
![]() |
f69cf62b27 | ||
b69e5dc57f | |||
4b01bbbee1 | |||
9ddc077bfd | |||
c618907853 | |||
3e56cd1a0e | |||
8936ff270c | |||
e853430c8e | |||
e37f572a94 | |||
32d0cceb28 | |||
92cbbcbfc7 | |||
ee491673fa | |||
ddc6289ce3 | |||
de3a05bf97 | |||
8d515a5fb0 | |||
18a698b375 | |||
4d33fa9029 | |||
80dc58a298 | |||
c89cd274a5 | |||
abcfbf5910 | |||
33a9c42e54 | |||
c4da876761 | |||
6a42357ec3 | |||
aa453d6afe | |||
1976e07cec | |||
bdc72e94e1 | |||
f30bc9c185 | |||
![]() |
925909ebe8 | ||
![]() |
93b4f6fe52 | ||
![]() |
f53eeb015c | ||
![]() |
12ffb14b45 | ||
![]() |
28e767586e | ||
![]() |
4dac0a4c04 | ||
![]() |
f7cbb1bc26 | ||
![]() |
994f08ab94 | ||
![]() |
97768e7cdb |
67 changed files with 7359 additions and 1800 deletions
|
@ -1,9 +1,11 @@
|
|||
^.*\.Rproj$
|
||||
^\.Rproj\.user$
|
||||
^CHANGES\.md$
|
||||
^tools*$
|
||||
^\.travis\.yml$
|
||||
^appveyor\.yml$
|
||||
^CONDUCT\.md$
|
||||
^codecov\.yml$
|
||||
^\.lintr$
|
||||
^opensensmapr_.*\.tar\.gz$
|
||||
^cran-comments\.md$
|
||||
^CRAN-SUBMISSION$
|
||||
|
|
1
.gitignore
vendored
1
.gitignore
vendored
|
@ -5,5 +5,6 @@
|
|||
.Ruserdata
|
||||
*.Rcheck
|
||||
*.log
|
||||
cran-comments.md
|
||||
|
||||
opensensmapr_*.tar.gz
|
||||
|
|
2
.lintr
2
.lintr
|
@ -1,4 +1,4 @@
|
|||
exclusions: list('inst/doc/osem-intro.R')
|
||||
exclusions: list.files(path = 'inst/doc', full.names = TRUE)
|
||||
linters: with_defaults(
|
||||
# we use snake case
|
||||
camel_case_linter = NULL,
|
||||
|
|
25
.travis.yml
25
.travis.yml
|
@ -3,18 +3,12 @@
|
|||
language: R
|
||||
sudo: false
|
||||
cache: packages
|
||||
|
||||
r:
|
||||
- release
|
||||
- devel
|
||||
warnings_are_errors: true
|
||||
|
||||
r_github_packages:
|
||||
- r-lib/covr
|
||||
- jimhester/lintr
|
||||
|
||||
r_build_args: "--no-build-vignettes"
|
||||
r_check_args: "--as-cran --no-vignettes"
|
||||
|
||||
before_install:
|
||||
- sudo add-apt-repository ppa:ubuntugis/ubuntugis-unstable --yes
|
||||
- sudo apt-get --yes --force-yes update -qq
|
||||
|
@ -26,3 +20,20 @@ before_install:
|
|||
after_success:
|
||||
- Rscript -e 'covr::codecov()'
|
||||
- Rscript -e 'lintr::lint_package()'
|
||||
|
||||
matrix:
|
||||
include:
|
||||
# fast build
|
||||
- r: devel
|
||||
r_build_args: "--no-build-vignettes"
|
||||
r_check_args: "--no-vignettes --no-manual"
|
||||
env: NOT_CRAN=true
|
||||
|
||||
# strict builds
|
||||
- r: devel
|
||||
r_check_args: "--as-cran"
|
||||
env: NOT_CRAN=false
|
||||
- r: release
|
||||
r_check_args: "--as-cran"
|
||||
env: NOT_CRAN=false
|
||||
|
||||
|
|
27
CONDUCT.md
27
CONDUCT.md
|
@ -1,25 +1,20 @@
|
|||
# Contributor Code of Conduct
|
||||
|
||||
As contributors and maintainers of this project, we pledge to respect all people who
|
||||
contribute through reporting issues, posting feature requests, updating documentation,
|
||||
submitting pull requests or patches, and other activities.
|
||||
As contributors and maintainers of this project, we pledge to respect all people who
|
||||
contribute through any means.
|
||||
|
||||
We are committed to making participation in this project a harassment-free experience for
|
||||
everyone, regardless of level of experience, gender, gender identity and expression,
|
||||
sexual orientation, disability, personal appearance, body size, race, ethnicity, age, or religion.
|
||||
everyone, regardless of their level of experience and personal or cultural traits.
|
||||
|
||||
Examples of unacceptable behavior by participants include the use of sexual language or
|
||||
imagery, derogatory comments or personal attacks, trolling, public or private harassment,
|
||||
insults, or other unprofessional conduct.
|
||||
Examples of unacceptable behavior by participants include derogatory comments,
|
||||
personal attacks, and trolling, both in public or private.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or reject comments,
|
||||
commits, code, wiki edits, issues, and other contributions that are not aligned to this
|
||||
Code of Conduct. Project maintainers who do not follow the Code of Conduct may be removed
|
||||
from the project team.
|
||||
Project maintainers have the right and responsibility to remove, edit, or reject any
|
||||
contributions that are not aligned to this Code of Conduct. Project maintainers who
|
||||
do not follow the Code of Conduct may be removed from the project team.
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
|
||||
opening an issue or contacting one or more of the project maintainers.
|
||||
|
||||
This Code of Conduct is adapted from the Contributor Covenant
|
||||
(http:contributor-covenant.org), version 1.0.0, available at
|
||||
http://contributor-covenant.org/version/1/0/0/
|
||||
This Code of Conduct is adapted from the [Contributor Covenant version 1.0.0](http://contributor-covenant.org/version/1/0/0/).
|
||||
|
||||
|
|
24
DESCRIPTION
24
DESCRIPTION
|
@ -1,18 +1,23 @@
|
|||
Package: opensensmapr
|
||||
Type: Package
|
||||
Title: Client for the Data API of openSenseMap.org
|
||||
Version: 0.4.0
|
||||
URL: http://github.com/noerw/opensensmapR
|
||||
BugReports: http://github.com/noerw/opensensmapR/issues
|
||||
Title: Client for the Data API of 'openSenseMap.org'
|
||||
Version: 0.6.0
|
||||
URL: https://github.com/sensebox/opensensmapR
|
||||
BugReports: https://github.com/sensebox/opensensmapR/issues
|
||||
Depends:
|
||||
R (>= 3.5.0)
|
||||
Imports:
|
||||
dplyr,
|
||||
httr,
|
||||
digest,
|
||||
lazyeval,
|
||||
readr,
|
||||
purrr,
|
||||
magrittr
|
||||
Suggests:
|
||||
maps,
|
||||
maptools,
|
||||
readr,
|
||||
tibble,
|
||||
rgeos,
|
||||
sf,
|
||||
knitr,
|
||||
|
@ -25,8 +30,9 @@ Suggests:
|
|||
lintr,
|
||||
testthat,
|
||||
covr
|
||||
Authors@R: c(person("Norwin", "Roosen", role = c("aut", "cre"), email = "hello@nroo.de"),
|
||||
person("Daniel", "Nuest", role = c("ctb"), email = "daniel.nuest@uni-muenster.de", comment = c(ORCID = "0000-0003-2392-6140")))
|
||||
Authors@R: c(person("Norwin", "Roosen", role = c("aut"), email = "hello@nroo.de"),
|
||||
person("Daniel", "Nuest", role = c("ctb"), email = "daniel.nuest@uni-muenster.de", comment = c(ORCID = "0000-0003-2392-6140")),
|
||||
person("Jan", "Stenkamp", role = c("ctb", "cre"), email = "jan.stenkamp@uni-muenster.de"))
|
||||
Description: Download environmental measurements and sensor station metadata
|
||||
from the API of open data sensor web platform <https://opensensemap.org> for
|
||||
analysis in R.
|
||||
|
@ -35,8 +41,8 @@ Description: Download environmental measurements and sensor station metadata
|
|||
phenomena.
|
||||
The package aims to be compatible with 'sf' and the 'Tidyverse', and provides
|
||||
several helper functions for data exploration and transformation.
|
||||
License: GPL (>= 2) | file LICENSE
|
||||
License: GPL (>= 2)
|
||||
Encoding: UTF-8
|
||||
LazyData: true
|
||||
RoxygenNote: 6.0.1
|
||||
RoxygenNote: 7.2.3
|
||||
VignetteBuilder: knitr
|
||||
|
|
339
LICENSE
339
LICENSE
|
@ -1,339 +0,0 @@
|
|||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 2, June 1991
|
||||
|
||||
Copyright (C) 1989, 1991 Free Software Foundation, Inc., <http://fsf.org/>
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The licenses for most software are designed to take away your
|
||||
freedom to share and change it. By contrast, the GNU General Public
|
||||
License is intended to guarantee your freedom to share and change free
|
||||
software--to make sure the software is free for all its users. This
|
||||
General Public License applies to most of the Free Software
|
||||
Foundation's software and to any other program whose authors commit to
|
||||
using it. (Some other Free Software Foundation software is covered by
|
||||
the GNU Lesser General Public License instead.) You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
this service if you wish), that you receive source code or can get it
|
||||
if you want it, that you can change the software or use pieces of it
|
||||
in new free programs; and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to make restrictions that forbid
|
||||
anyone to deny you these rights or to ask you to surrender the rights.
|
||||
These restrictions translate to certain responsibilities for you if you
|
||||
distribute copies of the software, or if you modify it.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must give the recipients all the rights that
|
||||
you have. You must make sure that they, too, receive or can get the
|
||||
source code. And you must show them these terms so they know their
|
||||
rights.
|
||||
|
||||
We protect your rights with two steps: (1) copyright the software, and
|
||||
(2) offer you this license which gives you legal permission to copy,
|
||||
distribute and/or modify the software.
|
||||
|
||||
Also, for each author's protection and ours, we want to make certain
|
||||
that everyone understands that there is no warranty for this free
|
||||
software. If the software is modified by someone else and passed on, we
|
||||
want its recipients to know that what they have is not the original, so
|
||||
that any problems introduced by others will not reflect on the original
|
||||
authors' reputations.
|
||||
|
||||
Finally, any free program is threatened constantly by software
|
||||
patents. We wish to avoid the danger that redistributors of a free
|
||||
program will individually obtain patent licenses, in effect making the
|
||||
program proprietary. To prevent this, we have made it clear that any
|
||||
patent must be licensed for everyone's free use or not licensed at all.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. This License applies to any program or other work which contains
|
||||
a notice placed by the copyright holder saying it may be distributed
|
||||
under the terms of this General Public License. The "Program", below,
|
||||
refers to any such program or work, and a "work based on the Program"
|
||||
means either the Program or any derivative work under copyright law:
|
||||
that is to say, a work containing the Program or a portion of it,
|
||||
either verbatim or with modifications and/or translated into another
|
||||
language. (Hereinafter, translation is included without limitation in
|
||||
the term "modification".) Each licensee is addressed as "you".
|
||||
|
||||
Activities other than copying, distribution and modification are not
|
||||
covered by this License; they are outside its scope. The act of
|
||||
running the Program is not restricted, and the output from the Program
|
||||
is covered only if its contents constitute a work based on the
|
||||
Program (independent of having been made by running the Program).
|
||||
Whether that is true depends on what the Program does.
|
||||
|
||||
1. You may copy and distribute verbatim copies of the Program's
|
||||
source code as you receive it, in any medium, provided that you
|
||||
conspicuously and appropriately publish on each copy an appropriate
|
||||
copyright notice and disclaimer of warranty; keep intact all the
|
||||
notices that refer to this License and to the absence of any warranty;
|
||||
and give any other recipients of the Program a copy of this License
|
||||
along with the Program.
|
||||
|
||||
You may charge a fee for the physical act of transferring a copy, and
|
||||
you may at your option offer warranty protection in exchange for a fee.
|
||||
|
||||
2. You may modify your copy or copies of the Program or any portion
|
||||
of it, thus forming a work based on the Program, and copy and
|
||||
distribute such modifications or work under the terms of Section 1
|
||||
above, provided that you also meet all of these conditions:
|
||||
|
||||
a) You must cause the modified files to carry prominent notices
|
||||
stating that you changed the files and the date of any change.
|
||||
|
||||
b) You must cause any work that you distribute or publish, that in
|
||||
whole or in part contains or is derived from the Program or any
|
||||
part thereof, to be licensed as a whole at no charge to all third
|
||||
parties under the terms of this License.
|
||||
|
||||
c) If the modified program normally reads commands interactively
|
||||
when run, you must cause it, when started running for such
|
||||
interactive use in the most ordinary way, to print or display an
|
||||
announcement including an appropriate copyright notice and a
|
||||
notice that there is no warranty (or else, saying that you provide
|
||||
a warranty) and that users may redistribute the program under
|
||||
these conditions, and telling the user how to view a copy of this
|
||||
License. (Exception: if the Program itself is interactive but
|
||||
does not normally print such an announcement, your work based on
|
||||
the Program is not required to print an announcement.)
|
||||
|
||||
These requirements apply to the modified work as a whole. If
|
||||
identifiable sections of that work are not derived from the Program,
|
||||
and can be reasonably considered independent and separate works in
|
||||
themselves, then this License, and its terms, do not apply to those
|
||||
sections when you distribute them as separate works. But when you
|
||||
distribute the same sections as part of a whole which is a work based
|
||||
on the Program, the distribution of the whole must be on the terms of
|
||||
this License, whose permissions for other licensees extend to the
|
||||
entire whole, and thus to each and every part regardless of who wrote it.
|
||||
|
||||
Thus, it is not the intent of this section to claim rights or contest
|
||||
your rights to work written entirely by you; rather, the intent is to
|
||||
exercise the right to control the distribution of derivative or
|
||||
collective works based on the Program.
|
||||
|
||||
In addition, mere aggregation of another work not based on the Program
|
||||
with the Program (or with a work based on the Program) on a volume of
|
||||
a storage or distribution medium does not bring the other work under
|
||||
the scope of this License.
|
||||
|
||||
3. You may copy and distribute the Program (or a work based on it,
|
||||
under Section 2) in object code or executable form under the terms of
|
||||
Sections 1 and 2 above provided that you also do one of the following:
|
||||
|
||||
a) Accompany it with the complete corresponding machine-readable
|
||||
source code, which must be distributed under the terms of Sections
|
||||
1 and 2 above on a medium customarily used for software interchange; or,
|
||||
|
||||
b) Accompany it with a written offer, valid for at least three
|
||||
years, to give any third party, for a charge no more than your
|
||||
cost of physically performing source distribution, a complete
|
||||
machine-readable copy of the corresponding source code, to be
|
||||
distributed under the terms of Sections 1 and 2 above on a medium
|
||||
customarily used for software interchange; or,
|
||||
|
||||
c) Accompany it with the information you received as to the offer
|
||||
to distribute corresponding source code. (This alternative is
|
||||
allowed only for noncommercial distribution and only if you
|
||||
received the program in object code or executable form with such
|
||||
an offer, in accord with Subsection b above.)
|
||||
|
||||
The source code for a work means the preferred form of the work for
|
||||
making modifications to it. For an executable work, complete source
|
||||
code means all the source code for all modules it contains, plus any
|
||||
associated interface definition files, plus the scripts used to
|
||||
control compilation and installation of the executable. However, as a
|
||||
special exception, the source code distributed need not include
|
||||
anything that is normally distributed (in either source or binary
|
||||
form) with the major components (compiler, kernel, and so on) of the
|
||||
operating system on which the executable runs, unless that component
|
||||
itself accompanies the executable.
|
||||
|
||||
If distribution of executable or object code is made by offering
|
||||
access to copy from a designated place, then offering equivalent
|
||||
access to copy the source code from the same place counts as
|
||||
distribution of the source code, even though third parties are not
|
||||
compelled to copy the source along with the object code.
|
||||
|
||||
4. You may not copy, modify, sublicense, or distribute the Program
|
||||
except as expressly provided under this License. Any attempt
|
||||
otherwise to copy, modify, sublicense or distribute the Program is
|
||||
void, and will automatically terminate your rights under this License.
|
||||
However, parties who have received copies, or rights, from you under
|
||||
this License will not have their licenses terminated so long as such
|
||||
parties remain in full compliance.
|
||||
|
||||
5. You are not required to accept this License, since you have not
|
||||
signed it. However, nothing else grants you permission to modify or
|
||||
distribute the Program or its derivative works. These actions are
|
||||
prohibited by law if you do not accept this License. Therefore, by
|
||||
modifying or distributing the Program (or any work based on the
|
||||
Program), you indicate your acceptance of this License to do so, and
|
||||
all its terms and conditions for copying, distributing or modifying
|
||||
the Program or works based on it.
|
||||
|
||||
6. Each time you redistribute the Program (or any work based on the
|
||||
Program), the recipient automatically receives a license from the
|
||||
original licensor to copy, distribute or modify the Program subject to
|
||||
these terms and conditions. You may not impose any further
|
||||
restrictions on the recipients' exercise of the rights granted herein.
|
||||
You are not responsible for enforcing compliance by third parties to
|
||||
this License.
|
||||
|
||||
7. If, as a consequence of a court judgment or allegation of patent
|
||||
infringement or for any other reason (not limited to patent issues),
|
||||
conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot
|
||||
distribute so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you
|
||||
may not distribute the Program at all. For example, if a patent
|
||||
license would not permit royalty-free redistribution of the Program by
|
||||
all those who receive copies directly or indirectly through you, then
|
||||
the only way you could satisfy both it and this License would be to
|
||||
refrain entirely from distribution of the Program.
|
||||
|
||||
If any portion of this section is held invalid or unenforceable under
|
||||
any particular circumstance, the balance of the section is intended to
|
||||
apply and the section as a whole is intended to apply in other
|
||||
circumstances.
|
||||
|
||||
It is not the purpose of this section to induce you to infringe any
|
||||
patents or other property right claims or to contest validity of any
|
||||
such claims; this section has the sole purpose of protecting the
|
||||
integrity of the free software distribution system, which is
|
||||
implemented by public license practices. Many people have made
|
||||
generous contributions to the wide range of software distributed
|
||||
through that system in reliance on consistent application of that
|
||||
system; it is up to the author/donor to decide if he or she is willing
|
||||
to distribute software through any other system and a licensee cannot
|
||||
impose that choice.
|
||||
|
||||
This section is intended to make thoroughly clear what is believed to
|
||||
be a consequence of the rest of this License.
|
||||
|
||||
8. If the distribution and/or use of the Program is restricted in
|
||||
certain countries either by patents or by copyrighted interfaces, the
|
||||
original copyright holder who places the Program under this License
|
||||
may add an explicit geographical distribution limitation excluding
|
||||
those countries, so that distribution is permitted only in or among
|
||||
countries not thus excluded. In such case, this License incorporates
|
||||
the limitation as if written in the body of this License.
|
||||
|
||||
9. The Free Software Foundation may publish revised and/or new versions
|
||||
of the General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Program
|
||||
specifies a version number of this License which applies to it and "any
|
||||
later version", you have the option of following the terms and conditions
|
||||
either of that version or of any later version published by the Free
|
||||
Software Foundation. If the Program does not specify a version number of
|
||||
this License, you may choose any version ever published by the Free Software
|
||||
Foundation.
|
||||
|
||||
10. If you wish to incorporate parts of the Program into other free
|
||||
programs whose distribution conditions are different, write to the author
|
||||
to ask for permission. For software which is copyrighted by the Free
|
||||
Software Foundation, write to the Free Software Foundation; we sometimes
|
||||
make exceptions for this. Our decision will be guided by the two goals
|
||||
of preserving the free status of all derivatives of our free software and
|
||||
of promoting the sharing and reuse of software generally.
|
||||
|
||||
NO WARRANTY
|
||||
|
||||
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
|
||||
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
|
||||
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
|
||||
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
|
||||
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
|
||||
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
|
||||
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
|
||||
REPAIR OR CORRECTION.
|
||||
|
||||
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
|
||||
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
|
||||
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
|
||||
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
|
||||
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
|
||||
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
|
||||
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGES.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
convey the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
{description}
|
||||
Copyright (C) {year} {fullname}
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License along
|
||||
with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program is interactive, make it output a short notice like this
|
||||
when it starts in an interactive mode:
|
||||
|
||||
Gnomovision version 69, Copyright (C) year name of author
|
||||
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, the commands you use may
|
||||
be called something other than `show w' and `show c'; they could even be
|
||||
mouse-clicks or menu items--whatever suits your program.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or your
|
||||
school, if any, to sign a "copyright disclaimer" for the program, if
|
||||
necessary. Here is a sample; alter the names:
|
||||
|
||||
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
|
||||
`Gnomovision' (which makes passes at compilers) written by James Hacker.
|
||||
|
||||
{signature of Ty Coon}, 1 April 1989
|
||||
Ty Coon, President of Vice
|
||||
|
||||
This General Public License does not permit incorporating your program into
|
||||
proprietary programs. If your program is a subroutine library, you may
|
||||
consider it more useful to permit linking proprietary applications with the
|
||||
library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License.
|
|
@ -5,16 +5,14 @@ S3method("[",sensebox)
|
|||
S3method(osem_measurements,bbox)
|
||||
S3method(osem_measurements,default)
|
||||
S3method(osem_measurements,sensebox)
|
||||
S3method(osem_measurements_archive,default)
|
||||
S3method(osem_measurements_archive,sensebox)
|
||||
S3method(osem_phenomena,sensebox)
|
||||
S3method(plot,osem_measurements)
|
||||
S3method(plot,sensebox)
|
||||
S3method(print,osem_measurements)
|
||||
S3method(print,sensebox)
|
||||
S3method(summary,sensebox)
|
||||
export(filter.osem_measurements)
|
||||
export(filter.sensebox)
|
||||
export(mutate.osem_measurements)
|
||||
export(mutate.sensebox)
|
||||
export(osem_as_measurements)
|
||||
export(osem_as_sensebox)
|
||||
export(osem_box)
|
||||
|
@ -23,9 +21,8 @@ export(osem_clear_cache)
|
|||
export(osem_counts)
|
||||
export(osem_endpoint)
|
||||
export(osem_measurements)
|
||||
export(osem_measurements_archive)
|
||||
export(osem_phenomena)
|
||||
export(st_as_sf.osem_measurements)
|
||||
export(st_as_sf.sensebox)
|
||||
importFrom(graphics,legend)
|
||||
importFrom(graphics,par)
|
||||
importFrom(graphics,plot)
|
||||
|
|
|
@ -1,6 +1,44 @@
|
|||
# opensensmapr changelog
|
||||
This project does its best to adhere to semantic versioning.
|
||||
|
||||
### 2023-03-06: v0.6.0
|
||||
- fix package bugs to pass CRAN tests after 4 years of maintenance break
|
||||
- updated hyperlinks
|
||||
- don't throw error for empty sensors
|
||||
- updated tests
|
||||
- updated maintainer
|
||||
- updated vignettes
|
||||
- use precomputed data to create vignettes
|
||||
- change archive url to 'https://archive.opensensemap.org/' and checking its availability before requesting data
|
||||
- new features:
|
||||
- added param bbox for osem_boxes function
|
||||
- support of multiple grouptags
|
||||
|
||||
### 2019-02-09: v0.5.1
|
||||
- fix package to work with API v6
|
||||
- box$lastMeasurement may be missing now for long inactive boxes
|
||||
- add tests
|
||||
|
||||
### 2018-10-20: v0.5.0
|
||||
- fix dynamic method export
|
||||
- add `osem_measurements_archive()` to fetch measurements from the archive (#23)
|
||||
- add `box$sensors` containing a data.frame with sensor metadata
|
||||
- add sensor-IDs to `box$phenomena`
|
||||
|
||||
### 2018-09-21: v0.4.3
|
||||
- dynamically export S3 methods of foreign generics
|
||||
for compatibility with upcoming R 3.6.0
|
||||
- add `readr` as default dependency
|
||||
|
||||
### 2018-09-05: v0.4.2
|
||||
- move to sensebox GitHub organization
|
||||
- pass ... to plot.sensebox()
|
||||
|
||||
### 2018-06-07: v0.4.1
|
||||
- fix `osem_as_measurements()` returning wrong classes
|
||||
- improve vignettes
|
||||
- be on CRAN eventually.. hopefully??
|
||||
|
||||
### 2018-05-25: v0.4.0
|
||||
- add caching feature for requests; see vignette osem-serialization
|
||||
- add vignette osem-serialization
|
||||
|
@ -38,7 +76,7 @@ This project does its best to adhere to semantic versioning.
|
|||
### 2017-08-23: v0.2.0
|
||||
- add auto paging for `osem_measurements()`, allowing data retrieval for arbitrary time intervals (#2)
|
||||
- improve plots for `osem_measurements` & `sensebox` (#1)
|
||||
- add `sensorId` & `unit` colummn to `get_measurements()` output by default
|
||||
- add `sensorId` & `unit` column to `get_measurements()` output by default
|
||||
- show download progress info, hide readr output
|
||||
- shorten vignette `osem-intro`
|
||||
|
25
R/00utils.R
25
R/00utils.R
|
@ -1,13 +1,13 @@
|
|||
# parses from/to params for get_measurements_ and get_boxes_
|
||||
parse_dateparams = function (from, to) {
|
||||
from = utc_date(from)
|
||||
to = utc_date(to)
|
||||
from = date_as_utc(from)
|
||||
to = date_as_utc(to)
|
||||
if (from - to > 0) stop('"from" must be earlier than "to"')
|
||||
c(date_as_isostring(from), date_as_isostring(to))
|
||||
}
|
||||
|
||||
# NOTE: cannot handle mixed vectors of POSIXlt and POSIXct
|
||||
utc_date = function (date) {
|
||||
date_as_utc = function (date) {
|
||||
time = as.POSIXct(date)
|
||||
attr(time, 'tzone') = 'UTC'
|
||||
time
|
||||
|
@ -16,14 +16,7 @@ utc_date = function (date) {
|
|||
# NOTE: cannot handle mixed vectors of POSIXlt and POSIXct
|
||||
date_as_isostring = function (date) format.Date(date, format = '%FT%TZ')
|
||||
|
||||
#' Simple factory function meant to implement dplyr functions for other classes,
|
||||
#' which call an callback to attach the original class again after the fact.
|
||||
#'
|
||||
#' @param callback The function to call after the dplyr function
|
||||
#' @noRd
|
||||
dplyr_class_wrapper = function(callback) {
|
||||
function(.data, ..., .dots) callback(NextMethod())
|
||||
}
|
||||
isostring_as_date = function (x) as.POSIXct(strptime(x, format = '%FT%T', tz = 'GMT'))
|
||||
|
||||
#' Checks for an interactive session using interactive() and a knitr process in
|
||||
#' the callstack. See https://stackoverflow.com/a/33108841
|
||||
|
@ -33,3 +26,13 @@ is_non_interactive = function () {
|
|||
ff = sapply(sys.calls(), function(f) as.character(f[1]))
|
||||
any(ff %in% c('knit2html', 'render')) || !interactive()
|
||||
}
|
||||
|
||||
#' custom recursive lapply with better handling of NULL values
|
||||
#' from https://stackoverflow.com/a/38950304
|
||||
#' @noRd
|
||||
recursive_lapply = function(x, fn) {
|
||||
if (is.list(x))
|
||||
lapply(x, recursive_lapply, fn)
|
||||
else
|
||||
fn(x)
|
||||
}
|
||||
|
|
47
R/api.R
47
R/api.R
|
@ -4,11 +4,32 @@
|
|||
# for CSV responses (get_measurements) the readr package is a hidden dependency
|
||||
# ==============================================================================
|
||||
|
||||
default_api = 'https://api.opensensemap.org'
|
||||
|
||||
#' Get the default openSenseMap API endpoint
|
||||
#' @export
|
||||
#' @return A character string with the HTTP URL of the openSenseMap API
|
||||
osem_endpoint = function() {
|
||||
'https://api.opensensemap.org'
|
||||
osem_endpoint = function() default_api
|
||||
|
||||
#' Check if the given openSenseMap API endpoint is available
|
||||
#' @param endpoint The API base URL to check, defaulting to \code{\link{osem_endpoint}}
|
||||
#' @return \code{TRUE} if the API is available, otherwise \code{stop()} is called.
|
||||
osem_ensure_api_available = function(endpoint = osem_endpoint()) {
|
||||
code = FALSE
|
||||
try({
|
||||
code = httr::status_code(httr::GET(endpoint, path='stats'))
|
||||
}, silent = TRUE)
|
||||
|
||||
if (code == 200)
|
||||
return(TRUE)
|
||||
|
||||
errtext = paste('The API at', endpoint, 'is currently not available.')
|
||||
if (code != FALSE)
|
||||
errtext = paste0(errtext, ' (HTTP code ', code, ')')
|
||||
if (endpoint == default_api)
|
||||
errtext = c(errtext, 'If the issue persists, please check back at https://status.sensebox.de/778247404 and notify support@sensebox.de')
|
||||
stop(paste(errtext, collapse='\n '), call. = FALSE)
|
||||
FALSE
|
||||
}
|
||||
|
||||
get_boxes_ = function (..., endpoint) {
|
||||
|
@ -24,8 +45,9 @@ get_boxes_ = function (..., endpoint) {
|
|||
df = dplyr::bind_rows(boxesList)
|
||||
df$exposure = df$exposure %>% as.factor()
|
||||
df$model = df$model %>% as.factor()
|
||||
if (!is.null(df$grouptag))
|
||||
if (!is.null(df$grouptag)){
|
||||
df$grouptag = df$grouptag %>% as.factor()
|
||||
}
|
||||
df
|
||||
}
|
||||
|
||||
|
@ -34,12 +56,10 @@ get_box_ = function (boxId, endpoint, ...) {
|
|||
parse_senseboxdata()
|
||||
}
|
||||
|
||||
get_measurements_ = function (..., endpoint) {
|
||||
result = osem_get_resource(endpoint, c('boxes', 'data'), ..., type = 'text')
|
||||
|
||||
parse_measurement_csv = function (resText) {
|
||||
# parse the CSV response manually & mute readr
|
||||
suppressWarnings({
|
||||
result = readr::read_csv(result, col_types = readr::cols(
|
||||
result = readr::read_csv(resText, col_types = readr::cols(
|
||||
# factor as default would raise issues with concatenation of multiple requests
|
||||
.default = readr::col_character(),
|
||||
createdAt = readr::col_datetime(),
|
||||
|
@ -51,6 +71,11 @@ get_measurements_ = function (..., endpoint) {
|
|||
})
|
||||
|
||||
osem_as_measurements(result)
|
||||
}
|
||||
|
||||
get_measurements_ = function (..., endpoint) {
|
||||
osem_get_resource(endpoint, c('boxes', 'data'), ..., type = 'text') %>%
|
||||
parse_measurement_csv
|
||||
}
|
||||
|
||||
get_stats_ = function (endpoint, cache) {
|
||||
|
@ -69,7 +94,7 @@ get_stats_ = function (endpoint, cache) {
|
|||
#' @param cache Optional path to a directory were responses will be cached. If not NA, no requests will be made when a request for the given is already cached.
|
||||
#' @return Result of a Request to openSenseMap API
|
||||
#' @noRd
|
||||
osem_get_resource = function (host, path, ..., type = 'parsed', progress = T, cache = NA) {
|
||||
osem_get_resource = function (host, path, ..., type = 'parsed', progress = TRUE, cache = NA) {
|
||||
query = list(...)
|
||||
if (!is.na(cache)) {
|
||||
filename = osem_cache_filename(path, query, host) %>% paste(cache, ., sep = '/')
|
||||
|
@ -96,11 +121,12 @@ osem_cache_filename = function (path, query = list(), host = osem_endpoint()) {
|
|||
#'
|
||||
#' @export
|
||||
#' @examples
|
||||
#' \donttest{
|
||||
#' \dontrun{
|
||||
#' osem_boxes(cache = tempdir())
|
||||
#' osem_clear_cache()
|
||||
#'
|
||||
#' cachedir = paste(getwd(), 'osemcache', sep = '/')
|
||||
#' dir.create(file.path(cachedir), showWarnings = FALSE)
|
||||
#' osem_boxes(cache = cachedir)
|
||||
#' osem_clear_cache(cachedir)
|
||||
#' }
|
||||
|
@ -112,6 +138,9 @@ osem_clear_cache = function (location = tempdir()) {
|
|||
}
|
||||
|
||||
osem_request_ = function (host, path, query = list(), type = 'parsed', progress = TRUE) {
|
||||
# stop() if API is not available
|
||||
osem_ensure_api_available(host)
|
||||
|
||||
progress = if (progress && !is_non_interactive()) httr::progress() else NULL
|
||||
res = httr::GET(host, progress, path = path, query = query)
|
||||
|
||||
|
|
173
R/archive.R
Normal file
173
R/archive.R
Normal file
|
@ -0,0 +1,173 @@
|
|||
# client for archive.opensensemap.org
|
||||
# in this archive, CSV files for measurements of each sensor per day is provided.
|
||||
|
||||
default_archive_url = 'https://archive.opensensemap.org/'
|
||||
|
||||
#' Returns the default endpoint for the archive *download*
|
||||
#' While the front end domain is archive.opensensemap.org, file downloads
|
||||
#' are provided via sciebo.
|
||||
osem_archive_endpoint = function () default_archive_url
|
||||
|
||||
#' Fetch day-wise measurements for a single box from the openSenseMap archive.
|
||||
#'
|
||||
#' This function is significantly faster than \code{\link{osem_measurements}} for large
|
||||
#' time-frames, as daily CSV dumps for each sensor from
|
||||
#' \href{https://archive.opensensemap.org}{archive.opensensemap.org} are used.
|
||||
#' Note that the latest data available is from the previous day.
|
||||
#'
|
||||
#' By default, data for all sensors of a box is fetched, but you can select a
|
||||
#' subset with a \code{\link[dplyr]{dplyr}}-style NSE filter expression.
|
||||
#'
|
||||
#' The function will warn when no data is available in the selected period,
|
||||
#' but continue the remaining download.
|
||||
#'
|
||||
#' @param x A `sensebox data.frame` of a single box, as retrieved via \code{\link{osem_box}},
|
||||
#' to download measurements for.
|
||||
#' @param ... see parameters below
|
||||
#' @param fromDate Start date for measurement download, must be convertable via `as.Date`.
|
||||
#' @param toDate End date for measurement download (inclusive).
|
||||
#' @param sensorFilter A NSE formula matching to \code{x$sensors}, selecting a subset of sensors.
|
||||
#' @param progress Whether to print download progress information, defaults to \code{TRUE}.
|
||||
#' @return A \code{tbl_df} containing observations of all selected sensors for each time stamp.
|
||||
#'
|
||||
#' @seealso \href{https://archive.opensensemap.org}{openSenseMap archive}
|
||||
#' @seealso \code{\link{osem_measurements}}
|
||||
#' @seealso \code{\link{osem_box}}
|
||||
#'
|
||||
#' @export
|
||||
osem_measurements_archive = function (x, ...) UseMethod('osem_measurements_archive')
|
||||
|
||||
#' @export
|
||||
osem_measurements_archive.default = function (x, ...) {
|
||||
# NOTE: to implement for a different class:
|
||||
# in order to call `archive_fetch_measurements()`, `box` must be a dataframe
|
||||
# with a single row and the columns `X_id` and `name`
|
||||
stop(paste('not implemented for class', toString(class(x))))
|
||||
}
|
||||
|
||||
|
||||
# ==============================================================================
|
||||
#
|
||||
#' @describeIn osem_measurements_archive Get daywise measurements for one or more sensors of a single box.
|
||||
#' @export
|
||||
#' @examples
|
||||
#' \donttest{
|
||||
#' # fetch measurements for a single day
|
||||
#' box = osem_box('593bcd656ccf3b0011791f5a')
|
||||
#' m = osem_measurements_archive(box, as.POSIXlt('2018-09-13'))
|
||||
#'
|
||||
#' # fetch measurements for a date range and selected sensors
|
||||
#' sensors = ~ phenomenon %in% c('Temperatur', 'Beleuchtungsstärke')
|
||||
#' m = osem_measurements_archive(
|
||||
#' box,
|
||||
#' as.POSIXlt('2018-09-01'), as.POSIXlt('2018-09-30'),
|
||||
#' sensorFilter = sensors
|
||||
#' )
|
||||
#' }
|
||||
osem_measurements_archive.sensebox = function (x, fromDate, toDate = fromDate, sensorFilter = ~ TRUE, ..., progress = TRUE) {
|
||||
if (nrow(x) != 1)
|
||||
stop('this function only works for exactly one senseBox!')
|
||||
|
||||
# filter sensors using NSE, for example: `~ phenomenon == 'Temperatur'`
|
||||
sensors = x$sensors[[1]] %>%
|
||||
dplyr::filter(lazyeval::f_eval(sensorFilter, .))
|
||||
|
||||
# fetch each sensor separately
|
||||
dfs = by(sensors, 1:nrow(sensors), function (sensor) {
|
||||
df = archive_fetch_measurements(x, sensor$id, fromDate, toDate, progress) %>%
|
||||
dplyr::select(createdAt, value) %>%
|
||||
#dplyr::mutate(unit = sensor$unit, sensor = sensor$sensor) %>% # inject sensor metadata
|
||||
dplyr::rename_at(., 'value', function(v) sensor$phenomenon)
|
||||
})
|
||||
|
||||
# merge all data.frames by timestamp
|
||||
dfs %>% purrr::reduce(dplyr::full_join, 'createdAt')
|
||||
}
|
||||
|
||||
#' fetch measurements from archive from a single box, and a single sensor
|
||||
#'
|
||||
#' @param box A sensebox data.frame with a single box
|
||||
#' @param sensorId Character specifying the sensor
|
||||
#' @param fromDate Start date for measurement download, must be convertable via `as.Date`.
|
||||
#' @param toDate End date for measurement download (inclusive).
|
||||
#' @param progress whether to print progress
|
||||
#' @return A \code{tbl_df} containing observations of all selected sensors for each time stamp.
|
||||
archive_fetch_measurements = function (box, sensorId, fromDate, toDate, progress) {
|
||||
osem_ensure_archive_available()
|
||||
|
||||
dates = list()
|
||||
from = fromDate
|
||||
while (from <= toDate) {
|
||||
dates = append(dates, list(from))
|
||||
from = from + as.difftime(1, units = 'days')
|
||||
}
|
||||
|
||||
http_handle = httr::handle(osem_archive_endpoint()) # reuse the http connection for speed!
|
||||
progress = if (progress && !is_non_interactive()) httr::progress() else NULL
|
||||
|
||||
measurements = lapply(dates, function(date) {
|
||||
url = build_archive_url(date, box, sensorId)
|
||||
res = httr::GET(url, progress, handle = http_handle)
|
||||
|
||||
if (httr::http_error(res)) {
|
||||
warning(paste(
|
||||
httr::status_code(res),
|
||||
'on day', format.Date(date, '%F'),
|
||||
'for sensor', sensorId
|
||||
))
|
||||
|
||||
if (httr::status_code(res) == 404)
|
||||
return(data.frame(createdAt = as.POSIXlt(x = integer(0), origin = date), value = double()))
|
||||
}
|
||||
|
||||
measurements = httr::content(res, type = 'text', encoding = 'UTF-8') %>%
|
||||
parse_measurement_csv
|
||||
})
|
||||
|
||||
measurements %>% dplyr::bind_rows()
|
||||
}
|
||||
|
||||
#' returns URL to fetch measurements from a sensor for a specific date,
|
||||
#' based on `osem_archive_endpoint()`
|
||||
#' @noRd
|
||||
build_archive_url = function (date, box, sensorId) {
|
||||
d = format.Date(date, '%F')
|
||||
format = 'csv'
|
||||
|
||||
paste(
|
||||
osem_archive_endpoint(),
|
||||
d,
|
||||
osem_box_to_archivename(box),
|
||||
paste(paste(sensorId, d, sep = '-'), format, sep = '.'),
|
||||
sep = '/'
|
||||
)
|
||||
}
|
||||
|
||||
#' replace chars in box name according to archive script:
|
||||
#' https://github.com/sensebox/osem-archiver/blob/612e14b/helpers.sh#L66
|
||||
#'
|
||||
#' @param box A sensebox data.frame
|
||||
#' @return character with archive identifier for each box
|
||||
osem_box_to_archivename = function (box) {
|
||||
name = gsub('[^A-Za-z0-9._-]', '_', box$name)
|
||||
paste(box$X_id, name, sep = '-')
|
||||
}
|
||||
|
||||
#' Check if the given openSenseMap archive endpoint is available
|
||||
#' @param endpoint The archive base URL to check, defaulting to \code{\link{osem_archive_endpoint}}
|
||||
#' @return \code{TRUE} if the archive is available, otherwise \code{stop()} is called.
|
||||
osem_ensure_archive_available = function(endpoint = osem_archive_endpoint()) {
|
||||
code = FALSE
|
||||
try({
|
||||
code = httr::status_code(httr::GET(endpoint))
|
||||
}, silent = TRUE)
|
||||
|
||||
if (code == 200)
|
||||
return(TRUE)
|
||||
|
||||
errtext = paste('The archive at', endpoint, 'is currently not available.')
|
||||
if (code != FALSE)
|
||||
errtext = paste0(errtext, ' (HTTP code ', code, ')')
|
||||
stop(paste(errtext, collapse='\n '), call. = FALSE)
|
||||
FALSE
|
||||
}
|
74
R/box.R
74
R/box.R
|
@ -18,6 +18,10 @@
|
|||
#' @param to Only return boxes that were measuring earlier than this time
|
||||
#' @param phenomenon Only return boxes that measured the given phenomenon in the
|
||||
#' time interval as specified through \code{date} or \code{from / to}
|
||||
#' @param bbox Only return boxes that are within the given boundingbox,
|
||||
#' vector of 4 WGS84 coordinates.
|
||||
#' Order is: longitude southwest, latitude southwest, longitude northeast, latitude northeast.
|
||||
#' Minimal and maximal values are: -180, 180 for longitude and -90, 90 for latitude.
|
||||
#' @param endpoint The URL of the openSenseMap API instance
|
||||
#' @param progress Whether to print download progress information, defaults to \code{TRUE}
|
||||
#' @param cache Whether to cache the result, defaults to false.
|
||||
|
@ -33,7 +37,7 @@
|
|||
#' @export
|
||||
#' @examples
|
||||
#'
|
||||
#' \donttest{
|
||||
#' \dontrun{
|
||||
#' # get *all* boxes available on the API
|
||||
#' b = osem_boxes()
|
||||
#'
|
||||
|
@ -67,7 +71,8 @@
|
|||
#' b = osem_boxes(progress = FALSE)
|
||||
#' }
|
||||
osem_boxes = function (exposure = NA, model = NA, grouptag = NA,
|
||||
date = NA, from = NA, to = NA, phenomenon = NA,
|
||||
date = NA, from = NA, to = NA, phenomenon = NA,
|
||||
bbox = NA,
|
||||
endpoint = osem_endpoint(),
|
||||
progress = TRUE,
|
||||
cache = NA) {
|
||||
|
@ -93,11 +98,13 @@ osem_boxes = function (exposure = NA, model = NA, grouptag = NA,
|
|||
if (!is.na(model)) query$model = model
|
||||
if (!is.na(grouptag)) query$grouptag = grouptag
|
||||
if (!is.na(phenomenon)) query$phenomenon = phenomenon
|
||||
if (all(!is.na(bbox))) query$bbox = paste(bbox, collapse = ', ')
|
||||
|
||||
if (!is.na(to) && !is.na(from))
|
||||
query$date = parse_dateparams(from, to) %>% paste(collapse = ',')
|
||||
else if (!is.na(date))
|
||||
query$date = utc_date(date) %>% date_as_isostring()
|
||||
query$date = date_as_utc(date) %>% date_as_isostring()
|
||||
|
||||
|
||||
do.call(get_boxes_, query)
|
||||
}
|
||||
|
@ -118,7 +125,7 @@ osem_boxes = function (exposure = NA, model = NA, grouptag = NA,
|
|||
#' @seealso \code{\link{osem_clear_cache}}
|
||||
#' @export
|
||||
#' @examples
|
||||
#' \donttest{
|
||||
#' \dontrun{
|
||||
#' # get a specific box by ID
|
||||
#' b = osem_box('57000b8745fd40c8196ad04c')
|
||||
#'
|
||||
|
@ -147,32 +154,63 @@ parse_senseboxdata = function (boxdata) {
|
|||
# to allow a simple data.frame structure
|
||||
sensors = boxdata$sensors
|
||||
location = boxdata$currentLocation
|
||||
boxdata[c('loc', 'locations', 'currentLocation', 'sensors', 'image', 'boxType')] = NULL
|
||||
thebox = as.data.frame(boxdata, stringsAsFactors = F)
|
||||
lastMeasurement = boxdata$lastMeasurementAt # rename for backwards compat < 0.5.1
|
||||
grouptags = boxdata$grouptag
|
||||
boxdata[c(
|
||||
'loc', 'locations', 'currentLocation', 'sensors', 'image', 'boxType', 'lastMeasurementAt', 'grouptag'
|
||||
)] = NULL
|
||||
thebox = as.data.frame(boxdata, stringsAsFactors = FALSE)
|
||||
|
||||
# parse timestamps (updatedAt might be not defined)
|
||||
thebox$createdAt = as.POSIXct(strptime(thebox$createdAt, format = '%FT%T', tz = 'GMT'))
|
||||
thebox$createdAt = isostring_as_date(thebox$createdAt)
|
||||
if (!is.null(thebox$updatedAt))
|
||||
thebox$updatedAt = as.POSIXct(strptime(thebox$updatedAt, format = '%FT%T', tz = 'GMT'))
|
||||
thebox$updatedAt = isostring_as_date(thebox$updatedAt)
|
||||
if (!is.null(lastMeasurement))
|
||||
thebox$lastMeasurement = isostring_as_date(lastMeasurement)
|
||||
|
||||
# add empty sensortype to sensors without type
|
||||
if(!('sensorType' %in% names(sensors[[1]]))) {
|
||||
sensors[[1]]$sensorType <- NA
|
||||
}
|
||||
|
||||
# create a dataframe of sensors
|
||||
thebox$sensors = sensors %>%
|
||||
recursive_lapply(function (x) if (is.null(x)) NA else x) %>% # replace NULLs with NA
|
||||
lapply(as.data.frame, stringsAsFactors = FALSE) %>%
|
||||
dplyr::bind_rows(.) %>%
|
||||
dplyr::select(phenomenon = title, id = X_id, unit, sensor = sensorType) %>%
|
||||
list
|
||||
|
||||
# extract metadata from sensors
|
||||
thebox$phenomena = lapply(sensors, function(s) s$title) %>% unlist %>% list
|
||||
|
||||
# FIXME: if one sensor has NA, max() returns bullshit
|
||||
get_last_measurement = function(s) {
|
||||
if (!is.null(s$lastMeasurement))
|
||||
as.POSIXct(strptime(s$lastMeasurement$createdAt, format = '%FT%T', tz = 'GMT'))
|
||||
else
|
||||
NA
|
||||
}
|
||||
thebox$lastMeasurement = max(lapply(sensors, get_last_measurement)[[1]])
|
||||
thebox$phenomena = sensors %>%
|
||||
stats::setNames(lapply(., function (s) s$`_id`)) %>%
|
||||
lapply(function(s) s$title) %>%
|
||||
unlist %>% list # convert to vector
|
||||
|
||||
# extract coordinates & transform to simple feature object
|
||||
thebox$lon = location$coordinates[[1]]
|
||||
thebox$lat = location$coordinates[[2]]
|
||||
thebox$locationtimestamp = isostring_as_date(location$timestamp)
|
||||
if (length(location$coordinates) == 3)
|
||||
thebox$height = location$coordinates[[3]]
|
||||
|
||||
# extract grouptag(s) from box
|
||||
if (length(grouptags) == 0)
|
||||
thebox$grouptag = NULL
|
||||
if (length(grouptags) > 0) {
|
||||
# if box does not have grouptag dont set attribute
|
||||
if(grouptags[[1]] == '') {
|
||||
thebox$grouptag = NULL
|
||||
}
|
||||
else {
|
||||
thebox$grouptag = grouptags[[1]]
|
||||
}
|
||||
}
|
||||
if (length(grouptags) > 1)
|
||||
thebox$grouptag2 = grouptags[[2]]
|
||||
if (length(grouptags) > 2)
|
||||
thebox$grouptag3 = grouptags[[3]]
|
||||
|
||||
# attach a custom class for methods
|
||||
osem_as_sensebox(thebox)
|
||||
}
|
||||
|
|
|
@ -1,14 +1,5 @@
|
|||
#' @export
|
||||
plot.sensebox = function (x, ..., mar = c(2, 2, 1, 1)) {
|
||||
if (
|
||||
!requireNamespace('sf', quietly = TRUE) ||
|
||||
!requireNamespace('maps', quietly = TRUE) ||
|
||||
!requireNamespace('maptools', quietly = TRUE) ||
|
||||
!requireNamespace('rgeos', quietly = TRUE)
|
||||
) {
|
||||
stop('this functions requires additional packages. install them with
|
||||
install.packages(c("sf", "maps", "maptools", "rgeos"))')
|
||||
}
|
||||
|
||||
geom = x %>%
|
||||
sf::st_as_sf() %>%
|
||||
|
@ -20,12 +11,12 @@ plot.sensebox = function (x, ..., mar = c(2, 2, 1, 1)) {
|
|||
sf::st_as_sf() %>%
|
||||
sf::st_geometry()
|
||||
|
||||
oldpar = par()
|
||||
oldpar <- par(no.readonly = TRUE)
|
||||
on.exit(par(oldpar))
|
||||
par(mar = mar)
|
||||
plot(world, col = 'gray', xlim = bbox[c(1, 3)], ylim = bbox[c(2, 4)], axes = T)
|
||||
plot(geom, add = T, col = x$exposure)
|
||||
plot(world, col = 'gray', xlim = bbox[c(1, 3)], ylim = bbox[c(2, 4)], axes = TRUE, ...)
|
||||
plot(geom, add = TRUE, col = x$exposure, ...)
|
||||
legend('left', legend = levels(x$exposure), col = 1:length(x$exposure), pch = 1)
|
||||
par(mar = oldpar$mar)
|
||||
|
||||
invisible(x)
|
||||
}
|
||||
|
@ -39,14 +30,14 @@ print.sensebox = function(x, columns = c('name', 'exposure', 'lastMeasurement',
|
|||
|
||||
#' @export
|
||||
summary.sensebox = function(object, ...) {
|
||||
cat('boxes total:', nrow(object), fill = T)
|
||||
cat('boxes total:', nrow(object), fill = TRUE)
|
||||
cat('\nboxes by exposure:')
|
||||
table(object$exposure) %>% print()
|
||||
cat('\nboxes by model:')
|
||||
table(object$model) %>% print()
|
||||
cat('\n')
|
||||
|
||||
diffNow = (utc_date(Sys.time()) - object$lastMeasurement) %>% as.numeric(unit = 'hours')
|
||||
diffNow = (date_as_utc(Sys.time()) - object$lastMeasurement) %>% as.numeric(unit = 'hours')
|
||||
list(
|
||||
'last_measurement_within' = c(
|
||||
'1h' = nrow(dplyr::filter(object, diffNow <= 1)),
|
||||
|
@ -59,10 +50,10 @@ summary.sensebox = function(object, ...) {
|
|||
|
||||
oldest = object[object$createdAt == min(object$createdAt), ]
|
||||
newest = object[object$createdAt == max(object$createdAt), ]
|
||||
cat('oldest box:', format(oldest$createdAt, '%F %T'), paste0('(', oldest$name, ')'), fill = T)
|
||||
cat('newest box:', format(newest$createdAt, '%F %T'), paste0('(', newest$name, ')'), fill = T)
|
||||
cat('oldest box:', format(oldest$createdAt, '%F %T'), paste0('(', oldest$name, ')'), fill = TRUE)
|
||||
cat('newest box:', format(newest$createdAt, '%F %T'), paste0('(', newest$name, ')'), fill = TRUE)
|
||||
|
||||
cat('\nsensors per box:', fill = T)
|
||||
cat('\nsensors per box:', fill = TRUE)
|
||||
lapply(object$phenomena, length) %>%
|
||||
as.numeric() %>%
|
||||
summary() %>%
|
||||
|
@ -71,52 +62,12 @@ summary.sensebox = function(object, ...) {
|
|||
invisible(object)
|
||||
}
|
||||
|
||||
# ==============================================================================
|
||||
#
|
||||
#' Converts a foreign object to a sensebox data.frame.
|
||||
#' @param x A data.frame to attach the class to
|
||||
#' @return data.frame of class \code{sensebox}
|
||||
#' @export
|
||||
osem_as_sensebox = function(x) {
|
||||
ret = as.data.frame(x)
|
||||
class(ret) = c('sensebox', class(x))
|
||||
ret
|
||||
}
|
||||
|
||||
#' Return rows with matching conditions, while maintaining class & attributes
|
||||
#' @param .data A sensebox data.frame to filter
|
||||
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
||||
#' @param ... other arguments
|
||||
#' @seealso \code{\link[dplyr]{filter}}
|
||||
#' @export
|
||||
filter.sensebox = dplyr_class_wrapper(osem_as_sensebox)
|
||||
|
||||
#' Add new variables to the data, while maintaining class & attributes
|
||||
#' @param .data A sensebox data.frame to mutate
|
||||
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
||||
#' @param ... other arguments
|
||||
#' @seealso \code{\link[dplyr]{mutate}}
|
||||
#' @export
|
||||
mutate.sensebox = dplyr_class_wrapper(osem_as_sensebox)
|
||||
|
||||
# ==============================================================================
|
||||
#
|
||||
#' maintains class / attributes after subsetting
|
||||
#' @noRd
|
||||
#' @export
|
||||
`[.sensebox` = function(x, i, ...) {
|
||||
s = NextMethod('[')
|
||||
mostattributes(s) = attributes(s)
|
||||
s
|
||||
}
|
||||
|
||||
# ==============================================================================
|
||||
#
|
||||
#' Convert a \code{sensebox} dataframe to an \code{\link[sf]{st_sf}} object.
|
||||
#'
|
||||
#' @param x The object to convert
|
||||
#' @param ... maybe more objects to convert
|
||||
#' @return The object with an st_geometry column attached.
|
||||
#' @export
|
||||
st_as_sf.sensebox = function (x, ...) {
|
||||
NextMethod(x, ..., coords = c('lon', 'lat'), crs = 4326)
|
||||
}
|
||||
|
|
126
R/external_generics.R
Normal file
126
R/external_generics.R
Normal file
|
@ -0,0 +1,126 @@
|
|||
# helpers for the dplyr & co related functions
|
||||
# also delayed method registration
|
||||
#
|
||||
# Methods for external generics (except when from `base`) should be registered,
|
||||
# but not exported: see https://github.com/klutometis/roxygen/issues/796
|
||||
# Until roxygen supports this usecase properly, we're using a different
|
||||
# workaround than suggested, copied from edzer's sf package:
|
||||
# dynamically register the methods only when the related package is loaded as well.
|
||||
|
||||
|
||||
# ====================== base generics =========================
|
||||
|
||||
#' maintains class / attributes after subsetting
|
||||
#' @noRd
|
||||
#' @export
|
||||
`[.sensebox` = function(x, i, ...) {
|
||||
s = NextMethod('[')
|
||||
mostattributes(s) = attributes(s)
|
||||
s
|
||||
}
|
||||
|
||||
#' maintains class / attributes after subsetting
|
||||
#' @noRd
|
||||
#' @export
|
||||
`[.osem_measurements` = function(x, i, ...) {
|
||||
s = NextMethod()
|
||||
mostattributes(s) = attributes(x)
|
||||
s
|
||||
}
|
||||
|
||||
|
||||
# ====================== dplyr generics =========================
|
||||
|
||||
#' Simple factory function meant to implement dplyr functions for other classes,
|
||||
#' which call an callback to attach the original class again after the fact.
|
||||
#'
|
||||
#' @param callback The function to call after the dplyr function
|
||||
#' @noRd
|
||||
dplyr_class_wrapper = function(callback) {
|
||||
function(.data, ..., .dots) callback(NextMethod())
|
||||
}
|
||||
|
||||
#' Return rows with matching conditions, while maintaining class & attributes
|
||||
#' @param .data A sensebox data.frame to filter
|
||||
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
||||
#' @param ... other arguments
|
||||
#' @seealso \code{\link[dplyr]{filter}}
|
||||
filter.sensebox = dplyr_class_wrapper(osem_as_sensebox)
|
||||
|
||||
#' Add new variables to the data, while maintaining class & attributes
|
||||
#' @param .data A sensebox data.frame to mutate
|
||||
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
||||
#' @param ... other arguments
|
||||
#' @seealso \code{\link[dplyr]{mutate}}
|
||||
mutate.sensebox = dplyr_class_wrapper(osem_as_sensebox)
|
||||
|
||||
#' Return rows with matching conditions, while maintaining class & attributes
|
||||
#' @param .data A osem_measurements data.frame to filter
|
||||
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
||||
#' @param ... other arguments
|
||||
#' @seealso \code{\link[dplyr]{filter}}
|
||||
filter.osem_measurements = dplyr_class_wrapper(osem_as_measurements)
|
||||
|
||||
#' Add new variables to the data, while maintaining class & attributes
|
||||
#' @param .data A osem_measurements data.frame to mutate
|
||||
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
||||
#' @param ... other arguments
|
||||
#' @seealso \code{\link[dplyr]{mutate}}
|
||||
mutate.osem_measurements = dplyr_class_wrapper(osem_as_measurements)
|
||||
|
||||
|
||||
# ====================== sf generics =========================
|
||||
|
||||
#' Convert a \code{sensebox} dataframe to an \code{\link[sf]{st_sf}} object.
|
||||
#'
|
||||
#' @param x The object to convert
|
||||
#' @param ... maybe more objects to convert
|
||||
#' @return The object with an st_geometry column attached.
|
||||
st_as_sf.sensebox = function (x, ...) {
|
||||
NextMethod(x, ..., coords = c('lon', 'lat'), crs = 4326)
|
||||
}
|
||||
|
||||
#' Convert a \code{osem_measurements} dataframe to an \code{\link[sf]{st_sf}} object.
|
||||
#'
|
||||
#' @param x The object to convert
|
||||
#' @param ... maybe more objects to convert
|
||||
#' @return The object with an st_geometry column attached.
|
||||
st_as_sf.osem_measurements = function (x, ...) {
|
||||
NextMethod(x, ..., coords = c('lon', 'lat'), crs = 4326)
|
||||
}
|
||||
|
||||
|
||||
# from: https://github.com/tidyverse/hms/blob/master/R/zzz.R
|
||||
# Thu Apr 19 10:53:24 CEST 2018
|
||||
register_s3_method <- function(pkg, generic, class, fun = NULL) {
|
||||
stopifnot(is.character(pkg), length(pkg) == 1)
|
||||
stopifnot(is.character(generic), length(generic) == 1)
|
||||
stopifnot(is.character(class), length(class) == 1)
|
||||
|
||||
if (is.null(fun)) {
|
||||
fun <- get(paste0(generic, ".", class), envir = parent.frame())
|
||||
} else {
|
||||
stopifnot(is.function(fun))
|
||||
}
|
||||
|
||||
if (pkg %in% loadedNamespaces()) {
|
||||
registerS3method(generic, class, fun, envir = asNamespace(pkg))
|
||||
}
|
||||
|
||||
# Always register hook in case package is later unloaded & reloaded
|
||||
setHook(
|
||||
packageEvent(pkg, "onLoad"),
|
||||
function(...) {
|
||||
registerS3method(generic, class, fun, envir = asNamespace(pkg))
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
.onLoad = function(libname, pkgname) {
|
||||
register_s3_method('dplyr', 'filter', 'sensebox')
|
||||
register_s3_method('dplyr', 'mutate', 'sensebox')
|
||||
register_s3_method('dplyr', 'filter', 'osem_measurements')
|
||||
register_s3_method('dplyr', 'mutate', 'osem_measurements')
|
||||
register_s3_method('sf', 'st_as_sf', 'sensebox')
|
||||
register_s3_method('sf', 'st_as_sf', 'osem_measurements')
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
# ==============================================================================
|
||||
#
|
||||
#' Get the Measurements of a Phenomenon on opensensemap.org
|
||||
#' Fetch the Measurements of a Phenomenon on opensensemap.org
|
||||
#'
|
||||
#' Measurements can be retrieved either for a set of boxes, or through a spatial
|
||||
#' bounding box filter. To get all measurements, the \code{default} function applies
|
||||
|
@ -39,7 +39,7 @@ osem_measurements = function (x, ...) UseMethod('osem_measurements')
|
|||
#' @describeIn osem_measurements Get measurements from \strong{all} senseBoxes.
|
||||
#' @export
|
||||
#' @examples
|
||||
#' \donttest{
|
||||
#' \dontrun{
|
||||
#' # get measurements from all boxes on the phenomenon 'PM10' from the last 48h
|
||||
#' m = osem_measurements('PM10')
|
||||
#'
|
||||
|
@ -72,7 +72,7 @@ osem_measurements.default = function (x, ...) {
|
|||
#' @describeIn osem_measurements Get measurements by a spatial filter.
|
||||
#' @export
|
||||
#' @examples
|
||||
#' \donttest{
|
||||
#' \dontrun{
|
||||
#' # get measurements from sensors within a custom WGS84 bounding box
|
||||
#' bbox = structure(c(7, 51, 8, 52), class = 'bbox')
|
||||
#' m = osem_measurements(bbox, 'Temperatur')
|
||||
|
@ -80,6 +80,7 @@ osem_measurements.default = function (x, ...) {
|
|||
#' # construct a bounding box 12km around berlin using the sf package,
|
||||
#' # and get measurements from stations within that box
|
||||
#' library(sf)
|
||||
#' library(units)
|
||||
#' bbox2 = st_point(c(13.4034, 52.5120)) %>%
|
||||
#' st_sfc(crs = 4326) %>%
|
||||
#' st_transform(3857) %>% # allow setting a buffer in meters
|
||||
|
@ -98,7 +99,7 @@ osem_measurements.bbox = function (x, phenomenon, exposure = NA,
|
|||
from = NA, to = NA, columns = NA,
|
||||
...,
|
||||
endpoint = osem_endpoint(),
|
||||
progress = T,
|
||||
progress = TRUE,
|
||||
cache = NA) {
|
||||
bbox = x
|
||||
environment() %>%
|
||||
|
@ -136,7 +137,7 @@ osem_measurements.sensebox = function (x, phenomenon, exposure = NA,
|
|||
from = NA, to = NA, columns = NA,
|
||||
...,
|
||||
endpoint = osem_endpoint(),
|
||||
progress = T,
|
||||
progress = TRUE,
|
||||
cache = NA) {
|
||||
boxes = x
|
||||
environment() %>%
|
||||
|
@ -180,8 +181,8 @@ parse_get_measurements_params = function (params) {
|
|||
|
||||
if (!is.na(params$from) && !is.na(params$to)) {
|
||||
parse_dateparams(params$from, params$to) # only for validation sideeffect
|
||||
query$`from-date` = utc_date(params$from)
|
||||
query$`to-date` = utc_date(params$to)
|
||||
query$`from-date` = date_as_utc(params$from)
|
||||
query$`to-date` = date_as_utc(params$to)
|
||||
}
|
||||
|
||||
if (!is.na(params$exposure)) query$exposure = params$exposure
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
#' @export
|
||||
plot.osem_measurements = function (x, ..., mar = c(2, 4, 1, 1)) {
|
||||
oldpar = par()
|
||||
oldpar <- par(no.readonly = TRUE)
|
||||
on.exit(par(oldpar))
|
||||
par(mar = mar)
|
||||
plot(value~createdAt, x, col = factor(x$sensorId), xlab = NA, ylab = x$unit[1], ...)
|
||||
par(mar = oldpar$mar)
|
||||
invisible(x)
|
||||
}
|
||||
|
||||
|
@ -14,47 +14,12 @@ print.osem_measurements = function (x, ...) {
|
|||
}
|
||||
|
||||
#' Converts a foreign object to an osem_measurements data.frame.
|
||||
#' @param x A data.frame to attach the class to
|
||||
#' @param x A data.frame to attach the class to.
|
||||
#' Should have at least a `value` and `createdAt` column.
|
||||
#' @return data.frame of class \code{osem_measurements}
|
||||
#' @export
|
||||
osem_as_measurements = function(x) {
|
||||
ret = as.data.frame(x)
|
||||
class(ret) = c('osem_measurements', class(x))
|
||||
ret = tibble::as_tibble(x)
|
||||
class(ret) = c('osem_measurements', class(ret))
|
||||
ret
|
||||
}
|
||||
|
||||
#' Return rows with matching conditions, while maintaining class & attributes
|
||||
#' @param .data A osem_measurements data.frame to filter
|
||||
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
||||
#' @param ... other arguments
|
||||
#' @seealso \code{\link[dplyr]{filter}}
|
||||
#' @export
|
||||
filter.osem_measurements = dplyr_class_wrapper(osem_as_measurements)
|
||||
|
||||
#' Add new variables to the data, while maintaining class & attributes
|
||||
#' @param .data A osem_measurements data.frame to mutate
|
||||
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
||||
#' @param ... other arguments
|
||||
#' @seealso \code{\link[dplyr]{mutate}}
|
||||
#' @export
|
||||
mutate.osem_measurements = dplyr_class_wrapper(osem_as_measurements)
|
||||
|
||||
#' maintains class / attributes after subsetting
|
||||
#' @noRd
|
||||
#' @export
|
||||
`[.osem_measurements` = function(x, i, ...) {
|
||||
s = NextMethod()
|
||||
mostattributes(s) = attributes(x)
|
||||
s
|
||||
}
|
||||
|
||||
# ==============================================================================
|
||||
#
|
||||
#' Convert a \code{osem_measurements} dataframe to an \code{\link[sf]{st_sf}} object.
|
||||
#'
|
||||
#' @param x The object to convert
|
||||
#' @param ... maybe more objects to convert
|
||||
#' @return The object with an st_geometry column attached.
|
||||
#' @export
|
||||
st_as_sf.osem_measurements = function (x, ...) {
|
||||
NextMethod(x, ..., coords = c('lon', 'lat'), crs = 4326)
|
||||
}
|
||||
|
|
|
@ -37,16 +37,27 @@
|
|||
#' }
|
||||
#'
|
||||
#' @section Retrieving measurements:
|
||||
#' Measurements can be retrieved through \code{\link{osem_measurements}} for a
|
||||
#' given phenomenon only. A subset of measurements may be selected by
|
||||
#'
|
||||
#' There are two ways to retrieve measurements:
|
||||
#' \itemize{
|
||||
#' \item a list of senseBoxes, previously retrieved through
|
||||
#' \code{\link{osem_box}} or \code{\link{osem_boxes}}.
|
||||
#' \item a geographic bounding box, which can be generated with the
|
||||
#' \code{\link[sf]{sf}} package.
|
||||
#' \item a time frame
|
||||
#' \item an exposure type of the given box
|
||||
#' \item \code{\link{osem_measurements_archive}}:
|
||||
#' Downloads measurements for a \emph{single box} from the openSenseMap archive.
|
||||
#' This function does not provide realtime data, but is suitable for long time frames.
|
||||
#'
|
||||
#' \item \code{\link{osem_measurements}}:
|
||||
#' This function retrieves (realtime) measurements from the API. It works for a
|
||||
#' \emph{single phenomenon} only, but provides various filters to select sensors by
|
||||
#'
|
||||
#' \itemize{
|
||||
#' \item a list of senseBoxes, previously retrieved through
|
||||
#' \code{\link{osem_box}} or \code{\link{osem_boxes}}.
|
||||
#' \item a geographic bounding box, which can be generated with the
|
||||
#' \code{\link[sf]{sf}} package.
|
||||
#' \item a time frame
|
||||
#' \item an exposure type of the given box
|
||||
#' }
|
||||
#'
|
||||
#' Use this function with caution for long time frames, as the API becomes
|
||||
#' quite slow and is limited to 10,000 measurements per 30 day interval.
|
||||
#' }
|
||||
#'
|
||||
#' Data is returned as \code{tibble} with the class \code{osem_measurements}.
|
||||
|
@ -54,6 +65,14 @@
|
|||
#' @section Retrieving statistics:
|
||||
#' Count statistics about the database are provided with \code{\link{osem_counts}}.
|
||||
#'
|
||||
#' @section Using a different API instance / endpoint:
|
||||
#' You can override the functions \code{osem_endpoint} and \code{osem_endpoint_archive}
|
||||
#' inside the package namespace:
|
||||
#'
|
||||
#' \code{
|
||||
#' assignInNamespace("osem_endpoint", function() "http://mynewosem.org", "opensensmapr")
|
||||
#' }
|
||||
#'
|
||||
#' @section Integration with other packages:
|
||||
#' The package aims to be compatible with the tidyverse.
|
||||
#' Helpers are implemented to ease the further usage of the retrieved data:
|
||||
|
@ -69,7 +88,7 @@
|
|||
#' \code{\link{dplyr}}.
|
||||
#' }
|
||||
#'
|
||||
#' @seealso Report bugs at \url{https://github.com/noerw/opensensmapR/issues}
|
||||
#' @seealso Report bugs at \url{https://github.com/sensebox/opensensmapR/issues}
|
||||
#' @seealso openSenseMap API: \url{https://api.opensensemap.org/}
|
||||
#' @seealso official openSenseMap API documentation: \url{https://docs.opensensemap.org/}
|
||||
#' @docType package
|
||||
|
@ -81,4 +100,13 @@
|
|||
`%>%` = magrittr::`%>%`
|
||||
|
||||
# just to make R CMD check happy, due to NSE (dplyr) functions
|
||||
globalVariables(c('lastMeasurement', '.'))
|
||||
globalVariables(c(
|
||||
'createdAt',
|
||||
'lastMeasurement',
|
||||
'sensorType',
|
||||
'title',
|
||||
'unit',
|
||||
'value',
|
||||
'X_id',
|
||||
'.'
|
||||
))
|
||||
|
|
|
@ -18,13 +18,13 @@ osem_phenomena = function (boxes) UseMethod('osem_phenomena')
|
|||
#' # get the phenomena for a single senseBox
|
||||
#' osem_phenomena(osem_box('593bcd656ccf3b0011791f5a'))
|
||||
#'
|
||||
#' # get the phenomena for a group of senseBoxes
|
||||
#' osem_phenomena(
|
||||
#' osem_boxes(grouptag = 'ifgi', exposure = 'outdoor', date = Sys.time())
|
||||
#' )
|
||||
#'
|
||||
#' # get phenomena with at least 30 sensors on opensensemap
|
||||
#' \donttest{
|
||||
#' # get the phenomena for a group of senseBoxes
|
||||
#' osem_phenomena(
|
||||
#' osem_boxes(grouptag = 'ifgi', exposure = 'outdoor', date = Sys.time())
|
||||
#' )
|
||||
#'
|
||||
#' # get phenomena with at least 30 sensors on opensensemap
|
||||
#' phenoms = osem_phenomena(osem_boxes())
|
||||
#' names(phenoms[phenoms > 29])
|
||||
#' }
|
||||
|
@ -33,5 +33,5 @@ osem_phenomena.sensebox = function (boxes) {
|
|||
table() %>% # get count for each phenomenon
|
||||
as.list()
|
||||
|
||||
p[order(unlist(p), decreasing = T)]
|
||||
p[order(unlist(p), decreasing = TRUE)]
|
||||
}
|
||||
|
|
128
README.md
128
README.md
|
@ -1,9 +1,44 @@
|
|||
# opensensmapr
|
||||
|
||||
[](https://cran.r-project.org/package=opensensmapr) [](https://travis-ci.org/noerw/opensensmapR) [](https://ci.appveyor.com/project/noerw/opensensmapR) [](https://codecov.io/github/noerw/opensensmapR?branch=master)
|
||||
[](https://cran.r-project.org/package=opensensmapr)
|
||||
[](https://travis-ci.org/sensebox/opensensmapR)
|
||||
[](https://ci.appveyor.com/project/noerw/opensensmapr/branch/master)
|
||||
|
||||
This R package ingests data (environmental measurements, sensor stations) from the API of opensensemap.org for analysis in R.
|
||||
The package aims to be compatible with sf and the tidyverse.
|
||||
This R package ingests data from the API of [opensensemap.org][osem] for analysis in R.
|
||||
|
||||
Features include:
|
||||
|
||||
- `osem_boxes()`: fetch sensor station ("box") metadata, with various filters
|
||||
- `osem_measurements()`: fetch measurements by phenomenon, with various filters such as submitting spatial extent, time range, sensor type, box, exposure..
|
||||
- no time frame limitation through request paging!
|
||||
- many helper functions to help understand the queried data
|
||||
- caching queries for reproducibility
|
||||
|
||||
The package aims to be compatible with the [`tidyverse`][tidy] and [`sf`][sf],
|
||||
so it is easy to analyze or visualize the data with state-of-the-art packages.
|
||||
|
||||
[osem]: https://opensensemap.org/
|
||||
[sf]: https://github.com/r-spatial/sf
|
||||
[tidy]: https://www.tidyverse.org/
|
||||
|
||||
## Usage
|
||||
|
||||
Complete documentation is provided via the R help system:
|
||||
Each function's documentation can be viewed with `?<function-name>`.
|
||||
A comprehensive overview of all functions is given in `?opensensmapr`.
|
||||
|
||||
There are also vignettes showcasing applications of this package:
|
||||
|
||||
- [Visualising the History of openSenseMap.org][osem-history]: Showcase of `opensensmapr` with `dplyr` + `ggplot2`
|
||||
- [Exploring the openSenseMap dataset][osem-intro]: Showcase of included helper functions
|
||||
- [Caching openSenseMap Data for reproducibility][osem-serialization]
|
||||
|
||||
[osem-intro]: https://sensebox.github.io/opensensmapR/inst/doc/osem-intro.html
|
||||
[osem-history]: https://sensebox.github.io/opensensmapR/inst/doc/osem-history.html
|
||||
[osem-serialization]: https://sensebox.github.io/opensensmapR/inst/doc/osem-serialization.html
|
||||
|
||||
If you used this package for an analysis and think it could serve as a good
|
||||
example or showcase, feel free to add a vignette to the package via a [PR](#contribute)!
|
||||
|
||||
## Installation
|
||||
|
||||
|
@ -13,56 +48,73 @@ The package is available on CRAN, install it via
|
|||
install.packages('opensensmapr')
|
||||
```
|
||||
|
||||
To install the veryy latest from GitHub, run:
|
||||
To install the very latest versions from GitHub, run:
|
||||
|
||||
```r
|
||||
install.packages('devtools')
|
||||
devtools::install_github('noerw/opensensmapr@master') # latest stable version
|
||||
devtools::install_github('noerw/opensensmapr@development') # bleeding edge version
|
||||
devtools::install_github('sensebox/opensensmapr@master') # latest stable version
|
||||
devtools::install_github('sensebox/opensensmapr@development') # bleeding edge version
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
Each function's documentation can be viewed with `?<function-name>`.
|
||||
An overview is given in `?opensensmapr`.
|
||||
A verbose usage example is shown in the vignette [`osem-intro`](https://noerw.github.com/opensensmapR/inst/doc/osem-intro.html).
|
||||
|
||||
In short, the following pseudocode shows the main functions for data retrieval:
|
||||
|
||||
```r
|
||||
# retrieve a single box by id, or many boxes by some property-filters
|
||||
b = osem_box('boxId')
|
||||
b = osem_boxes(filter1, filter2, ...)
|
||||
|
||||
# get the counts of observed phenomena for a list of boxes
|
||||
p = osem_phenomena(b)
|
||||
|
||||
# get measurements for a phenomenon
|
||||
m = osem_measurements(phenomenon, filter1, ...)
|
||||
# get measurements for a phenomenon from selected boxes
|
||||
m = osem_measurements(b, phenomenon, filter1, ...)
|
||||
# get measurements for a phenomenon from a geographic bounding box
|
||||
m = osem_measurements(bbox, phenomenon, filter1, ...)
|
||||
|
||||
# get general count statistics of the openSenseMap database
|
||||
osem_counts()
|
||||
```
|
||||
|
||||
Additionally there are some helpers: `summary.sensebox(), plot.sensebox(), st_as_sf.sensebox(), osem_as_sensebox(), [.sensebox(), filter.sensebox(), mutate.sensebox(), ...`.
|
||||
|
||||
## Changelog
|
||||
|
||||
This project adheres to semantic versioning, for changes in recent versions please consult [CHANGES.md](CHANGES.md).
|
||||
This project adheres to semantic versioning, for changes in recent versions please consult [NEWS.md](NEWS.md).
|
||||
|
||||
## Contributing & Development
|
||||
|
||||
Contributions are very welcome!
|
||||
When submitting a patch, please follow the existing [code style](.lintr),
|
||||
When submitting a patch, please follow the existing code style,
|
||||
and run `R CMD check --no-vignettes .` on the package.
|
||||
Where feasible, also add tests for the added / changed functionality in `tests/testthat`.
|
||||
|
||||
Please note that this project is released with a [Contributor Code of Conduct](CONDUCT.md).
|
||||
Please note that this project is released with a Contributor Code of Conduct.
|
||||
By participating in this project you agree to abide by its terms.
|
||||
|
||||
### development environment
|
||||
|
||||
To set up the development environment for testing and checking, all suggested packages should be installed.
|
||||
On linux, these require some system dependencies:
|
||||
```sh
|
||||
# install dependencies for sf (see https://github.com/r-spatial/sf#installing)
|
||||
sudo dnf install gdal-devel proj-devel proj-epsg proj-nad geos-devel udunits2-devel
|
||||
|
||||
# install suggested packages
|
||||
R -e "install.packages(c('maps', 'maptools', 'tibble', 'rgeos', 'sf',
|
||||
'knitr', 'rmarkdown', 'lubridate', 'units', 'jsonlite', 'ggplot2',
|
||||
'zoo', 'lintr', 'testthat', 'covr')"
|
||||
```
|
||||
|
||||
### build
|
||||
|
||||
To build the package, either use `devtools::build()` or run
|
||||
```sh
|
||||
R CMD build .
|
||||
```
|
||||
|
||||
Next, run the **tests and checks**:
|
||||
```sh
|
||||
R CMD check --as-cran ../opensensmapr_*.tar.gz
|
||||
# alternatively, if you're in a hurry:
|
||||
R CMD check --no-vignettes ../opensensmapr_*.tar.gz
|
||||
```
|
||||
|
||||
### release
|
||||
|
||||
To create a release:
|
||||
|
||||
0. make sure you are on master branch
|
||||
1. run the tests and checks as described above
|
||||
2. bump the version in `DESCRIPTION`
|
||||
3. update `NEWS.md`
|
||||
3. rebuild the documentation: `R -e 'devtools::document()'`
|
||||
4. build the package again with the new version: `R CMD build . --no-build-vignettes`
|
||||
5. tag the commit with the new version: `git tag v0.5.0`
|
||||
6. push changes: `git push && git push --tags`
|
||||
7. wait for *all* CI tests to complete successfully (helps in the next step)
|
||||
8. [upload the new release to CRAN](https://cran.r-project.org/submit.html)
|
||||
9. get back to the enjoyable parts of your life & hope you won't get bad mail next week.
|
||||
|
||||
|
||||
## License
|
||||
|
||||
GPL-2.0 - Norwin Roosen
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
## ----setup, results='hide', message=FALSE, warning=FALSE-----------------
|
||||
## ----setup, results='hide', message=FALSE, warning=FALSE----------------------
|
||||
# required packages:
|
||||
library(opensensmapr) # data download
|
||||
library(dplyr) # data wrangling
|
||||
|
@ -6,12 +6,15 @@ library(ggplot2) # plotting
|
|||
library(lubridate) # date arithmetic
|
||||
library(zoo) # rollmean()
|
||||
|
||||
## ----download------------------------------------------------------------
|
||||
## ----download-----------------------------------------------------------------
|
||||
# if you want to see results for a specific subset of boxes,
|
||||
# just specify a filter such as grouptag='ifgi' here
|
||||
boxes = osem_boxes()
|
||||
|
||||
## ----exposure_counts, message=FALSE--------------------------------------
|
||||
# boxes = osem_boxes(cache = '.')
|
||||
boxes = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
||||
|
||||
|
||||
## ----exposure_counts, message=FALSE-------------------------------------------
|
||||
exposure_counts = boxes %>%
|
||||
group_by(exposure) %>%
|
||||
mutate(count = row_number(createdAt))
|
||||
|
@ -22,7 +25,7 @@ ggplot(exposure_counts, aes(x = createdAt, y = count, colour = exposure)) +
|
|||
scale_colour_manual(values = exposure_colors) +
|
||||
xlab('Registration Date') + ylab('senseBox count')
|
||||
|
||||
## ----exposure_summary----------------------------------------------------
|
||||
## ----exposure_summary---------------------------------------------------------
|
||||
exposure_counts %>%
|
||||
summarise(
|
||||
oldest = min(createdAt),
|
||||
|
@ -31,11 +34,11 @@ exposure_counts %>%
|
|||
) %>%
|
||||
arrange(desc(count))
|
||||
|
||||
## ----grouptag_counts, message=FALSE--------------------------------------
|
||||
## ----grouptag_counts, message=FALSE-------------------------------------------
|
||||
grouptag_counts = boxes %>%
|
||||
group_by(grouptag) %>%
|
||||
# only include grouptags with 8 or more members
|
||||
filter(length(grouptag) >= 8 && !is.na(grouptag)) %>%
|
||||
filter(length(grouptag) >= 8 & !is.na(grouptag)) %>%
|
||||
mutate(count = row_number(createdAt))
|
||||
|
||||
# helper for sorting the grouptags by boxcount
|
||||
|
@ -49,7 +52,7 @@ ggplot(grouptag_counts, aes(x = createdAt, y = count, colour = grouptag)) +
|
|||
geom_line(aes(group = grouptag)) +
|
||||
xlab('Registration Date') + ylab('senseBox count')
|
||||
|
||||
## ----grouptag_summary----------------------------------------------------
|
||||
## ----grouptag_summary---------------------------------------------------------
|
||||
grouptag_counts %>%
|
||||
summarise(
|
||||
oldest = min(createdAt),
|
||||
|
@ -58,7 +61,7 @@ grouptag_counts %>%
|
|||
) %>%
|
||||
arrange(desc(count))
|
||||
|
||||
## ----growthrate_registered, warning=FALSE, message=FALSE, results='hide'----
|
||||
## ----growthrate_registered, warning=FALSE, message=FALSE, results='hide'------
|
||||
bins = 'week'
|
||||
mvavg_bins = 6
|
||||
|
||||
|
@ -68,7 +71,7 @@ growth = boxes %>%
|
|||
summarize(count = length(week)) %>%
|
||||
mutate(event = 'registered')
|
||||
|
||||
## ----growthrate_inactive, warning=FALSE, message=FALSE, results='hide'----
|
||||
## ----growthrate_inactive, warning=FALSE, message=FALSE, results='hide'--------
|
||||
inactive = boxes %>%
|
||||
# remove boxes that were updated in the last two days,
|
||||
# b/c any box becomes inactive at some point by definition of updatedAt
|
||||
|
@ -78,7 +81,7 @@ inactive = boxes %>%
|
|||
summarize(count = length(week)) %>%
|
||||
mutate(event = 'inactive')
|
||||
|
||||
## ----growthrate, warning=FALSE, message=FALSE, results='hide'------------
|
||||
## ----growthrate, warning=FALSE, message=FALSE, results='hide'-----------------
|
||||
boxes_by_date = bind_rows(growth, inactive) %>% group_by(event)
|
||||
|
||||
ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
||||
|
@ -89,7 +92,7 @@ ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
|||
# moving average, make first and last value NA (to ensure identical length of vectors)
|
||||
geom_line(aes(y = rollmean(count, mvavg_bins, fill = list(NA, NULL, NA))))
|
||||
|
||||
## ----exposure_duration, message=FALSE------------------------------------
|
||||
## ----exposure_duration, message=FALSE-----------------------------------------
|
||||
duration = boxes %>%
|
||||
group_by(exposure) %>%
|
||||
filter(!is.na(updatedAt)) %>%
|
||||
|
@ -99,11 +102,11 @@ ggplot(duration, aes(x = exposure, y = duration)) +
|
|||
geom_boxplot() +
|
||||
coord_flip() + ylab('Duration active in Days')
|
||||
|
||||
## ----grouptag_duration, message=FALSE------------------------------------
|
||||
## ----grouptag_duration, message=FALSE-----------------------------------------
|
||||
duration = boxes %>%
|
||||
group_by(grouptag) %>%
|
||||
# only include grouptags with 8 or more members
|
||||
filter(length(grouptag) >= 8 && !is.na(grouptag) && !is.na(updatedAt)) %>%
|
||||
filter(length(grouptag) >= 8 & !is.na(grouptag) & !is.na(updatedAt)) %>%
|
||||
mutate(duration = difftime(updatedAt, createdAt, units='days'))
|
||||
|
||||
ggplot(duration, aes(x = grouptag, y = duration)) +
|
||||
|
@ -119,7 +122,7 @@ duration %>%
|
|||
) %>%
|
||||
arrange(desc(duration_avg))
|
||||
|
||||
## ----year_duration, message=FALSE----------------------------------------
|
||||
## ----year_duration, message=FALSE---------------------------------------------
|
||||
# NOTE: boxes older than 2016 missing due to missing updatedAt in database
|
||||
duration = boxes %>%
|
||||
mutate(year = cut(as.Date(createdAt), breaks = 'year')) %>%
|
||||
|
|
|
@ -43,7 +43,10 @@ So the first step is to retrieve *all the boxes*:
|
|||
```{r download}
|
||||
# if you want to see results for a specific subset of boxes,
|
||||
# just specify a filter such as grouptag='ifgi' here
|
||||
boxes = osem_boxes()
|
||||
|
||||
# boxes = osem_boxes(cache = '.')
|
||||
boxes = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
||||
|
||||
```
|
||||
|
||||
# Plot count of boxes by time {.tabset}
|
||||
|
@ -68,7 +71,7 @@ ggplot(exposure_counts, aes(x = createdAt, y = count, colour = exposure)) +
|
|||
Outdoor boxes are growing *fast*!
|
||||
We can also see the introduction of `mobile` sensor "stations" in 2017. While
|
||||
mobile boxes are still few, we can expect a quick rise in 2018 once the new
|
||||
[senseBox MCU with GPS support is released](https://sensebox.de/blog/2018-03-06-senseBox_MCU).
|
||||
senseBox MCU with GPS support is released.
|
||||
|
||||
Let's have a quick summary:
|
||||
```{r exposure_summary}
|
||||
|
@ -93,7 +96,7 @@ inconsistent (`Luftdaten`, `luftdaten.info`, ...)
|
|||
grouptag_counts = boxes %>%
|
||||
group_by(grouptag) %>%
|
||||
# only include grouptags with 8 or more members
|
||||
filter(length(grouptag) >= 8 && !is.na(grouptag)) %>%
|
||||
filter(length(grouptag) >= 8 & !is.na(grouptag)) %>%
|
||||
mutate(count = row_number(createdAt))
|
||||
|
||||
# helper for sorting the grouptags by boxcount
|
||||
|
@ -163,7 +166,7 @@ ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
|||
|
||||
We see a sudden rise in early 2017, which lines up with the fast growing grouptag `Luftdaten`.
|
||||
This was enabled by an integration of openSenseMap.org into the firmware of the
|
||||
air quality monitoring project [luftdaten.info](https://luftdaten.info).
|
||||
air quality monitoring project [luftdaten.info](https://sensor.community/de/).
|
||||
The dips in mid 2017 and early 2018 could possibly be explained by production/delivery issues
|
||||
of the senseBox hardware, but I have no data on the exact time frames to verify.
|
||||
|
||||
|
@ -192,7 +195,7 @@ spanning a large chunk of openSenseMap's existence.
|
|||
duration = boxes %>%
|
||||
group_by(grouptag) %>%
|
||||
# only include grouptags with 8 or more members
|
||||
filter(length(grouptag) >= 8 && !is.na(grouptag) && !is.na(updatedAt)) %>%
|
||||
filter(length(grouptag) >= 8 & !is.na(grouptag) & !is.na(updatedAt)) %>%
|
||||
mutate(duration = difftime(updatedAt, createdAt, units='days'))
|
||||
|
||||
ggplot(duration, aes(x = grouptag, y = duration)) +
|
||||
|
@ -240,4 +243,4 @@ If you implemented some, feel free to add them to this vignette via a [Pull Requ
|
|||
* filter timespans with big dips in growth rate, and extrapolate the amount of
|
||||
senseBoxes that could be on the platform today, assuming there were no production issues ;)
|
||||
|
||||
[PR]: https://github.com/noerw/opensensmapr/pulls
|
||||
[PR]: https://github.com/sensebox/opensensmapr/pulls
|
||||
|
|
File diff suppressed because one or more lines are too long
159
inst/doc/osem-history_revised.R
Normal file
159
inst/doc/osem-history_revised.R
Normal file
|
@ -0,0 +1,159 @@
|
|||
## ----setup, results='hide', message=FALSE, warning=FALSE----------------------
|
||||
# required packages:
|
||||
library(opensensmapr) # data download
|
||||
library(dplyr) # data wrangling
|
||||
library(ggplot2) # plotting
|
||||
library(lubridate) # date arithmetic
|
||||
library(zoo) # rollmean()
|
||||
|
||||
## ----download, results='hide', message=FALSE, warning=FALSE-------------------
|
||||
# if you want to see results for a specific subset of boxes,
|
||||
# just specify a filter such as grouptag='ifgi' here
|
||||
|
||||
# boxes = osem_boxes(cache = '.')
|
||||
boxes = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
||||
|
||||
## -----------------------------------------------------------------------------
|
||||
boxes = filter(boxes, locationtimestamp >= "2022-01-01" & locationtimestamp <="2022-12-31")
|
||||
summary(boxes) -> summary.data.frame
|
||||
|
||||
## ---- message=FALSE, warning=FALSE--------------------------------------------
|
||||
plot(boxes)
|
||||
|
||||
## -----------------------------------------------------------------------------
|
||||
phenoms = osem_phenomena(boxes)
|
||||
str(phenoms)
|
||||
|
||||
## -----------------------------------------------------------------------------
|
||||
phenoms[phenoms > 50]
|
||||
|
||||
## ----exposure_counts, message=FALSE-------------------------------------------
|
||||
exposure_counts = boxes %>%
|
||||
group_by(exposure) %>%
|
||||
mutate(count = row_number(locationtimestamp))
|
||||
|
||||
exposure_colors = c(indoor = 'red', outdoor = 'lightgreen', mobile = 'blue', unknown = 'darkgrey')
|
||||
ggplot(exposure_counts, aes(x = locationtimestamp, y = count, colour = exposure)) +
|
||||
geom_line() +
|
||||
scale_colour_manual(values = exposure_colors) +
|
||||
xlab('Registration Date') + ylab('senseBox count')
|
||||
|
||||
## ----exposure_summary---------------------------------------------------------
|
||||
exposure_counts %>%
|
||||
summarise(
|
||||
oldest = min(locationtimestamp),
|
||||
newest = max(locationtimestamp),
|
||||
count = max(count)
|
||||
) %>%
|
||||
arrange(desc(count))
|
||||
|
||||
## ----grouptag_counts, message=FALSE-------------------------------------------
|
||||
grouptag_counts = boxes %>%
|
||||
group_by(grouptag) %>%
|
||||
# only include grouptags with 15 or more members
|
||||
filter(length(grouptag) >= 15 & !is.na(grouptag) & grouptag != '') %>%
|
||||
mutate(count = row_number(locationtimestamp))
|
||||
|
||||
# helper for sorting the grouptags by boxcount
|
||||
sortLvls = function(oldFactor, ascending = TRUE) {
|
||||
lvls = table(oldFactor) %>% sort(., decreasing = !ascending) %>% names()
|
||||
factor(oldFactor, levels = lvls)
|
||||
}
|
||||
grouptag_counts$grouptag = sortLvls(grouptag_counts$grouptag, ascending = FALSE)
|
||||
|
||||
ggplot(grouptag_counts, aes(x = locationtimestamp, y = count, colour = grouptag)) +
|
||||
geom_line(aes(group = grouptag)) +
|
||||
xlab('Registration Date') + ylab('senseBox count')
|
||||
|
||||
## ----grouptag_summary---------------------------------------------------------
|
||||
grouptag_counts %>%
|
||||
summarise(
|
||||
oldest = min(locationtimestamp),
|
||||
newest = max(locationtimestamp),
|
||||
count = max(count)
|
||||
) %>%
|
||||
arrange(desc(count))
|
||||
|
||||
## ----growthrate_registered, warning=FALSE, message=FALSE, results='hide'------
|
||||
bins = 'week'
|
||||
mvavg_bins = 6
|
||||
|
||||
growth = boxes %>%
|
||||
mutate(week = cut(as.Date(locationtimestamp), breaks = bins)) %>%
|
||||
group_by(week) %>%
|
||||
summarize(count = length(week)) %>%
|
||||
mutate(event = 'registered')
|
||||
|
||||
## ----growthrate_inactive, warning=FALSE, message=FALSE, results='hide'--------
|
||||
inactive = boxes %>%
|
||||
# remove boxes that were updated in the last two days,
|
||||
# b/c any box becomes inactive at some point by definition of updatedAt
|
||||
filter(lastMeasurement < now() - days(2)) %>%
|
||||
mutate(week = cut(as.Date(lastMeasurement), breaks = bins)) %>%
|
||||
filter(as.Date(week) > as.Date("2021-12-31")) %>%
|
||||
group_by(week) %>%
|
||||
summarize(count = length(week)) %>%
|
||||
mutate(event = 'inactive')
|
||||
|
||||
## ----growthrate, warning=FALSE, message=FALSE, results='hide'-----------------
|
||||
boxes_by_date = bind_rows(growth, inactive) %>% group_by(event)
|
||||
|
||||
ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
||||
xlab('Time') + ylab(paste('rate per ', bins)) +
|
||||
scale_x_date(date_breaks="years", date_labels="%Y") +
|
||||
scale_colour_manual(values = c(registered = 'lightgreen', inactive = 'grey')) +
|
||||
geom_point(aes(y = count), size = 0.5) +
|
||||
# moving average, make first and last value NA (to ensure identical length of vectors)
|
||||
geom_line(aes(y = rollmean(count, mvavg_bins, fill = list(NA, NULL, NA))))
|
||||
|
||||
## ----table_mostregistrations--------------------------------------------------
|
||||
boxes_by_date %>%
|
||||
filter(count > 50) %>%
|
||||
arrange(desc(count))
|
||||
|
||||
## ----exposure_duration, message=FALSE-----------------------------------------
|
||||
durations = boxes %>%
|
||||
group_by(exposure) %>%
|
||||
filter(!is.na(lastMeasurement)) %>%
|
||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
||||
filter(duration >= 0)
|
||||
|
||||
ggplot(durations, aes(x = exposure, y = duration)) +
|
||||
geom_boxplot() +
|
||||
coord_flip() + ylab('Duration active in Days')
|
||||
|
||||
## ----grouptag_duration, message=FALSE-----------------------------------------
|
||||
durations = boxes %>%
|
||||
filter(!is.na(lastMeasurement)) %>%
|
||||
group_by(grouptag) %>%
|
||||
# only include grouptags with 20 or more members
|
||||
filter(length(grouptag) >= 15 & !is.na(grouptag) & !is.na(lastMeasurement)) %>%
|
||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
||||
filter(duration >= 0)
|
||||
|
||||
ggplot(durations, aes(x = grouptag, y = duration)) +
|
||||
geom_boxplot() +
|
||||
coord_flip() + ylab('Duration active in Days')
|
||||
|
||||
durations %>%
|
||||
summarize(
|
||||
duration_avg = round(mean(duration)),
|
||||
duration_min = round(min(duration)),
|
||||
duration_max = round(max(duration)),
|
||||
oldest_box = round(max(difftime(now(), locationtimestamp, units='days')))
|
||||
) %>%
|
||||
arrange(desc(duration_avg))
|
||||
|
||||
## ----year_duration, message=FALSE---------------------------------------------
|
||||
# NOTE: boxes older than 2016 missing due to missing updatedAt in database
|
||||
duration = boxes %>%
|
||||
mutate(year = cut(as.Date(locationtimestamp), breaks = 'year')) %>%
|
||||
group_by(year) %>%
|
||||
filter(!is.na(lastMeasurement)) %>%
|
||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
||||
filter(duration >= 0)
|
||||
|
||||
ggplot(duration, aes(x = substr(as.character(year), 0, 4), y = duration)) +
|
||||
geom_boxplot() +
|
||||
coord_flip() + ylab('Duration active in Days') + xlab('Year of Registration')
|
||||
|
297
inst/doc/osem-history_revised.Rmd
Normal file
297
inst/doc/osem-history_revised.Rmd
Normal file
|
@ -0,0 +1,297 @@
|
|||
---
|
||||
title: "Visualising the Development of openSenseMap.org in 2022"
|
||||
author: "Jan Stenkamp"
|
||||
date: '`r Sys.Date()`'
|
||||
output:
|
||||
html_document:
|
||||
code_folding: hide
|
||||
df_print: kable
|
||||
theme: lumen
|
||||
toc: yes
|
||||
toc_float: yes
|
||||
rmarkdown::html_vignette:
|
||||
df_print: kable
|
||||
fig_height: 5
|
||||
fig_width: 7
|
||||
toc: yes
|
||||
vignette: >
|
||||
%\VignetteIndexEntry{Visualising the Development of openSenseMap.org in 2022}
|
||||
%\VignetteEncoding{UTF-8}
|
||||
%\VignetteEngine{knitr::rmarkdown}
|
||||
---
|
||||
|
||||
> This vignette serves as an example on data wrangling & visualization with
|
||||
`opensensmapr`, `dplyr` and `ggplot2`.
|
||||
|
||||
```{r setup, results='hide', message=FALSE, warning=FALSE}
|
||||
# required packages:
|
||||
library(opensensmapr) # data download
|
||||
library(dplyr) # data wrangling
|
||||
library(ggplot2) # plotting
|
||||
library(lubridate) # date arithmetic
|
||||
library(zoo) # rollmean()
|
||||
```
|
||||
|
||||
openSenseMap.org has grown quite a bit in the last years; it would be interesting
|
||||
to see how we got to the current `r osem_counts()$boxes` sensor stations,
|
||||
split up by various attributes of the boxes.
|
||||
|
||||
While `opensensmapr` provides extensive methods of filtering boxes by attributes
|
||||
on the server, we do the filtering within R to save time and gain flexibility.
|
||||
|
||||
|
||||
So the first step is to retrieve *all the boxes*.
|
||||
|
||||
```{r download, results='hide', message=FALSE, warning=FALSE}
|
||||
# if you want to see results for a specific subset of boxes,
|
||||
# just specify a filter such as grouptag='ifgi' here
|
||||
|
||||
# boxes = osem_boxes(cache = '.')
|
||||
boxes = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
||||
```
|
||||
# Introduction
|
||||
In the following we just want to have a look at the boxes created in 2022, so we filter for them.
|
||||
|
||||
```{r}
|
||||
boxes = filter(boxes, locationtimestamp >= "2022-01-01" & locationtimestamp <="2022-12-31")
|
||||
summary(boxes) -> summary.data.frame
|
||||
```
|
||||
|
||||
<!-- This gives a good overview already: As of writing this, there are more than 11,000 -->
|
||||
<!-- sensor stations, of which ~30% are currently running. Most of them are placed -->
|
||||
<!-- outdoors and have around 5 sensors each. -->
|
||||
<!-- The oldest station is from August 2016, while the latest station was registered a -->
|
||||
<!-- couple of minutes ago. -->
|
||||
|
||||
Another feature of interest is the spatial distribution of the boxes: `plot()`
|
||||
can help us out here. This function requires a bunch of optional dependencies though.
|
||||
|
||||
```{r, message=FALSE, warning=FALSE}
|
||||
plot(boxes)
|
||||
```
|
||||
|
||||
But what do these sensor stations actually measure? Let's find out.
|
||||
`osem_phenomena()` gives us a named list of the counts of each observed
|
||||
phenomenon for the given set of sensor stations:
|
||||
|
||||
```{r}
|
||||
phenoms = osem_phenomena(boxes)
|
||||
str(phenoms)
|
||||
```
|
||||
|
||||
That's quite some noise there, with many phenomena being measured by a single
|
||||
sensor only, or many duplicated phenomena due to slightly different spellings.
|
||||
We should clean that up, but for now let's just filter out the noise and find
|
||||
those phenomena with high sensor numbers:
|
||||
|
||||
```{r}
|
||||
phenoms[phenoms > 50]
|
||||
```
|
||||
|
||||
|
||||
# Plot count of boxes by time {.tabset}
|
||||
By looking at the `createdAt` attribute of each box we know the exact time a box
|
||||
was registered. Because of some database migration issues the `createdAt` values are mostly wrong (~80% of boxes created 2022-03-30), so we are using the `timestamp` attribute of the `currentlocation` which should in most cases correspond to the creation date.
|
||||
|
||||
With this approach we have no information about boxes that were deleted in the
|
||||
meantime, but that's okay for now.
|
||||
|
||||
## ...and exposure
|
||||
```{r exposure_counts, message=FALSE}
|
||||
exposure_counts = boxes %>%
|
||||
group_by(exposure) %>%
|
||||
mutate(count = row_number(locationtimestamp))
|
||||
|
||||
exposure_colors = c(indoor = 'red', outdoor = 'lightgreen', mobile = 'blue', unknown = 'darkgrey')
|
||||
ggplot(exposure_counts, aes(x = locationtimestamp, y = count, colour = exposure)) +
|
||||
geom_line() +
|
||||
scale_colour_manual(values = exposure_colors) +
|
||||
xlab('Registration Date') + ylab('senseBox count')
|
||||
```
|
||||
|
||||
Outdoor boxes are growing *fast*!
|
||||
We can also see the introduction of `mobile` sensor "stations" in 2017.
|
||||
|
||||
Let's have a quick summary:
|
||||
```{r exposure_summary}
|
||||
exposure_counts %>%
|
||||
summarise(
|
||||
oldest = min(locationtimestamp),
|
||||
newest = max(locationtimestamp),
|
||||
count = max(count)
|
||||
) %>%
|
||||
arrange(desc(count))
|
||||
```
|
||||
|
||||
## ...and grouptag
|
||||
We can try to find out where the increases in growth came from, by analysing the
|
||||
box count by grouptag.
|
||||
|
||||
Caveats: Only a small subset of boxes has a grouptag, and we should assume
|
||||
that these groups are actually bigger. Also, we can see that grouptag naming is
|
||||
inconsistent (`Luftdaten`, `luftdaten.info`, ...)
|
||||
|
||||
```{r grouptag_counts, message=FALSE}
|
||||
grouptag_counts = boxes %>%
|
||||
group_by(grouptag) %>%
|
||||
# only include grouptags with 15 or more members
|
||||
filter(length(grouptag) >= 15 & !is.na(grouptag) & grouptag != '') %>%
|
||||
mutate(count = row_number(locationtimestamp))
|
||||
|
||||
# helper for sorting the grouptags by boxcount
|
||||
sortLvls = function(oldFactor, ascending = TRUE) {
|
||||
lvls = table(oldFactor) %>% sort(., decreasing = !ascending) %>% names()
|
||||
factor(oldFactor, levels = lvls)
|
||||
}
|
||||
grouptag_counts$grouptag = sortLvls(grouptag_counts$grouptag, ascending = FALSE)
|
||||
|
||||
ggplot(grouptag_counts, aes(x = locationtimestamp, y = count, colour = grouptag)) +
|
||||
geom_line(aes(group = grouptag)) +
|
||||
xlab('Registration Date') + ylab('senseBox count')
|
||||
```
|
||||
|
||||
```{r grouptag_summary}
|
||||
grouptag_counts %>%
|
||||
summarise(
|
||||
oldest = min(locationtimestamp),
|
||||
newest = max(locationtimestamp),
|
||||
count = max(count)
|
||||
) %>%
|
||||
arrange(desc(count))
|
||||
```
|
||||
|
||||
# Plot rate of growth and inactivity per week
|
||||
First we group the boxes by `locationtimestamp` into bins of one week:
|
||||
```{r growthrate_registered, warning=FALSE, message=FALSE, results='hide'}
|
||||
bins = 'week'
|
||||
mvavg_bins = 6
|
||||
|
||||
growth = boxes %>%
|
||||
mutate(week = cut(as.Date(locationtimestamp), breaks = bins)) %>%
|
||||
group_by(week) %>%
|
||||
summarize(count = length(week)) %>%
|
||||
mutate(event = 'registered')
|
||||
```
|
||||
|
||||
We can do the same for `updatedAt`, which informs us about the last change to
|
||||
a box, including uploaded measurements. As a lot of boxes were "updated" by the database
|
||||
migration, many of them are updated at 2022-03-30, so we try to use the `lastMeasurement`
|
||||
attribute instead of `updatedAt`. This leads to fewer boxes but also automatically excludes
|
||||
boxes which were created but never made a measurement.
|
||||
|
||||
This method of determining inactive boxes is fairly inaccurate and should be
|
||||
considered an approximation, because we have no information about intermediate
|
||||
inactive phases.
|
||||
Also deleted boxes would probably have a big impact here.
|
||||
```{r growthrate_inactive, warning=FALSE, message=FALSE, results='hide'}
|
||||
inactive = boxes %>%
|
||||
# remove boxes that were updated in the last two days,
|
||||
# b/c any box becomes inactive at some point by definition of updatedAt
|
||||
filter(lastMeasurement < now() - days(2)) %>%
|
||||
mutate(week = cut(as.Date(lastMeasurement), breaks = bins)) %>%
|
||||
filter(as.Date(week) > as.Date("2021-12-31")) %>%
|
||||
group_by(week) %>%
|
||||
summarize(count = length(week)) %>%
|
||||
mutate(event = 'inactive')
|
||||
```
|
||||
|
||||
Now we can combine both datasets for plotting:
|
||||
```{r growthrate, warning=FALSE, message=FALSE, results='hide'}
|
||||
boxes_by_date = bind_rows(growth, inactive) %>% group_by(event)
|
||||
|
||||
ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
||||
xlab('Time') + ylab(paste('rate per ', bins)) +
|
||||
scale_x_date(date_breaks="years", date_labels="%Y") +
|
||||
scale_colour_manual(values = c(registered = 'lightgreen', inactive = 'grey')) +
|
||||
geom_point(aes(y = count), size = 0.5) +
|
||||
# moving average, make first and last value NA (to ensure identical length of vectors)
|
||||
geom_line(aes(y = rollmean(count, mvavg_bins, fill = list(NA, NULL, NA))))
|
||||
```
|
||||
|
||||
And see in which weeks the most boxes become (in)active:
|
||||
```{r table_mostregistrations}
|
||||
boxes_by_date %>%
|
||||
filter(count > 50) %>%
|
||||
arrange(desc(count))
|
||||
```
|
||||
|
||||
# Plot duration of boxes being active {.tabset}
|
||||
While we are looking at `locationtimestamp` and `lastMeasurement`, we can also extract the duration of activity
|
||||
of each box, and look at metrics by exposure and grouptag once more:
|
||||
|
||||
## ...by exposure
|
||||
```{r exposure_duration, message=FALSE}
|
||||
durations = boxes %>%
|
||||
group_by(exposure) %>%
|
||||
filter(!is.na(lastMeasurement)) %>%
|
||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
||||
filter(duration >= 0)
|
||||
|
||||
ggplot(durations, aes(x = exposure, y = duration)) +
|
||||
geom_boxplot() +
|
||||
coord_flip() + ylab('Duration active in Days')
|
||||
```
|
||||
|
||||
The time of activity averages at only `r round(mean(durations$duration))` days,
|
||||
though there are boxes with `r round(max(durations$duration))` days of activity,
|
||||
spanning a large chunk of openSenseMap's existence.
|
||||
|
||||
## ...by grouptag
|
||||
```{r grouptag_duration, message=FALSE}
|
||||
durations = boxes %>%
|
||||
filter(!is.na(lastMeasurement)) %>%
|
||||
group_by(grouptag) %>%
|
||||
# only include grouptags with 15 or more members
|
||||
filter(length(grouptag) >= 15 & !is.na(grouptag) & !is.na(lastMeasurement)) %>%
|
||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
||||
filter(duration >= 0)
|
||||
|
||||
ggplot(durations, aes(x = grouptag, y = duration)) +
|
||||
geom_boxplot() +
|
||||
coord_flip() + ylab('Duration active in Days')
|
||||
|
||||
durations %>%
|
||||
summarize(
|
||||
duration_avg = round(mean(duration)),
|
||||
duration_min = round(min(duration)),
|
||||
duration_max = round(max(duration)),
|
||||
oldest_box = round(max(difftime(now(), locationtimestamp, units='days')))
|
||||
) %>%
|
||||
arrange(desc(duration_avg))
|
||||
```
|
||||
|
||||
The time of activity averages at only `r round(mean(durations$duration))` days,
|
||||
though there are boxes with `r round(max(durations$duration))` days of activity,
|
||||
spanning a large chunk of openSenseMap's existence.
|
||||
|
||||
## ...by year of registration
|
||||
This is less useful, as older boxes are active for a longer time by definition.
|
||||
If you have an idea how to compensate for that, please send a [Pull Request][PR]!
|
||||
|
||||
```{r year_duration, message=FALSE}
|
||||
# NOTE: boxes older than 2016 missing due to missing updatedAt in database
|
||||
duration = boxes %>%
|
||||
mutate(year = cut(as.Date(locationtimestamp), breaks = 'year')) %>%
|
||||
group_by(year) %>%
|
||||
filter(!is.na(lastMeasurement)) %>%
|
||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
||||
filter(duration >= 0)
|
||||
|
||||
ggplot(duration, aes(x = substr(as.character(year), 0, 4), y = duration)) +
|
||||
geom_boxplot() +
|
||||
coord_flip() + ylab('Duration active in Days') + xlab('Year of Registration')
|
||||
```
|
||||
|
||||
# More Visualisations
|
||||
Other visualisations come to mind, and are left as an exercise to the reader.
|
||||
If you implemented some, feel free to add them to this vignette via a [Pull Request][PR].
|
||||
|
||||
* growth by phenomenon
|
||||
* growth by location -> (interactive) map
|
||||
* set inactive rate in relation to total box count
|
||||
* filter timespans with big dips in growth rate, and extrapolate the amount of
|
||||
senseBoxes that could be on the platform today, assuming there were no production issues ;)
|
||||
|
||||
[PR]: https://github.com/sensebox/opensensmapr/pulls
|
||||
|
||||
|
2473
inst/doc/osem-history_revised.html
Normal file
2473
inst/doc/osem-history_revised.html
Normal file
File diff suppressed because one or more lines are too long
|
@ -1,73 +1,75 @@
|
|||
## ----setup, include=FALSE------------------------------------------------
|
||||
## ----setup, include=FALSE-----------------------------------------------------
|
||||
knitr::opts_chunk$set(echo = TRUE)
|
||||
|
||||
## ----results = F---------------------------------------------------------
|
||||
## ----results = FALSE----------------------------------------------------------
|
||||
library(magrittr)
|
||||
library(opensensmapr)
|
||||
|
||||
all_sensors = osem_boxes()
|
||||
# all_sensors = osem_boxes(cache = '.')
|
||||
all_sensors = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
||||
|
||||
## ------------------------------------------------------------------------
|
||||
## -----------------------------------------------------------------------------
|
||||
summary(all_sensors)
|
||||
|
||||
## ----message=F, warning=F------------------------------------------------
|
||||
if (!require('maps')) install.packages('maps')
|
||||
if (!require('maptools')) install.packages('maptools')
|
||||
if (!require('rgeos')) install.packages('rgeos')
|
||||
|
||||
## ---- message=FALSE, warning=FALSE--------------------------------------------
|
||||
plot(all_sensors)
|
||||
|
||||
## ------------------------------------------------------------------------
|
||||
## -----------------------------------------------------------------------------
|
||||
phenoms = osem_phenomena(all_sensors)
|
||||
str(phenoms)
|
||||
|
||||
## ------------------------------------------------------------------------
|
||||
## -----------------------------------------------------------------------------
|
||||
phenoms[phenoms > 20]
|
||||
|
||||
## ----results = F---------------------------------------------------------
|
||||
pm25_sensors = osem_boxes(
|
||||
exposure = 'outdoor',
|
||||
date = Sys.time(), # ±4 hours
|
||||
phenomenon = 'PM2.5'
|
||||
)
|
||||
## ----results = FALSE, eval=FALSE----------------------------------------------
|
||||
# pm25_sensors = osem_boxes(
|
||||
# exposure = 'outdoor',
|
||||
# date = Sys.time(), # ±4 hours
|
||||
# phenomenon = 'PM2.5'
|
||||
# )
|
||||
|
||||
## -----------------------------------------------------------------------------
|
||||
pm25_sensors = readRDS('pm25_sensors.rds') # read precomputed file to save resources
|
||||
|
||||
## ------------------------------------------------------------------------
|
||||
summary(pm25_sensors)
|
||||
plot(pm25_sensors)
|
||||
|
||||
## ------------------------------------------------------------------------
|
||||
## ---- results=FALSE, message=FALSE--------------------------------------------
|
||||
library(sf)
|
||||
library(units)
|
||||
library(lubridate)
|
||||
library(dplyr)
|
||||
|
||||
# construct a bounding box: 12 kilometers around Berlin
|
||||
berlin = st_point(c(13.4034, 52.5120)) %>%
|
||||
st_sfc(crs = 4326) %>%
|
||||
st_transform(3857) %>% # allow setting a buffer in meters
|
||||
st_buffer(set_units(12, km)) %>%
|
||||
st_transform(4326) %>% # the opensensemap expects WGS 84
|
||||
st_bbox()
|
||||
|
||||
## ----results = F---------------------------------------------------------
|
||||
pm25 = osem_measurements(
|
||||
berlin,
|
||||
phenomenon = 'PM2.5',
|
||||
from = now() - days(20), # defaults to 2 days
|
||||
to = now()
|
||||
)
|
||||
## ----bbox, results = FALSE, eval=FALSE----------------------------------------
|
||||
# # construct a bounding box: 12 kilometers around Berlin
|
||||
# berlin = st_point(c(13.4034, 52.5120)) %>%
|
||||
# st_sfc(crs = 4326) %>%
|
||||
# st_transform(3857) %>% # allow setting a buffer in meters
|
||||
# st_buffer(set_units(12, km)) %>%
|
||||
# st_transform(4326) %>% # the opensensemap expects WGS 84
|
||||
# st_bbox()
|
||||
# pm25 = osem_measurements(
|
||||
# berlin,
|
||||
# phenomenon = 'PM2.5',
|
||||
# from = now() - days(3), # defaults to 2 days
|
||||
# to = now()
|
||||
# )
|
||||
#
|
||||
|
||||
## -----------------------------------------------------------------------------
|
||||
pm25 = readRDS('pm25_berlin.rds') # read precomputed file to save resources
|
||||
plot(pm25)
|
||||
|
||||
## ------------------------------------------------------------------------
|
||||
## ---- warning=FALSE-----------------------------------------------------------
|
||||
outliers = filter(pm25, value > 100)$sensorId
|
||||
bad_sensors = outliers[, drop = T] %>% levels()
|
||||
bad_sensors = outliers[, drop = TRUE] %>% levels()
|
||||
|
||||
pm25 = mutate(pm25, invalid = sensorId %in% bad_sensors)
|
||||
|
||||
## ------------------------------------------------------------------------
|
||||
st_as_sf(pm25) %>% st_geometry() %>% plot(col = factor(pm25$invalid), axes = T)
|
||||
## -----------------------------------------------------------------------------
|
||||
st_as_sf(pm25) %>% st_geometry() %>% plot(col = factor(pm25$invalid), axes = TRUE)
|
||||
|
||||
## ------------------------------------------------------------------------
|
||||
## -----------------------------------------------------------------------------
|
||||
pm25 %>% filter(invalid == FALSE) %>% plot()
|
||||
|
||||
|
|
|
@ -18,7 +18,7 @@ knitr::opts_chunk$set(echo = TRUE)
|
|||
```
|
||||
|
||||
This package provides data ingestion functions for almost any data stored on the
|
||||
open data platform for environemental sensordata <https://opensensemap.org>.
|
||||
open data platform for environmental sensordata <https://opensensemap.org>.
|
||||
Its main goals are to provide means for:
|
||||
|
||||
- big data analysis of the measurements stored on the platform
|
||||
|
@ -28,11 +28,12 @@ Its main goals are to provide means for:
|
|||
Before we look at actual observations, lets get a grasp of the openSenseMap
|
||||
datasets' structure.
|
||||
|
||||
```{r results = F}
|
||||
```{r results = FALSE}
|
||||
library(magrittr)
|
||||
library(opensensmapr)
|
||||
|
||||
all_sensors = osem_boxes()
|
||||
# all_sensors = osem_boxes(cache = '.')
|
||||
all_sensors = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
||||
```
|
||||
```{r}
|
||||
summary(all_sensors)
|
||||
|
@ -47,11 +48,7 @@ couple of minutes ago.
|
|||
Another feature of interest is the spatial distribution of the boxes: `plot()`
|
||||
can help us out here. This function requires a bunch of optional dependencies though.
|
||||
|
||||
```{r message=F, warning=F}
|
||||
if (!require('maps')) install.packages('maps')
|
||||
if (!require('maptools')) install.packages('maptools')
|
||||
if (!require('rgeos')) install.packages('rgeos')
|
||||
|
||||
```{r, message=FALSE, warning=FALSE}
|
||||
plot(all_sensors)
|
||||
```
|
||||
|
||||
|
@ -81,7 +78,7 @@ We should check how many sensor stations provide useful data: We want only those
|
|||
boxes with a PM2.5 sensor, that are placed outdoors and are currently submitting
|
||||
measurements:
|
||||
|
||||
```{r results = F}
|
||||
```{r results = FALSE, eval=FALSE}
|
||||
pm25_sensors = osem_boxes(
|
||||
exposure = 'outdoor',
|
||||
date = Sys.time(), # ±4 hours
|
||||
|
@ -89,6 +86,8 @@ pm25_sensors = osem_boxes(
|
|||
)
|
||||
```
|
||||
```{r}
|
||||
pm25_sensors = readRDS('pm25_sensors.rds') # read precomputed file to save resources
|
||||
|
||||
summary(pm25_sensors)
|
||||
plot(pm25_sensors)
|
||||
```
|
||||
|
@ -97,16 +96,20 @@ Thats still more than 200 measuring stations, we can work with that.
|
|||
|
||||
### Analyzing sensor data
|
||||
Having analyzed the available data sources, let's finally get some measurements.
|
||||
We could call `osem_measurements(pm25_sensors)` now, however we are focussing on
|
||||
We could call `osem_measurements(pm25_sensors)` now, however we are focusing on
|
||||
a restricted area of interest, the city of Berlin.
|
||||
Luckily we can get the measurements filtered by a bounding box:
|
||||
|
||||
```{r}
|
||||
```{r, results=FALSE, message=FALSE}
|
||||
library(sf)
|
||||
library(units)
|
||||
library(lubridate)
|
||||
library(dplyr)
|
||||
|
||||
```
|
||||
|
||||
Since the API takes quite long to return measurements, especially when filtered by space and time, we do not run the following chunks for publication of the package on CRAN.
|
||||
```{r bbox, results = FALSE, eval=FALSE}
|
||||
# construct a bounding box: 12 kilometers around Berlin
|
||||
berlin = st_point(c(13.4034, 52.5120)) %>%
|
||||
st_sfc(crs = 4326) %>%
|
||||
|
@ -114,24 +117,26 @@ berlin = st_point(c(13.4034, 52.5120)) %>%
|
|||
st_buffer(set_units(12, km)) %>%
|
||||
st_transform(4326) %>% # the opensensemap expects WGS 84
|
||||
st_bbox()
|
||||
```
|
||||
```{r results = F}
|
||||
pm25 = osem_measurements(
|
||||
berlin,
|
||||
phenomenon = 'PM2.5',
|
||||
from = now() - days(20), # defaults to 2 days
|
||||
from = now() - days(3), # defaults to 2 days
|
||||
to = now()
|
||||
)
|
||||
|
||||
```
|
||||
|
||||
```{r}
|
||||
pm25 = readRDS('pm25_berlin.rds') # read precomputed file to save resources
|
||||
plot(pm25)
|
||||
```
|
||||
|
||||
Now we can get started with actual spatiotemporal data analysis.
|
||||
First, lets mask the seemingly uncalibrated sensors:
|
||||
|
||||
```{r}
|
||||
```{r, warning=FALSE}
|
||||
outliers = filter(pm25, value > 100)$sensorId
|
||||
bad_sensors = outliers[, drop = T] %>% levels()
|
||||
bad_sensors = outliers[, drop = TRUE] %>% levels()
|
||||
|
||||
pm25 = mutate(pm25, invalid = sensorId %in% bad_sensors)
|
||||
```
|
||||
|
@ -139,7 +144,7 @@ pm25 = mutate(pm25, invalid = sensorId %in% bad_sensors)
|
|||
Then plot the measuring locations, flagging the outliers:
|
||||
|
||||
```{r}
|
||||
st_as_sf(pm25) %>% st_geometry() %>% plot(col = factor(pm25$invalid), axes = T)
|
||||
st_as_sf(pm25) %>% st_geometry() %>% plot(col = factor(pm25$invalid), axes = TRUE)
|
||||
```
|
||||
|
||||
Removing these sensors yields a nicer time series plot:
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@ -1,96 +1,51 @@
|
|||
## ----cache---------------------------------------------------------------
|
||||
b = osem_boxes(cache = tempdir())
|
||||
list.files(tempdir(), pattern = 'osemcache\\..*\\.rds')
|
||||
|
||||
# the next identical request will hit the cache only!
|
||||
b = osem_boxes(cache = tempdir())
|
||||
|
||||
# requests without the cache parameter will still be performed normally
|
||||
b = osem_boxes()
|
||||
|
||||
## ----cache_custom--------------------------------------------------------
|
||||
cacheDir = getwd() # current working directory
|
||||
b = osem_boxes(cache = cacheDir)
|
||||
|
||||
# the next identical request will hit the cache only!
|
||||
b = osem_boxes(cache = cacheDir)
|
||||
|
||||
## ----clearcache----------------------------------------------------------
|
||||
osem_clear_cache() # clears default cache
|
||||
osem_clear_cache(getwd()) # clears a custom cache
|
||||
|
||||
## ----setup, results='hide'-----------------------------------------------
|
||||
# this section requires:
|
||||
## ----setup, results='hide'----------------------------------------------------
|
||||
# this vignette requires:
|
||||
library(opensensmapr)
|
||||
library(jsonlite)
|
||||
library(readr)
|
||||
|
||||
# first get our example data:
|
||||
boxes = osem_boxes(grouptag = 'ifgi')
|
||||
measurements = osem_measurements(boxes, phenomenon = 'PM10')
|
||||
## ----cache--------------------------------------------------------------------
|
||||
b = osem_boxes(grouptag = 'ifgi', cache = tempdir())
|
||||
|
||||
## ----serialize_json------------------------------------------------------
|
||||
# serializing senseBoxes to JSON, and loading from file again:
|
||||
write(jsonlite::serializeJSON(measurements), 'boxes.json')
|
||||
boxes_from_file = jsonlite::unserializeJSON(readr::read_file('boxes.json'))
|
||||
# the next identical request will hit the cache only!
|
||||
b = osem_boxes(grouptag = 'ifgi', cache = tempdir())
|
||||
|
||||
## ----serialize_attrs-----------------------------------------------------
|
||||
# note the toJSON call
|
||||
write(jsonlite::toJSON(measurements), 'boxes_bad.json')
|
||||
boxes_without_attrs = jsonlite::fromJSON('boxes_bad.json')
|
||||
# requests without the cache parameter will still be performed normally
|
||||
b = osem_boxes(grouptag = 'ifgi')
|
||||
|
||||
boxes_with_attrs = osem_as_sensebox(boxes_without_attrs)
|
||||
class(boxes_with_attrs)
|
||||
## ----cachelisting-------------------------------------------------------------
|
||||
list.files(tempdir(), pattern = 'osemcache\\..*\\.rds')
|
||||
|
||||
## ----osem_offline--------------------------------------------------------
|
||||
# offline logic
|
||||
osem_offline = function (func, file, format='rds', ...) {
|
||||
# deserialize if file exists, otherwise download and serialize
|
||||
if (file.exists(file)) {
|
||||
if (format == 'json')
|
||||
jsonlite::unserializeJSON(readr::read_file(file))
|
||||
else
|
||||
readRDS(file)
|
||||
} else {
|
||||
data = func(...)
|
||||
if (format == 'json')
|
||||
write(jsonlite::serializeJSON(data), file = file)
|
||||
else
|
||||
saveRDS(data, file)
|
||||
data
|
||||
}
|
||||
}
|
||||
## ----cache_custom-------------------------------------------------------------
|
||||
cacheDir = getwd() # current working directory
|
||||
b = osem_boxes(grouptag = 'ifgi', cache = cacheDir)
|
||||
|
||||
# wrappers for each download function
|
||||
osem_measurements_offline = function (file, ...) {
|
||||
osem_offline(opensensmapr::osem_measurements, file, ...)
|
||||
}
|
||||
osem_boxes_offline = function (file, ...) {
|
||||
osem_offline(opensensmapr::osem_boxes, file, ...)
|
||||
}
|
||||
osem_box_offline = function (file, ...) {
|
||||
osem_offline(opensensmapr::osem_box, file, ...)
|
||||
}
|
||||
osem_counts_offline = function (file, ...) {
|
||||
osem_offline(opensensmapr::osem_counts, file, ...)
|
||||
}
|
||||
# the next identical request will hit the cache only!
|
||||
b = osem_boxes(grouptag = 'ifgi', cache = cacheDir)
|
||||
|
||||
## ----test----------------------------------------------------------------
|
||||
# first run; will download and save to disk
|
||||
b1 = osem_boxes_offline('mobileboxes.rds', exposure='mobile')
|
||||
## ----clearcache, results='hide'-----------------------------------------------
|
||||
osem_clear_cache() # clears default cache
|
||||
osem_clear_cache(getwd()) # clears a custom cache
|
||||
|
||||
# consecutive runs; will read from disk
|
||||
b2 = osem_boxes_offline('mobileboxes.rds', exposure='mobile')
|
||||
class(b1) == class(b2)
|
||||
## ----data, results='hide', eval=FALSE-----------------------------------------
|
||||
# # first get our example data:
|
||||
# measurements = osem_measurements('Windgeschwindigkeit')
|
||||
|
||||
# we can even omit the arguments now (though thats not really the point here)
|
||||
b3 = osem_boxes_offline('mobileboxes.rds')
|
||||
nrow(b1) == nrow(b3)
|
||||
## ----serialize_json, eval=FALSE-----------------------------------------------
|
||||
# # serializing senseBoxes to JSON, and loading from file again:
|
||||
# write(jsonlite::serializeJSON(measurements), 'measurements.json')
|
||||
# measurements_from_file = jsonlite::unserializeJSON(readr::read_file('measurements.json'))
|
||||
# class(measurements_from_file)
|
||||
|
||||
# verify that the custom sensebox methods are still working
|
||||
summary(b2)
|
||||
plot(b3)
|
||||
## ----serialize_attrs, eval=FALSE----------------------------------------------
|
||||
# # note the toJSON call instead of serializeJSON
|
||||
# write(jsonlite::toJSON(measurements), 'measurements_bad.json')
|
||||
# measurements_without_attrs = jsonlite::fromJSON('measurements_bad.json')
|
||||
# class(measurements_without_attrs)
|
||||
#
|
||||
# measurements_with_attrs = osem_as_measurements(measurements_without_attrs)
|
||||
# class(measurements_with_attrs)
|
||||
|
||||
## ----cleanup, results='hide'---------------------------------------------
|
||||
file.remove('mobileboxes.rds', 'boxes_bad.json', 'boxes.json', 'measurements.rds')
|
||||
## ----cleanup, include=FALSE, eval=FALSE---------------------------------------
|
||||
# file.remove('measurements.json', 'measurements_bad.json')
|
||||
|
||||
|
|
|
@ -10,7 +10,7 @@ vignette: >
|
|||
---
|
||||
|
||||
It may be useful to download data from openSenseMap only once.
|
||||
For reproducible results, the data could be saved to disk, and reloaded at a
|
||||
For reproducible results, the data should be saved to disk, and reloaded at a
|
||||
later point.
|
||||
|
||||
This avoids..
|
||||
|
@ -21,40 +21,49 @@ This avoids..
|
|||
- stress on the openSenseMap-server.
|
||||
|
||||
This vignette shows how to use this built in `opensensmapr` feature, and
|
||||
how to do it yourself, if you want to store to other data formats.
|
||||
how to do it yourself in case you want to save to other data formats.
|
||||
|
||||
## Using openSensMapr Caching Feature
|
||||
```{r setup, results='hide'}
|
||||
# this vignette requires:
|
||||
library(opensensmapr)
|
||||
library(jsonlite)
|
||||
library(readr)
|
||||
```
|
||||
|
||||
## Using the opensensmapr Caching Feature
|
||||
All data retrieval functions of `opensensmapr` have a built in caching feature,
|
||||
which serializes an API response to disk.
|
||||
Subsequent identical requests will then return the serialized data instead of making
|
||||
another request.
|
||||
To do so, each request is given a unique ID based on its parameters.
|
||||
|
||||
To use this feature, just add a path to a directory to the `cache` parameter:
|
||||
```{r cache}
|
||||
b = osem_boxes(cache = tempdir())
|
||||
list.files(tempdir(), pattern = 'osemcache\\..*\\.rds')
|
||||
b = osem_boxes(grouptag = 'ifgi', cache = tempdir())
|
||||
|
||||
# the next identical request will hit the cache only!
|
||||
b = osem_boxes(cache = tempdir())
|
||||
b = osem_boxes(grouptag = 'ifgi', cache = tempdir())
|
||||
|
||||
# requests without the cache parameter will still be performed normally
|
||||
b = osem_boxes()
|
||||
b = osem_boxes(grouptag = 'ifgi')
|
||||
```
|
||||
|
||||
You can maintain multiple caches simultaneously which allows to store only
|
||||
serialized data related to a script in its directory:
|
||||
Looking at the cache directory we can see one file for each request, which is identified through a hash of the request URL:
|
||||
```{r cachelisting}
|
||||
list.files(tempdir(), pattern = 'osemcache\\..*\\.rds')
|
||||
```
|
||||
|
||||
You can maintain multiple caches simultaneously which allows to only store data related to a script in the same directory:
|
||||
```{r cache_custom}
|
||||
cacheDir = getwd() # current working directory
|
||||
b = osem_boxes(cache = cacheDir)
|
||||
b = osem_boxes(grouptag = 'ifgi', cache = cacheDir)
|
||||
|
||||
# the next identical request will hit the cache only!
|
||||
b = osem_boxes(cache = cacheDir)
|
||||
b = osem_boxes(grouptag = 'ifgi', cache = cacheDir)
|
||||
```
|
||||
|
||||
To get fresh results again, just call `osem_clear_cache()` for the respective cache:
|
||||
```{r clearcache}
|
||||
osem_clear_cache() # clears default cache
|
||||
```{r clearcache, results='hide'}
|
||||
osem_clear_cache() # clears default cache
|
||||
osem_clear_cache(getwd()) # clears a custom cache
|
||||
```
|
||||
|
||||
|
@ -62,108 +71,36 @@ osem_clear_cache(getwd()) # clears a custom cache
|
|||
If you want to roll your own serialization method to support custom data formats,
|
||||
here's how:
|
||||
|
||||
```{r setup, results='hide'}
|
||||
# this section requires:
|
||||
library(opensensmapr)
|
||||
library(jsonlite)
|
||||
library(readr)
|
||||
|
||||
```{r data, results='hide', eval=FALSE}
|
||||
# first get our example data:
|
||||
boxes = osem_boxes(grouptag = 'ifgi')
|
||||
measurements = osem_measurements(boxes, phenomenon = 'PM10')
|
||||
measurements = osem_measurements('Windgeschwindigkeit')
|
||||
```
|
||||
|
||||
If you are paranoid and worry about `.rds` files not being decodable anymore
|
||||
in the (distant) future, you could serialize to a plain text format such as JSON.
|
||||
This of course comes at the cost of storage space and performance.
|
||||
```{r serialize_json}
|
||||
```{r serialize_json, eval=FALSE}
|
||||
# serializing senseBoxes to JSON, and loading from file again:
|
||||
write(jsonlite::serializeJSON(measurements), 'boxes.json')
|
||||
boxes_from_file = jsonlite::unserializeJSON(readr::read_file('boxes.json'))
|
||||
write(jsonlite::serializeJSON(measurements), 'measurements.json')
|
||||
measurements_from_file = jsonlite::unserializeJSON(readr::read_file('measurements.json'))
|
||||
class(measurements_from_file)
|
||||
```
|
||||
|
||||
Both methods also persist the R object metadata (classes, attributes).
|
||||
This method also persists the R object metadata (classes, attributes).
|
||||
If you were to use a serialization method that can't persist object metadata, you
|
||||
could re-apply it with the following functions:
|
||||
|
||||
```{r serialize_attrs}
|
||||
# note the toJSON call
|
||||
write(jsonlite::toJSON(measurements), 'boxes_bad.json')
|
||||
boxes_without_attrs = jsonlite::fromJSON('boxes_bad.json')
|
||||
```{r serialize_attrs, eval=FALSE}
|
||||
# note the toJSON call instead of serializeJSON
|
||||
write(jsonlite::toJSON(measurements), 'measurements_bad.json')
|
||||
measurements_without_attrs = jsonlite::fromJSON('measurements_bad.json')
|
||||
class(measurements_without_attrs)
|
||||
|
||||
boxes_with_attrs = osem_as_sensebox(boxes_without_attrs)
|
||||
class(boxes_with_attrs)
|
||||
measurements_with_attrs = osem_as_measurements(measurements_without_attrs)
|
||||
class(measurements_with_attrs)
|
||||
```
|
||||
The same goes for measurements via `osem_as_measurements()`.
|
||||
The same goes for boxes via `osem_as_sensebox()`.
|
||||
|
||||
## Workflow for reproducible code
|
||||
For truly reproducible code you want it to work and return the same results --
|
||||
no matter if you run it the first time or a consecutive time, and without making
|
||||
changes to it.
|
||||
|
||||
Therefore we need a wrapper around the save-to-file & load-from-file logic.
|
||||
The following examples show a way to do just that, and where inspired by
|
||||
[this reproducible analysis by Daniel Nuest](https://github.com/nuest/sensebox-binder).
|
||||
|
||||
```{r osem_offline}
|
||||
# offline logic
|
||||
osem_offline = function (func, file, format='rds', ...) {
|
||||
# deserialize if file exists, otherwise download and serialize
|
||||
if (file.exists(file)) {
|
||||
if (format == 'json')
|
||||
jsonlite::unserializeJSON(readr::read_file(file))
|
||||
else
|
||||
readRDS(file)
|
||||
} else {
|
||||
data = func(...)
|
||||
if (format == 'json')
|
||||
write(jsonlite::serializeJSON(data), file = file)
|
||||
else
|
||||
saveRDS(data, file)
|
||||
data
|
||||
}
|
||||
}
|
||||
|
||||
# wrappers for each download function
|
||||
osem_measurements_offline = function (file, ...) {
|
||||
osem_offline(opensensmapr::osem_measurements, file, ...)
|
||||
}
|
||||
osem_boxes_offline = function (file, ...) {
|
||||
osem_offline(opensensmapr::osem_boxes, file, ...)
|
||||
}
|
||||
osem_box_offline = function (file, ...) {
|
||||
osem_offline(opensensmapr::osem_box, file, ...)
|
||||
}
|
||||
osem_counts_offline = function (file, ...) {
|
||||
osem_offline(opensensmapr::osem_counts, file, ...)
|
||||
}
|
||||
```{r cleanup, include=FALSE, eval=FALSE}
|
||||
file.remove('measurements.json', 'measurements_bad.json')
|
||||
```
|
||||
|
||||
Thats it! Now let's try it out:
|
||||
|
||||
```{r test}
|
||||
# first run; will download and save to disk
|
||||
b1 = osem_boxes_offline('mobileboxes.rds', exposure='mobile')
|
||||
|
||||
# consecutive runs; will read from disk
|
||||
b2 = osem_boxes_offline('mobileboxes.rds', exposure='mobile')
|
||||
class(b1) == class(b2)
|
||||
|
||||
# we can even omit the arguments now (though thats not really the point here)
|
||||
b3 = osem_boxes_offline('mobileboxes.rds')
|
||||
nrow(b1) == nrow(b3)
|
||||
|
||||
# verify that the custom sensebox methods are still working
|
||||
summary(b2)
|
||||
plot(b3)
|
||||
```
|
||||
|
||||
To re-download the data, just clear the files that were created in the process:
|
||||
```{r cleanup, results='hide'}
|
||||
file.remove('mobileboxes.rds', 'boxes_bad.json', 'boxes.json', 'measurements.rds')
|
||||
```
|
||||
|
||||
A possible extension to this scheme comes to mind: Omit the specification of a
|
||||
filename, and assign a unique ID to the request instead.
|
||||
For example, one could calculate the SHA-1 hash of the parameters, and use it
|
||||
as filename.
|
||||
|
|
File diff suppressed because one or more lines are too long
25
man/archive_fetch_measurements.Rd
Normal file
25
man/archive_fetch_measurements.Rd
Normal file
|
@ -0,0 +1,25 @@
|
|||
% Generated by roxygen2: do not edit by hand
|
||||
% Please edit documentation in R/archive.R
|
||||
\name{archive_fetch_measurements}
|
||||
\alias{archive_fetch_measurements}
|
||||
\title{fetch measurements from archive from a single box, and a single sensor}
|
||||
\usage{
|
||||
archive_fetch_measurements(box, sensorId, fromDate, toDate, progress)
|
||||
}
|
||||
\arguments{
|
||||
\item{box}{A sensebox data.frame with a single box}
|
||||
|
||||
\item{sensorId}{Character specifying the sensor}
|
||||
|
||||
\item{fromDate}{Start date for measurement download, must be convertable via `as.Date`.}
|
||||
|
||||
\item{toDate}{End date for measurement download (inclusive).}
|
||||
|
||||
\item{progress}{whether to print progress}
|
||||
}
|
||||
\value{
|
||||
A \code{tbl_df} containing observations of all selected sensors for each time stamp.
|
||||
}
|
||||
\description{
|
||||
fetch measurements from archive from a single box, and a single sensor
|
||||
}
|
|
@ -1,5 +1,5 @@
|
|||
% Generated by roxygen2: do not edit by hand
|
||||
% Please edit documentation in R/measurement_utils.R
|
||||
% Please edit documentation in R/external_generics.R
|
||||
\name{filter.osem_measurements}
|
||||
\alias{filter.osem_measurements}
|
||||
\title{Return rows with matching conditions, while maintaining class & attributes}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
% Generated by roxygen2: do not edit by hand
|
||||
% Please edit documentation in R/box_utils.R
|
||||
% Please edit documentation in R/external_generics.R
|
||||
\name{filter.sensebox}
|
||||
\alias{filter.sensebox}
|
||||
\title{Return rows with matching conditions, while maintaining class & attributes}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
% Generated by roxygen2: do not edit by hand
|
||||
% Please edit documentation in R/measurement_utils.R
|
||||
% Please edit documentation in R/external_generics.R
|
||||
\name{mutate.osem_measurements}
|
||||
\alias{mutate.osem_measurements}
|
||||
\title{Add new variables to the data, while maintaining class & attributes}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
% Generated by roxygen2: do not edit by hand
|
||||
% Please edit documentation in R/box_utils.R
|
||||
% Please edit documentation in R/external_generics.R
|
||||
\name{mutate.sensebox}
|
||||
\alias{mutate.sensebox}
|
||||
\title{Add new variables to the data, while maintaining class & attributes}
|
||||
|
|
|
@ -4,7 +4,6 @@
|
|||
\name{opensensmapr}
|
||||
\alias{opensensmapr}
|
||||
\alias{opensensmapr-package}
|
||||
\alias{opensensmapr-package}
|
||||
\title{opensensmapr: Get sensor data from opensensemap.org}
|
||||
\description{
|
||||
The opensensmapr package provides functions for
|
||||
|
@ -47,16 +46,27 @@ implemented:
|
|||
|
||||
\section{Retrieving measurements}{
|
||||
|
||||
Measurements can be retrieved through \code{\link{osem_measurements}} for a
|
||||
given phenomenon only. A subset of measurements may be selected by
|
||||
|
||||
There are two ways to retrieve measurements:
|
||||
\itemize{
|
||||
\item a list of senseBoxes, previously retrieved through
|
||||
\code{\link{osem_box}} or \code{\link{osem_boxes}}.
|
||||
\item a geographic bounding box, which can be generated with the
|
||||
\code{\link[sf]{sf}} package.
|
||||
\item a time frame
|
||||
\item a exposure type of the given box
|
||||
\item \code{\link{osem_measurements_archive}}:
|
||||
Downloads measurements for a \emph{single box} from the openSenseMap archive.
|
||||
This function does not provide realtime data, but is suitable for long time frames.
|
||||
|
||||
\item \code{\link{osem_measurements}}:
|
||||
This function retrieves (realtime) measurements from the API. It works for a
|
||||
\emph{single phenomenon} only, but provides various filters to select sensors by
|
||||
|
||||
\itemize{
|
||||
\item a list of senseBoxes, previously retrieved through
|
||||
\code{\link{osem_box}} or \code{\link{osem_boxes}}.
|
||||
\item a geographic bounding box, which can be generated with the
|
||||
\code{\link[sf]{sf}} package.
|
||||
\item a time frame
|
||||
\item a exposure type of the given box
|
||||
}
|
||||
|
||||
Use this function with caution for long time frames, as the API becomes
|
||||
quite slow is limited to 10.000 measurements per 30 day interval.
|
||||
}
|
||||
|
||||
Data is returned as \code{tibble} with the class \code{osem_measurements}.
|
||||
|
@ -67,6 +77,16 @@ Data is returned as \code{tibble} with the class \code{osem_measurements}.
|
|||
Count statistics about the database are provided with \code{\link{osem_counts}}.
|
||||
}
|
||||
|
||||
\section{Using a different API instance / endpoint}{
|
||||
|
||||
You can override the functions \code{osem_endpoint} and \code{osem_endpoint_archive}
|
||||
inside the package namespace:
|
||||
|
||||
\code{
|
||||
assignInNamespace("osem_endpoint", function() "http://mynewosem.org", "opensensmapr")
|
||||
}
|
||||
}
|
||||
|
||||
\section{Integration with other packages}{
|
||||
|
||||
The package aims to be compatible with the tidyverse.
|
||||
|
@ -85,18 +105,23 @@ Helpers are implemented to ease the further usage of the retrieved data:
|
|||
}
|
||||
|
||||
\seealso{
|
||||
Report bugs at \url{https://github.com/noerw/opensensmapR/issues}
|
||||
Report bugs at \url{https://github.com/sensebox/opensensmapR/issues}
|
||||
|
||||
openSenseMap API: \url{https://api.opensensemap.org/}
|
||||
|
||||
official openSenseMap API documentation: \url{https://docs.opensensemap.org/}
|
||||
}
|
||||
\author{
|
||||
\strong{Maintainer}: Norwin Roosen \email{hello@nroo.de}
|
||||
\strong{Maintainer}: Jan Stenkamp \email{jan.stenkamp@uni-muenster.de} [contributor]
|
||||
|
||||
Authors:
|
||||
\itemize{
|
||||
\item Norwin Roosen \email{hello@nroo.de}
|
||||
}
|
||||
|
||||
Other contributors:
|
||||
\itemize{
|
||||
\item Daniel Nuest \email{daniel.nuest@uni-muenster.de} (0000-0003-2392-6140) [contributor]
|
||||
\item Daniel Nuest \email{daniel.nuest@uni-muenster.de} (\href{https://orcid.org/0000-0003-2392-6140}{ORCID}) [contributor]
|
||||
}
|
||||
|
||||
}
|
||||
|
|
15
man/osem_archive_endpoint.Rd
Normal file
15
man/osem_archive_endpoint.Rd
Normal file
|
@ -0,0 +1,15 @@
|
|||
% Generated by roxygen2: do not edit by hand
|
||||
% Please edit documentation in R/archive.R
|
||||
\name{osem_archive_endpoint}
|
||||
\alias{osem_archive_endpoint}
|
||||
\title{Returns the default endpoint for the archive *download*
|
||||
While the front end domain is archive.opensensemap.org, file downloads
|
||||
are provided via sciebo.}
|
||||
\usage{
|
||||
osem_archive_endpoint()
|
||||
}
|
||||
\description{
|
||||
Returns the default endpoint for the archive *download*
|
||||
While the front end domain is archive.opensensemap.org, file downloads
|
||||
are provided via sciebo.
|
||||
}
|
|
@ -7,7 +7,11 @@
|
|||
osem_as_measurements(x)
|
||||
}
|
||||
\arguments{
|
||||
\item{x}{A data.frame to attach the class to}
|
||||
\item{x}{A data.frame to attach the class to.
|
||||
Should have at least a `value` and `createdAt` column.}
|
||||
}
|
||||
\value{
|
||||
data.frame of class \code{osem_measurements}
|
||||
}
|
||||
\description{
|
||||
Converts a foreign object to an osem_measurements data.frame.
|
||||
|
|
|
@ -9,6 +9,9 @@ osem_as_sensebox(x)
|
|||
\arguments{
|
||||
\item{x}{A data.frame to attach the class to}
|
||||
}
|
||||
\value{
|
||||
data.frame of class \code{sensebox}
|
||||
}
|
||||
\description{
|
||||
Converts a foreign object to a sensebox data.frame.
|
||||
}
|
||||
|
|
|
@ -21,7 +21,7 @@ A \code{sensebox data.frame} containing a box in each row
|
|||
Get a single senseBox by its ID
|
||||
}
|
||||
\examples{
|
||||
\donttest{
|
||||
\dontrun{
|
||||
# get a specific box by ID
|
||||
b = osem_box('57000b8745fd40c8196ad04c')
|
||||
|
||||
|
|
19
man/osem_box_to_archivename.Rd
Normal file
19
man/osem_box_to_archivename.Rd
Normal file
|
@ -0,0 +1,19 @@
|
|||
% Generated by roxygen2: do not edit by hand
|
||||
% Please edit documentation in R/archive.R
|
||||
\name{osem_box_to_archivename}
|
||||
\alias{osem_box_to_archivename}
|
||||
\title{replace chars in box name according to archive script:
|
||||
https://github.com/sensebox/osem-archiver/blob/612e14b/helpers.sh#L66}
|
||||
\usage{
|
||||
osem_box_to_archivename(box)
|
||||
}
|
||||
\arguments{
|
||||
\item{box}{A sensebox data.frame}
|
||||
}
|
||||
\value{
|
||||
character with archive identifier for each box
|
||||
}
|
||||
\description{
|
||||
replace chars in box name according to archive script:
|
||||
https://github.com/sensebox/osem-archiver/blob/612e14b/helpers.sh#L66
|
||||
}
|
|
@ -4,9 +4,19 @@
|
|||
\alias{osem_boxes}
|
||||
\title{Get a set of senseBoxes from the openSenseMap}
|
||||
\usage{
|
||||
osem_boxes(exposure = NA, model = NA, grouptag = NA, date = NA,
|
||||
from = NA, to = NA, phenomenon = NA, endpoint = osem_endpoint(),
|
||||
progress = TRUE, cache = NA)
|
||||
osem_boxes(
|
||||
exposure = NA,
|
||||
model = NA,
|
||||
grouptag = NA,
|
||||
date = NA,
|
||||
from = NA,
|
||||
to = NA,
|
||||
phenomenon = NA,
|
||||
bbox = NA,
|
||||
endpoint = osem_endpoint(),
|
||||
progress = TRUE,
|
||||
cache = NA
|
||||
)
|
||||
}
|
||||
\arguments{
|
||||
\item{exposure}{Only return boxes with the given exposure ('indoor', 'outdoor', 'mobile')}
|
||||
|
@ -24,6 +34,11 @@ osem_boxes(exposure = NA, model = NA, grouptag = NA, date = NA,
|
|||
\item{phenomenon}{Only return boxes that measured the given phenomenon in the
|
||||
time interval as specified through \code{date} or \code{from / to}}
|
||||
|
||||
\item{bbox}{Only return boxes that are within the given boundingbox,
|
||||
vector of 4 WGS84 coordinates.
|
||||
Order is: longitude southwest, latitude southwest, longitude northeast, latitude northeast.
|
||||
Minimal and maximal values are: -180, 180 for longitude and -90, 90 for latitude.}
|
||||
|
||||
\item{endpoint}{The URL of the openSenseMap API instance}
|
||||
|
||||
\item{progress}{Whether to print download progress information, defaults to \code{TRUE}}
|
||||
|
@ -46,7 +61,7 @@ Note that some filters do not work together:
|
|||
}
|
||||
\examples{
|
||||
|
||||
\donttest{
|
||||
\dontrun{
|
||||
# get *all* boxes available on the API
|
||||
b = osem_boxes()
|
||||
|
||||
|
|
|
@ -17,11 +17,12 @@ Boolean whether the deletion was successful
|
|||
Purge cached responses from the given cache directory
|
||||
}
|
||||
\examples{
|
||||
\donttest{
|
||||
\dontrun{
|
||||
osem_boxes(cache = tempdir())
|
||||
osem_clear_cache()
|
||||
|
||||
cachedir = paste(getwd(), 'osemcache', sep = '/')
|
||||
dir.create(file.path(cachedir), showWarnings = FALSE)
|
||||
osem_boxes(cache = cachedir)
|
||||
osem_clear_cache(cachedir)
|
||||
}
|
||||
|
|
17
man/osem_ensure_api_available.Rd
Normal file
17
man/osem_ensure_api_available.Rd
Normal file
|
@ -0,0 +1,17 @@
|
|||
% Generated by roxygen2: do not edit by hand
|
||||
% Please edit documentation in R/api.R
|
||||
\name{osem_ensure_api_available}
|
||||
\alias{osem_ensure_api_available}
|
||||
\title{Check if the given openSenseMap API endpoint is available}
|
||||
\usage{
|
||||
osem_ensure_api_available(endpoint = osem_endpoint())
|
||||
}
|
||||
\arguments{
|
||||
\item{endpoint}{The API base URL to check, defaulting to \code{\link{osem_endpoint}}}
|
||||
}
|
||||
\value{
|
||||
\code{TRUE} if the API is available, otherwise \code{stop()} is called.
|
||||
}
|
||||
\description{
|
||||
Check if the given openSenseMap API endpoint is available
|
||||
}
|
17
man/osem_ensure_archive_available.Rd
Normal file
17
man/osem_ensure_archive_available.Rd
Normal file
|
@ -0,0 +1,17 @@
|
|||
% Generated by roxygen2: do not edit by hand
|
||||
% Please edit documentation in R/archive.R
|
||||
\name{osem_ensure_archive_available}
|
||||
\alias{osem_ensure_archive_available}
|
||||
\title{Check if the given openSenseMap archive endpoint is available}
|
||||
\usage{
|
||||
osem_ensure_archive_available(endpoint = osem_archive_endpoint())
|
||||
}
|
||||
\arguments{
|
||||
\item{endpoint}{The archive base URL to check, defaulting to \code{\link{osem_archive_endpoint}}}
|
||||
}
|
||||
\value{
|
||||
\code{TRUE} if the archive is available, otherwise \code{stop()} is called.
|
||||
}
|
||||
\description{
|
||||
Check if the given openSenseMap archive endpoint is available
|
||||
}
|
|
@ -5,19 +5,37 @@
|
|||
\alias{osem_measurements.default}
|
||||
\alias{osem_measurements.bbox}
|
||||
\alias{osem_measurements.sensebox}
|
||||
\title{Get the Measurements of a Phenomenon on opensensemap.org}
|
||||
\title{Fetch the Measurements of a Phenomenon on opensensemap.org}
|
||||
\usage{
|
||||
osem_measurements(x, ...)
|
||||
|
||||
\method{osem_measurements}{default}(x, ...)
|
||||
|
||||
\method{osem_measurements}{bbox}(x, phenomenon, exposure = NA, from = NA,
|
||||
to = NA, columns = NA, ..., endpoint = osem_endpoint(), progress = T,
|
||||
cache = NA)
|
||||
\method{osem_measurements}{bbox}(
|
||||
x,
|
||||
phenomenon,
|
||||
exposure = NA,
|
||||
from = NA,
|
||||
to = NA,
|
||||
columns = NA,
|
||||
...,
|
||||
endpoint = osem_endpoint(),
|
||||
progress = TRUE,
|
||||
cache = NA
|
||||
)
|
||||
|
||||
\method{osem_measurements}{sensebox}(x, phenomenon, exposure = NA,
|
||||
from = NA, to = NA, columns = NA, ..., endpoint = osem_endpoint(),
|
||||
progress = T, cache = NA)
|
||||
\method{osem_measurements}{sensebox}(
|
||||
x,
|
||||
phenomenon,
|
||||
exposure = NA,
|
||||
from = NA,
|
||||
to = NA,
|
||||
columns = NA,
|
||||
...,
|
||||
endpoint = osem_endpoint(),
|
||||
progress = TRUE,
|
||||
cache = NA
|
||||
)
|
||||
}
|
||||
\arguments{
|
||||
\item{x}{Depending on the method, either
|
||||
|
@ -58,15 +76,15 @@ a bounding box spanning the whole world.
|
|||
}
|
||||
\section{Methods (by class)}{
|
||||
\itemize{
|
||||
\item \code{default}: Get measurements from \strong{all} senseBoxes.
|
||||
\item \code{osem_measurements(default)}: Get measurements from \strong{all} senseBoxes.
|
||||
|
||||
\item \code{bbox}: Get measurements by a spatial filter.
|
||||
\item \code{osem_measurements(bbox)}: Get measurements by a spatial filter.
|
||||
|
||||
\item \code{osem_measurements(sensebox)}: Get measurements from a set of senseBoxes.
|
||||
|
||||
\item \code{sensebox}: Get measurements from a set of senseBoxes.
|
||||
}}
|
||||
|
||||
\examples{
|
||||
\donttest{
|
||||
\dontrun{
|
||||
# get measurements from all boxes on the phenomenon 'PM10' from the last 48h
|
||||
m = osem_measurements('PM10')
|
||||
|
||||
|
@ -89,7 +107,7 @@ a bounding box spanning the whole world.
|
|||
'height'
|
||||
))
|
||||
}
|
||||
\donttest{
|
||||
\dontrun{
|
||||
# get measurements from sensors within a custom WGS84 bounding box
|
||||
bbox = structure(c(7, 51, 8, 52), class = 'bbox')
|
||||
m = osem_measurements(bbox, 'Temperatur')
|
||||
|
@ -97,6 +115,7 @@ a bounding box spanning the whole world.
|
|||
# construct a bounding box 12km around berlin using the sf package,
|
||||
# and get measurements from stations within that box
|
||||
library(sf)
|
||||
library(units)
|
||||
bbox2 = st_point(c(13.4034, 52.5120)) \%>\%
|
||||
st_sfc(crs = 4326) \%>\%
|
||||
st_transform(3857) \%>\% # allow setting a buffer in meters
|
||||
|
|
75
man/osem_measurements_archive.Rd
Normal file
75
man/osem_measurements_archive.Rd
Normal file
|
@ -0,0 +1,75 @@
|
|||
% Generated by roxygen2: do not edit by hand
|
||||
% Please edit documentation in R/archive.R
|
||||
\name{osem_measurements_archive}
|
||||
\alias{osem_measurements_archive}
|
||||
\alias{osem_measurements_archive.sensebox}
|
||||
\title{Fetch day-wise measurements for a single box from the openSenseMap archive.}
|
||||
\usage{
|
||||
osem_measurements_archive(x, ...)
|
||||
|
||||
\method{osem_measurements_archive}{sensebox}(
|
||||
x,
|
||||
fromDate,
|
||||
toDate = fromDate,
|
||||
sensorFilter = ~TRUE,
|
||||
...,
|
||||
progress = TRUE
|
||||
)
|
||||
}
|
||||
\arguments{
|
||||
\item{x}{A `sensebox data.frame` of a single box, as retrieved via \code{\link{osem_box}},
|
||||
to download measurements for.}
|
||||
|
||||
\item{...}{see parameters below}
|
||||
|
||||
\item{fromDate}{Start date for measurement download, must be convertable via `as.Date`.}
|
||||
|
||||
\item{toDate}{End date for measurement download (inclusive).}
|
||||
|
||||
\item{sensorFilter}{A NSE formula matching to \code{x$sensors}, selecting a subset of sensors.}
|
||||
|
||||
\item{progress}{Whether to print download progress information, defaults to \code{TRUE}.}
|
||||
}
|
||||
\value{
|
||||
A \code{tbl_df} containing observations of all selected sensors for each time stamp.
|
||||
}
|
||||
\description{
|
||||
This function is significantly faster than \code{\link{osem_measurements}} for large
|
||||
time-frames, as daily CSV dumps for each sensor from
|
||||
\href{https://archive.opensensemap.org}{archive.opensensemap.org} are used.
|
||||
Note that the latest data available is from the previous day.
|
||||
}
|
||||
\details{
|
||||
By default, data for all sensors of a box is fetched, but you can select a
|
||||
subset with a \code{\link[dplyr]{dplyr}}-style NSE filter expression.
|
||||
|
||||
The function will warn when no data is available in the selected period,
|
||||
but continue the remaining download.
|
||||
}
|
||||
\section{Methods (by class)}{
|
||||
\itemize{
|
||||
\item \code{osem_measurements_archive(sensebox)}: Get daywise measurements for one or more sensors of a single box.
|
||||
|
||||
}}
|
||||
\examples{
|
||||
\donttest{
|
||||
# fetch measurements for a single day
|
||||
box = osem_box('593bcd656ccf3b0011791f5a')
|
||||
m = osem_measurements_archive(box, as.POSIXlt('2018-09-13'))
|
||||
|
||||
# fetch measurements for a date range and selected sensors
|
||||
sensors = ~ phenomenon \%in\% c('Temperatur', 'Beleuchtungsstärke')
|
||||
m = osem_measurements_archive(
|
||||
box,
|
||||
as.POSIXlt('2018-09-01'), as.POSIXlt('2018-09-30'),
|
||||
sensorFilter = sensors
|
||||
)
|
||||
}
|
||||
}
|
||||
\seealso{
|
||||
\href{https://archive.opensensemap.org}{openSenseMap archive}
|
||||
|
||||
\code{\link{osem_measurements}}
|
||||
|
||||
\code{\link{osem_box}}
|
||||
}
|
|
@ -21,21 +21,21 @@ Get the counts of sensors for each observed phenomenon.
|
|||
}
|
||||
\section{Methods (by class)}{
|
||||
\itemize{
|
||||
\item \code{sensebox}: Get counts of sensors observing each phenomenon
|
||||
\item \code{osem_phenomena(sensebox)}: Get counts of sensors observing each phenomenon
|
||||
from a set of senseBoxes.
|
||||
}}
|
||||
|
||||
}}
|
||||
\examples{
|
||||
# get the phenomena for a single senseBox
|
||||
osem_phenomena(osem_box('593bcd656ccf3b0011791f5a'))
|
||||
|
||||
# get the phenomena for a group of senseBoxes
|
||||
osem_phenomena(
|
||||
osem_boxes(grouptag = 'ifgi', exposure = 'outdoor', date = Sys.time())
|
||||
)
|
||||
|
||||
# get phenomena with at least 30 sensors on opensensemap
|
||||
\donttest{
|
||||
# get the phenomena for a group of senseBoxes
|
||||
osem_phenomena(
|
||||
osem_boxes(grouptag = 'ifgi', exposure = 'outdoor', date = Sys.time())
|
||||
)
|
||||
|
||||
# get phenomena with at least 30 sensors on opensensemap
|
||||
phenoms = osem_phenomena(osem_boxes())
|
||||
names(phenoms[phenoms > 29])
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
% Generated by roxygen2: do not edit by hand
|
||||
% Please edit documentation in R/measurement_utils.R
|
||||
% Please edit documentation in R/external_generics.R
|
||||
\name{st_as_sf.osem_measurements}
|
||||
\alias{st_as_sf.osem_measurements}
|
||||
\title{Convert a \code{osem_measurements} dataframe to an \code{\link[sf]{st_sf}} object.}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
% Generated by roxygen2: do not edit by hand
|
||||
% Please edit documentation in R/box_utils.R
|
||||
% Please edit documentation in R/external_generics.R
|
||||
\name{st_as_sf.sensebox}
|
||||
\alias{st_as_sf.sensebox}
|
||||
\title{Convert a \code{sensebox} dataframe to an \code{\link[sf]{st_sf}} object.}
|
||||
|
|
7
tests/testthat/test_api.R
Normal file
7
tests/testthat/test_api.R
Normal file
|
@ -0,0 +1,7 @@
|
|||
context('API error handling')
|
||||
|
||||
test_that('unavailable API yields informative error message', {
|
||||
expect_error({
|
||||
osem_boxes(endpoint = 'example.zip')
|
||||
}, 'The API at example.zip is currently not available')
|
||||
})
|
66
tests/testthat/test_archive.R
Normal file
66
tests/testthat/test_archive.R
Normal file
|
@ -0,0 +1,66 @@
|
|||
source('testhelpers.R')
|
||||
|
||||
context('osem_box_to_archivename()')
|
||||
|
||||
try({
|
||||
boxes = osem_boxes(grouptag = 'ifgi')
|
||||
box = osem_box('593bcd656ccf3b0011791f5a')
|
||||
})
|
||||
|
||||
test_that('osem_box_to_archive_name does the correct character replacements', {
|
||||
b = data.frame(
|
||||
name = 'aA1._- äß!"?$%&/',
|
||||
X_id = 'UUID'
|
||||
)
|
||||
|
||||
archivename = opensensmapr:::osem_box_to_archivename(b)
|
||||
expect_equal(archivename, 'UUID-aA1._-__________')
|
||||
})
|
||||
|
||||
test_that('osem_box_to_archive_name works for one box', {
|
||||
check_api()
|
||||
if (is.null(box)) skip('no box data could be fetched')
|
||||
|
||||
archivename = opensensmapr:::osem_box_to_archivename(box)
|
||||
expect_length(archivename, 1)
|
||||
expect_type(archivename, 'character')
|
||||
})
|
||||
|
||||
test_that('osem_box_to_archive_name works for multiple boxes', {
|
||||
check_api()
|
||||
if (is.null(boxes)) skip('no box data available')
|
||||
|
||||
archivename = opensensmapr:::osem_box_to_archivename(boxes)
|
||||
expect_length(archivename, nrow(boxes))
|
||||
expect_type(archivename, 'character')
|
||||
})
|
||||
|
||||
context('osem_measurements_archive()')
|
||||
|
||||
test_that('osem_measurements_archive works for one box', {
|
||||
check_api()
|
||||
if (is.null(box)) skip('no box data could be fetched')
|
||||
|
||||
m = osem_measurements_archive(box, as.POSIXlt('2018-08-08'))
|
||||
expect_length(m, nrow(box$sensors[[1]]) + 1) # one column for each sensor + createdAt
|
||||
expect_s3_class(m, c('data.frame'))
|
||||
})
|
||||
|
||||
test_that('osem_measurements_archive sensorFilter works for one box', {
|
||||
check_api()
|
||||
if (is.null(box)) skip('no box data could be fetched')
|
||||
|
||||
m = osem_measurements_archive(box, as.POSIXlt('2018-08-08'), sensorFilter = ~ phenomenon == 'Temperatur')
|
||||
expect_length(m, 2) # one column for Temperatur + createdAt
|
||||
expect_s3_class(m, c('data.frame'))
|
||||
})
|
||||
|
||||
test_that('osem_measurements_archive fails for multiple boxes', {
|
||||
check_api()
|
||||
if (is.null(boxes)) skip('no box data available')
|
||||
|
||||
expect_error(
|
||||
osem_measurements_archive(boxes, as.POSIXlt('2018-08-08')),
|
||||
'this function only works for exactly one senseBox!'
|
||||
)
|
||||
})
|
|
@ -2,20 +2,44 @@ source('testhelpers.R')
|
|||
context('box')
|
||||
|
||||
try({
|
||||
boxes = osem_boxes()
|
||||
box = osem_box('57000b8745fd40c8196ad04c')
|
||||
})
|
||||
|
||||
test_that('a single box can be retrieved by ID', {
|
||||
|
||||
test_that('required box attributes are correctly parsed', {
|
||||
check_api()
|
||||
|
||||
expect_is(box$X_id, 'character')
|
||||
expect_is(box$name, 'character')
|
||||
expect_is(box$exposure, 'character')
|
||||
expect_is(box$model, 'character')
|
||||
expect_is(box$lat, 'numeric')
|
||||
expect_is(box$lon, 'numeric')
|
||||
expect_is(box$createdAt, 'POSIXct')
|
||||
})
|
||||
|
||||
box = osem_box(boxes$X_id[[1]])
|
||||
|
||||
expect_true('sensebox' %in% class(box))
|
||||
expect_true('data.frame' %in% class(box))
|
||||
expect_true(nrow(box) == 1)
|
||||
expect_true(box$X_id == boxes$X_id[[1]])
|
||||
expect_silent(osem_box(boxes$X_id[[1]]))
|
||||
test_that('optional box attributes are correctly parsed', {
|
||||
check_api()
|
||||
|
||||
completebox = osem_box('5a676e49411a790019290f94') # all fields populated
|
||||
expect_is(completebox$description, 'character')
|
||||
expect_is(completebox$grouptag, 'character')
|
||||
expect_is(completebox$weblink, 'character')
|
||||
expect_is(completebox$updatedAt, 'POSIXct')
|
||||
expect_is(completebox$lastMeasurement, 'POSIXct')
|
||||
expect_is(completebox$height, c('numeric', 'integer'))
|
||||
expect_is(completebox$phenomena, 'list')
|
||||
expect_is(completebox$phenomena[[1]], 'character')
|
||||
expect_is(completebox$sensors, 'list')
|
||||
expect_is(completebox$sensors[[1]], 'data.frame')
|
||||
|
||||
# box with older schema, not recently updated..
|
||||
oldbox = osem_box('539fec94a8341554157931d7')
|
||||
expect_null(oldbox$description)
|
||||
expect_null(oldbox$grouptag)
|
||||
expect_null(oldbox$weblink)
|
||||
expect_null(oldbox$height)
|
||||
expect_null(oldbox$lastMeasurement)
|
||||
})
|
||||
|
||||
test_that('unknown box throws', {
|
||||
|
@ -25,13 +49,10 @@ test_that('unknown box throws', {
|
|||
expect_error(osem_box('57000b8745fd40c800000000'), 'not found')
|
||||
})
|
||||
|
||||
test_that('[.sensebox maintains attributes', {
|
||||
check_api()
|
||||
|
||||
expect_true(all(attributes(boxes[1:nrow(boxes), ]) %in% attributes(boxes)))
|
||||
})
|
||||
|
||||
test_that("print.sensebox filters important attributes for a single box", {
|
||||
check_api()
|
||||
|
||||
msg = capture.output({
|
||||
print(box)
|
||||
})
|
||||
|
@ -39,6 +60,8 @@ test_that("print.sensebox filters important attributes for a single box", {
|
|||
})
|
||||
|
||||
test_that("summary.sensebox outputs all metrics for a single box", {
|
||||
check_api()
|
||||
|
||||
msg = capture.output({
|
||||
summary(box)
|
||||
})
|
||||
|
|
|
@ -1,14 +1,17 @@
|
|||
source('testhelpers.R')
|
||||
context('boxes')
|
||||
|
||||
try({
|
||||
boxes = osem_boxes()
|
||||
})
|
||||
|
||||
test_that('a list of all boxes can be retrieved and returns a sensebox data.frame', {
|
||||
check_api()
|
||||
|
||||
boxes = osem_boxes()
|
||||
expect_true(is.data.frame(boxes))
|
||||
expect_true(is.factor(boxes$model))
|
||||
expect_true(is.character(boxes$name))
|
||||
expect_length(names(boxes), 14)
|
||||
expect_length(names(boxes), 18)
|
||||
expect_true(any('sensebox' %in% class(boxes)))
|
||||
})
|
||||
|
||||
|
@ -20,53 +23,60 @@ test_that('both from and to are required when requesting boxes, error otherwise'
|
|||
test_that('a list of boxes with phenomenon filter returns only the requested phenomenon', {
|
||||
check_api()
|
||||
|
||||
boxes = osem_boxes(phenomenon = 'Temperatur', date=Sys.time())
|
||||
expect_true(all(grep('Temperatur', boxes$phenomena)))
|
||||
boxes_phen = osem_boxes(phenomenon = 'Temperatur', date = Sys.time())
|
||||
expect_true(all(grep('Temperatur', boxes_phen$phenomena)))
|
||||
})
|
||||
|
||||
test_that('a list of boxes with exposure filter returns only the requested exposure', {
|
||||
check_api()
|
||||
|
||||
boxes = osem_boxes(exposure = 'mobile')
|
||||
expect_true(all(boxes$exposure == 'mobile'))
|
||||
boxes_exp = osem_boxes(exposure = 'mobile')
|
||||
expect_true(all(boxes_exp$exposure == 'mobile'))
|
||||
})
|
||||
|
||||
test_that('a list of boxes with model filter returns only the requested model', {
|
||||
check_api()
|
||||
|
||||
boxes = osem_boxes(model = 'homeWifi')
|
||||
expect_true(all(boxes$model == 'homeWifi'))
|
||||
boxes_mod = osem_boxes(model = 'homeWifi')
|
||||
expect_true(all(boxes_mod$model == 'homeWifi'))
|
||||
})
|
||||
|
||||
test_that('box query can combine exposure and model filter', {
|
||||
check_api()
|
||||
|
||||
boxes = osem_boxes(exposure = 'mobile', model = 'homeWifi')
|
||||
expect_true(all(boxes$model == 'homeWifi'))
|
||||
expect_true(all(boxes$exposure == 'mobile'))
|
||||
boxes_com = osem_boxes(exposure = 'mobile', model = 'homeWifi')
|
||||
expect_true(all(boxes_com$model == 'homeWifi'))
|
||||
expect_true(all(boxes_com$exposure == 'mobile'))
|
||||
})
|
||||
|
||||
test_that('a list of boxes with grouptype returns only boxes of that group', {
|
||||
check_api()
|
||||
|
||||
boxes = osem_boxes(grouptag = 'codeformuenster')
|
||||
expect_true(all(boxes$grouptag == 'codeformuenster'))
|
||||
boxes_gro = osem_boxes(grouptag = 'codeformuenster')
|
||||
expect_true(all(boxes_gro$grouptag == 'codeformuenster'))
|
||||
})
|
||||
|
||||
test_that('a list of boxes within a bbox only returns boxes within that bbox', {
|
||||
check_api()
|
||||
|
||||
boxes_box = osem_boxes(bbox = c(7.8, 51.8, 8.0, 52.0))
|
||||
expect_true(all(boxes_box$lon > 7.8 & boxes_box$lon < 8.0 & boxes_box$lat > 51.8 & boxes_box$lat < 52.0))
|
||||
})
|
||||
|
||||
test_that('endpoint can be (mis)configured', {
|
||||
check_api()
|
||||
|
||||
expect_error(osem_boxes(endpoint = 'http://not.the.opensensemap.org'), 'resolve host')
|
||||
expect_error(osem_boxes(endpoint = 'http://not.the.opensensemap.org'), 'The API at http://not.the.opensensemap.org is currently not available.')
|
||||
})
|
||||
|
||||
test_that('a response with no matches returns empty sensebox data.frame', {
|
||||
check_api()
|
||||
|
||||
suppressWarnings({
|
||||
boxes = osem_boxes(grouptag = 'does_not_exist')
|
||||
boxes_gro = osem_boxes(grouptag = 'does_not_exist')
|
||||
})
|
||||
expect_true(is.data.frame(boxes))
|
||||
expect_true(any('sensebox' %in% class(boxes)))
|
||||
expect_true(is.data.frame(boxes_gro))
|
||||
expect_true(any('sensebox' %in% class(boxes_gro)))
|
||||
})
|
||||
|
||||
test_that('a response with no matches gives a warning', {
|
||||
|
@ -83,7 +93,7 @@ test_that('data.frame can be converted to sensebox data.frame', {
|
|||
test_that('boxes can be converted to sf object', {
|
||||
check_api()
|
||||
|
||||
boxes = osem_boxes()
|
||||
# boxes = osem_boxes()
|
||||
boxes_sf = sf::st_as_sf(boxes)
|
||||
|
||||
expect_true(all(sf::st_is_simple(boxes_sf)))
|
||||
|
@ -93,7 +103,7 @@ test_that('boxes can be converted to sf object', {
|
|||
test_that('boxes converted to sf object keep all attributes', {
|
||||
check_api()
|
||||
|
||||
boxes = osem_boxes()
|
||||
# boxes = osem_boxes()
|
||||
boxes_sf = sf::st_as_sf(boxes)
|
||||
|
||||
# coord columns get removed!
|
||||
|
@ -117,7 +127,7 @@ test_that('box retrieval does not give progress information in non-interactive m
|
|||
test_that('print.sensebox filters important attributes for a set of boxes', {
|
||||
check_api()
|
||||
|
||||
boxes = osem_boxes()
|
||||
# boxes = osem_boxes()
|
||||
msg = capture.output({
|
||||
print(boxes)
|
||||
})
|
||||
|
@ -127,7 +137,7 @@ test_that('print.sensebox filters important attributes for a set of boxes', {
|
|||
test_that('summary.sensebox outputs all metrics for a set of boxes', {
|
||||
check_api()
|
||||
|
||||
boxes = osem_boxes()
|
||||
# boxes = osem_boxes()
|
||||
msg = capture.output({
|
||||
summary(boxes)
|
||||
})
|
||||
|
@ -165,3 +175,45 @@ test_that('requests can be cached', {
|
|||
osem_clear_cache()
|
||||
expect_length(list.files(tempdir(), pattern = 'osemcache\\..*\\.rds'), 0)
|
||||
})
|
||||
|
||||
context('single box from boxes')
|
||||
test_that('a single box can be retrieved by ID', {
|
||||
check_api()
|
||||
|
||||
box = osem_box(boxes$X_id[[1]])
|
||||
|
||||
expect_true('sensebox' %in% class(box))
|
||||
expect_true('data.frame' %in% class(box))
|
||||
expect_true(nrow(box) == 1)
|
||||
expect_true(box$X_id == boxes$X_id[[1]])
|
||||
expect_silent(osem_box(boxes$X_id[[1]]))
|
||||
})
|
||||
|
||||
test_that('[.sensebox maintains attributes', {
|
||||
check_api()
|
||||
|
||||
expect_true(all(attributes(boxes[1:nrow(boxes), ]) %in% attributes(boxes)))
|
||||
})
|
||||
|
||||
context('measurements boxes')
|
||||
test_that('measurements of specific boxes can be retrieved for one phenomenon and returns a measurements data.frame', {
|
||||
check_api()
|
||||
|
||||
# fix for subsetting
|
||||
class(boxes) = c('data.frame')
|
||||
three_boxes = boxes[1:3, ]
|
||||
class(boxes) = c('sensebox', 'data.frame')
|
||||
three_boxes = osem_as_sensebox(three_boxes)
|
||||
phens = names(osem_phenomena(three_boxes))
|
||||
|
||||
measurements = osem_measurements(x = three_boxes, phenomenon = phens[[1]])
|
||||
expect_true(is.data.frame(measurements))
|
||||
expect_true('osem_measurements' %in% class(measurements))
|
||||
})
|
||||
|
||||
test_that('phenomenon is required when requesting measurements, error otherwise', {
|
||||
check_api()
|
||||
|
||||
expect_error(osem_measurements(boxes), 'Parameter "phenomenon" is required')
|
||||
})
|
||||
|
||||
|
|
|
@ -1,15 +1,13 @@
|
|||
source('testhelpers.R')
|
||||
context('measurements')
|
||||
|
||||
try({
|
||||
boxes = osem_boxes()
|
||||
})
|
||||
|
||||
test_that('measurements can be retrieved for a phenomenon', {
|
||||
check_api()
|
||||
|
||||
measurements = osem_measurements('Windgeschwindigkeit')
|
||||
measurements = osem_measurements(x = 'Windgeschwindigkeit')
|
||||
expect_true(is.data.frame(measurements))
|
||||
expect_true(tibble::is_tibble(measurements))
|
||||
expect_true('osem_measurements' %in% class(measurements))
|
||||
})
|
||||
|
||||
|
@ -27,12 +25,7 @@ test_that('measurement retrieval does not give progress information in non-inter
|
|||
test_that('a response with no matching senseBoxes gives an error', {
|
||||
check_api()
|
||||
|
||||
expect_error(osem_measurements(x = 'Windgeschwindigkeit', exposure = 'indoor'), 'No senseBoxes found')
|
||||
})
|
||||
|
||||
test_that('data.frame can be converted to measurements data.frame', {
|
||||
df = osem_as_measurements(data.frame(c(1, 2), c('a', 'b')))
|
||||
expect_equal(class(df), c('osem_measurements', 'data.frame'))
|
||||
expect_error(osem_measurements(x = 'foobar', exposure = 'indoor'), 'No senseBoxes found')
|
||||
})
|
||||
|
||||
test_that('columns can be specified for phenomena', {
|
||||
|
@ -51,20 +44,6 @@ test_that('measurements can be retrieved for a phenomenon and exposure', {
|
|||
expect_equal(nrow(measurements), 0)
|
||||
})
|
||||
|
||||
test_that('measurements of specific boxes can be retrieved for one phenomenon and returns a measurements data.frame', {
|
||||
check_api()
|
||||
|
||||
# fix for subsetting
|
||||
class(boxes) = c('data.frame')
|
||||
three_boxes = boxes[1:3, ]
|
||||
class(boxes) = c('sensebox', 'data.frame')
|
||||
three_boxes = osem_as_sensebox(three_boxes)
|
||||
phens = names(osem_phenomena(three_boxes))
|
||||
|
||||
measurements = osem_measurements(x = three_boxes, phenomenon = phens[[1]])
|
||||
expect_true(is.data.frame(measurements))
|
||||
expect_true('osem_measurements' %in% class(measurements))
|
||||
})
|
||||
|
||||
test_that('measurements can be retrieved for a bounding box', {
|
||||
check_api()
|
||||
|
@ -108,8 +87,7 @@ test_that('both from and to are required when requesting measurements, error oth
|
|||
test_that('phenomenon is required when requesting measurements, error otherwise', {
|
||||
check_api()
|
||||
|
||||
expect_error(osem_measurements(), 'missing, with no default')
|
||||
expect_error(osem_measurements(boxes), 'Parameter "phenomenon" is required')
|
||||
expect_error(osem_measurements())
|
||||
|
||||
sfc = sf::st_sfc(sf::st_linestring(x = matrix(data = c(7, 8, 50, 51), ncol = 2)), crs = 4326)
|
||||
bbox = sf::st_bbox(sfc)
|
||||
|
@ -126,6 +104,13 @@ test_that('[.osem_measurements maintains attributes', {
|
|||
expect_true(all(attributes(m[1:nrow(m), ]) %in% attributes(m)))
|
||||
})
|
||||
|
||||
test_that('data.frame can be converted to measurements data.frame', {
|
||||
check_api()
|
||||
m = osem_measurements('Windrichtung')
|
||||
df = osem_as_measurements(data.frame(c(1, 2), c('a', 'b')))
|
||||
expect_equal(class(df), class(m))
|
||||
})
|
||||
|
||||
test_that('requests can be cached', {
|
||||
check_api()
|
||||
|
||||
|
|
|
@ -25,6 +25,8 @@ test_that('phenomena from boxes has all phenomena', {
|
|||
})
|
||||
|
||||
test_that('phenomena from a not sensebox data.frame returns error', {
|
||||
check_api()
|
||||
|
||||
expect_error(osem_phenomena(list()), 'no applicable method')
|
||||
expect_error(osem_phenomena(data.frame()), 'no applicable method')
|
||||
boxes_df = boxes
|
||||
|
|
|
@ -7,7 +7,7 @@ RUN apt-get update && \
|
|||
RUN Rscript -e 'install.packages("sf")'
|
||||
RUN Rscript -e 'install.packages("magrittr")'
|
||||
RUN Rscript -e 'install.packages("devtools")'
|
||||
RUN Rscript -e 'devtools::install_github("noerw/opensensmapR")'
|
||||
RUN Rscript -e 'devtools::install_github("sensebox/opensensmapR")'
|
||||
|
||||
# install crontab
|
||||
COPY crontab /crontab
|
||||
|
|
|
@ -21,7 +21,7 @@ docker run -v $(pwd)/data:/script/data osem-monitr
|
|||
```bash
|
||||
# install dependencies once
|
||||
Rscript -e 'install.packages(c("dplyr", "magrittr", "devtools"))'
|
||||
Rscript -e 'devtools::install_github("noerw/opensensmapR")'
|
||||
Rscript -e 'devtools::install_github("sensebox/opensensmapR")'
|
||||
|
||||
Rscript --save --restore get-counts.R
|
||||
Rscript --save --restore get-boxes.R
|
||||
|
|
BIN
vignettes/boxes_precomputed.rds
Normal file
BIN
vignettes/boxes_precomputed.rds
Normal file
Binary file not shown.
|
@ -43,7 +43,10 @@ So the first step is to retrieve *all the boxes*:
|
|||
```{r download}
|
||||
# if you want to see results for a specific subset of boxes,
|
||||
# just specify a filter such as grouptag='ifgi' here
|
||||
boxes = osem_boxes()
|
||||
|
||||
# boxes = osem_boxes(cache = '.')
|
||||
boxes = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
||||
|
||||
```
|
||||
|
||||
# Plot count of boxes by time {.tabset}
|
||||
|
@ -68,7 +71,7 @@ ggplot(exposure_counts, aes(x = createdAt, y = count, colour = exposure)) +
|
|||
Outdoor boxes are growing *fast*!
|
||||
We can also see the introduction of `mobile` sensor "stations" in 2017. While
|
||||
mobile boxes are still few, we can expect a quick rise in 2018 once the new
|
||||
[senseBox MCU with GPS support is released](https://sensebox.de/blog/2018-03-06-senseBox_MCU).
|
||||
senseBox MCU with GPS support is released.
|
||||
|
||||
Let's have a quick summary:
|
||||
```{r exposure_summary}
|
||||
|
@ -93,7 +96,7 @@ inconsistent (`Luftdaten`, `luftdaten.info`, ...)
|
|||
grouptag_counts = boxes %>%
|
||||
group_by(grouptag) %>%
|
||||
# only include grouptags with 8 or more members
|
||||
filter(length(grouptag) >= 8 && !is.na(grouptag)) %>%
|
||||
filter(length(grouptag) >= 8 & !is.na(grouptag)) %>%
|
||||
mutate(count = row_number(createdAt))
|
||||
|
||||
# helper for sorting the grouptags by boxcount
|
||||
|
@ -163,7 +166,7 @@ ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
|||
|
||||
We see a sudden rise in early 2017, which lines up with the fast growing grouptag `Luftdaten`.
|
||||
This was enabled by an integration of openSenseMap.org into the firmware of the
|
||||
air quality monitoring project [luftdaten.info](https://luftdaten.info).
|
||||
air quality monitoring project [luftdaten.info](https://sensor.community/de/).
|
||||
The dips in mid 2017 and early 2018 could possibly be explained by production/delivery issues
|
||||
of the senseBox hardware, but I have no data on the exact time frames to verify.
|
||||
|
||||
|
@ -192,7 +195,7 @@ spanning a large chunk of openSenseMap's existence.
|
|||
duration = boxes %>%
|
||||
group_by(grouptag) %>%
|
||||
# only include grouptags with 8 or more members
|
||||
filter(length(grouptag) >= 8 && !is.na(grouptag) && !is.na(updatedAt)) %>%
|
||||
filter(length(grouptag) >= 8 & !is.na(grouptag) & !is.na(updatedAt)) %>%
|
||||
mutate(duration = difftime(updatedAt, createdAt, units='days'))
|
||||
|
||||
ggplot(duration, aes(x = grouptag, y = duration)) +
|
||||
|
@ -240,4 +243,4 @@ If you implemented some, feel free to add them to this vignette via a [Pull Requ
|
|||
* filter timespans with big dips in growth rate, and extrapolate the amount of
|
||||
senseBoxes that could be on the platform today, assuming there were no production issues ;)
|
||||
|
||||
[PR]: https://github.com/noerw/opensensmapr/pulls
|
||||
[PR]: https://github.com/sensebox/opensensmapr/pulls
|
||||
|
|
297
vignettes/osem-history_revised.Rmd
Normal file
297
vignettes/osem-history_revised.Rmd
Normal file
|
@ -0,0 +1,297 @@
|
|||
---
|
||||
title: "Visualising the Development of openSenseMap.org in 2022"
|
||||
author: "Jan Stenkamp"
|
||||
date: '`r Sys.Date()`'
|
||||
output:
|
||||
html_document:
|
||||
code_folding: hide
|
||||
df_print: kable
|
||||
theme: lumen
|
||||
toc: yes
|
||||
toc_float: yes
|
||||
rmarkdown::html_vignette:
|
||||
df_print: kable
|
||||
fig_height: 5
|
||||
fig_width: 7
|
||||
toc: yes
|
||||
vignette: >
|
||||
%\VignetteIndexEntry{Visualising the Development of openSenseMap.org in 2022}
|
||||
%\VignetteEncoding{UTF-8}
|
||||
%\VignetteEngine{knitr::rmarkdown}
|
||||
---
|
||||
|
||||
> This vignette serves as an example on data wrangling & visualization with
|
||||
`opensensmapr`, `dplyr` and `ggplot2`.
|
||||
|
||||
```{r setup, results='hide', message=FALSE, warning=FALSE}
|
||||
# required packages:
|
||||
library(opensensmapr) # data download
|
||||
library(dplyr) # data wrangling
|
||||
library(ggplot2) # plotting
|
||||
library(lubridate) # date arithmetic
|
||||
library(zoo) # rollmean()
|
||||
```
|
||||
|
||||
openSenseMap.org has grown quite a bit in the last years; it would be interesting
|
||||
to see how we got to the current `r osem_counts()$boxes` sensor stations,
|
||||
split up by various attributes of the boxes.
|
||||
|
||||
While `opensensmapr` provides extensive methods of filtering boxes by attributes
|
||||
on the server, we do the filtering within R to save time and gain flexibility.
|
||||
|
||||
|
||||
So the first step is to retrieve *all the boxes*.
|
||||
|
||||
```{r download, results='hide', message=FALSE, warning=FALSE}
|
||||
# if you want to see results for a specific subset of boxes,
|
||||
# just specify a filter such as grouptag='ifgi' here
|
||||
|
||||
# boxes = osem_boxes(cache = '.')
|
||||
boxes = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
||||
```
|
||||
# Introduction
|
||||
In the following we just want to have a look at the boxes created in 2022, so we filter for them.
|
||||
|
||||
```{r}
|
||||
boxes = filter(boxes, locationtimestamp >= "2022-01-01" & locationtimestamp <="2022-12-31")
|
||||
summary(boxes) -> summary.data.frame
|
||||
```
|
||||
|
||||
<!-- This gives a good overview already: As of writing this, there are more than 11,000 -->
|
||||
<!-- sensor stations, of which ~30% are currently running. Most of them are placed -->
|
||||
<!-- outdoors and have around 5 sensors each. -->
|
||||
<!-- The oldest station is from August 2016, while the latest station was registered a -->
|
||||
<!-- couple of minutes ago. -->
|
||||
|
||||
Another feature of interest is the spatial distribution of the boxes: `plot()`
|
||||
can help us out here. This function requires a bunch of optional dependencies though.
|
||||
|
||||
```{r, message=FALSE, warning=FALSE}
|
||||
plot(boxes)
|
||||
```
|
||||
|
||||
But what do these sensor stations actually measure? Lets find out.
|
||||
`osem_phenomena()` gives us a named list of of the counts of each observed
|
||||
phenomenon for the given set of sensor stations:
|
||||
|
||||
```{r}
|
||||
phenoms = osem_phenomena(boxes)
|
||||
str(phenoms)
|
||||
```
|
||||
|
||||
Thats quite some noise there, with many phenomena being measured by a single
|
||||
sensor only, or many duplicated phenomena due to slightly different spellings.
|
||||
We should clean that up, but for now let's just filter out the noise and find
|
||||
those phenomena with high sensor numbers:
|
||||
|
||||
```{r}
|
||||
phenoms[phenoms > 50]
|
||||
```
|
||||
|
||||
|
||||
# Plot count of boxes by time {.tabset}
|
||||
By looking at the `createdAt` attribute of each box we know the exact time a box
|
||||
was registered. Because of some database migration issues the `createdAt` values are mostly wrong (~80% of boxes created 2022-03-30), so we are using the `timestamp` attribute of the `currentlocation` which should in most cases correspond to the creation date.
|
||||
|
||||
With this approach we have no information about boxes that were deleted in the
|
||||
meantime, but that's okay for now.
|
||||
|
||||
## ...and exposure
|
||||
```{r exposure_counts, message=FALSE}
|
||||
exposure_counts = boxes %>%
|
||||
group_by(exposure) %>%
|
||||
mutate(count = row_number(locationtimestamp))
|
||||
|
||||
exposure_colors = c(indoor = 'red', outdoor = 'lightgreen', mobile = 'blue', unknown = 'darkgrey')
|
||||
ggplot(exposure_counts, aes(x = locationtimestamp, y = count, colour = exposure)) +
|
||||
geom_line() +
|
||||
scale_colour_manual(values = exposure_colors) +
|
||||
xlab('Registration Date') + ylab('senseBox count')
|
||||
```
|
||||
|
||||
Outdoor boxes are growing *fast*!
|
||||
We can also see the introduction of `mobile` sensor "stations" in 2017.
|
||||
|
||||
Let's have a quick summary:
|
||||
```{r exposure_summary}
|
||||
exposure_counts %>%
|
||||
summarise(
|
||||
oldest = min(locationtimestamp),
|
||||
newest = max(locationtimestamp),
|
||||
count = max(count)
|
||||
) %>%
|
||||
arrange(desc(count))
|
||||
```
|
||||
|
||||
## ...and grouptag
|
||||
We can try to find out where the increases in growth came from, by analysing the
|
||||
box count by grouptag.
|
||||
|
||||
Caveats: Only a small subset of boxes has a grouptag, and we should assume
|
||||
that these groups are actually bigger. Also, we can see that grouptag naming is
|
||||
inconsistent (`Luftdaten`, `luftdaten.info`, ...)
|
||||
|
||||
```{r grouptag_counts, message=FALSE}
|
||||
grouptag_counts = boxes %>%
|
||||
group_by(grouptag) %>%
|
||||
# only include grouptags with 15 or more members
|
||||
filter(length(grouptag) >= 15 & !is.na(grouptag) & grouptag != '') %>%
|
||||
mutate(count = row_number(locationtimestamp))
|
||||
|
||||
# helper for sorting the grouptags by boxcount
|
||||
sortLvls = function(oldFactor, ascending = TRUE) {
|
||||
lvls = table(oldFactor) %>% sort(., decreasing = !ascending) %>% names()
|
||||
factor(oldFactor, levels = lvls)
|
||||
}
|
||||
grouptag_counts$grouptag = sortLvls(grouptag_counts$grouptag, ascending = FALSE)
|
||||
|
||||
ggplot(grouptag_counts, aes(x = locationtimestamp, y = count, colour = grouptag)) +
|
||||
geom_line(aes(group = grouptag)) +
|
||||
xlab('Registration Date') + ylab('senseBox count')
|
||||
```
|
||||
|
||||
```{r grouptag_summary}
|
||||
grouptag_counts %>%
|
||||
summarise(
|
||||
oldest = min(locationtimestamp),
|
||||
newest = max(locationtimestamp),
|
||||
count = max(count)
|
||||
) %>%
|
||||
arrange(desc(count))
|
||||
```
|
||||
|
||||
# Plot rate of growth and inactivity per week
|
||||
First we group the boxes by `locationtimestamp` into bins of one week:
|
||||
```{r growthrate_registered, warning=FALSE, message=FALSE, results='hide'}
|
||||
bins = 'week'
|
||||
mvavg_bins = 6
|
||||
|
||||
growth = boxes %>%
|
||||
mutate(week = cut(as.Date(locationtimestamp), breaks = bins)) %>%
|
||||
group_by(week) %>%
|
||||
summarize(count = length(week)) %>%
|
||||
mutate(event = 'registered')
|
||||
```
|
||||
|
||||
We can do the same for `updatedAt`, which informs us about the last change to
|
||||
a box, including uploaded measurements. As a lot of boxes were "updated" by the database
|
||||
migration, many of them are updated at 2022-03-30, so we try to use the `lastMeasurement`
|
||||
attribute instead of `updatedAt`. This leads to fewer boxes but also automatically excludes
|
||||
boxes which were created but never made a measurement.
|
||||
|
||||
This method of determining inactive boxes is fairly inaccurate and should be
|
||||
considered an approximation, because we have no information about intermediate
|
||||
inactive phases.
|
||||
Also deleted boxes would probably have a big impact here.
|
||||
```{r growthrate_inactive, warning=FALSE, message=FALSE, results='hide'}
|
||||
inactive = boxes %>%
|
||||
# remove boxes that were updated in the last two days,
|
||||
# b/c any box becomes inactive at some point by definition of updatedAt
|
||||
filter(lastMeasurement < now() - days(2)) %>%
|
||||
mutate(week = cut(as.Date(lastMeasurement), breaks = bins)) %>%
|
||||
filter(as.Date(week) > as.Date("2021-12-31")) %>%
|
||||
group_by(week) %>%
|
||||
summarize(count = length(week)) %>%
|
||||
mutate(event = 'inactive')
|
||||
```
|
||||
|
||||
Now we can combine both datasets for plotting:
|
||||
```{r growthrate, warning=FALSE, message=FALSE, results='hide'}
|
||||
boxes_by_date = bind_rows(growth, inactive) %>% group_by(event)
|
||||
|
||||
ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
||||
xlab('Time') + ylab(paste('rate per ', bins)) +
|
||||
scale_x_date(date_breaks="years", date_labels="%Y") +
|
||||
scale_colour_manual(values = c(registered = 'lightgreen', inactive = 'grey')) +
|
||||
geom_point(aes(y = count), size = 0.5) +
|
||||
# moving average, make first and last value NA (to ensure identical length of vectors)
|
||||
geom_line(aes(y = rollmean(count, mvavg_bins, fill = list(NA, NULL, NA))))
|
||||
```
|
||||
|
||||
And see in which weeks the most boxes become (in)active:
|
||||
```{r table_mostregistrations}
|
||||
boxes_by_date %>%
|
||||
filter(count > 50) %>%
|
||||
arrange(desc(count))
|
||||
```
|
||||
|
||||
# Plot duration of boxes being active {.tabset}
|
||||
While we are looking at `locationtimestamp` and `lastMeasurement`, we can also extract the duration of activity
|
||||
of each box, and look at metrics by exposure and grouptag once more:
|
||||
|
||||
## ...by exposure
|
||||
```{r exposure_duration, message=FALSE}
|
||||
durations = boxes %>%
|
||||
group_by(exposure) %>%
|
||||
filter(!is.na(lastMeasurement)) %>%
|
||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
||||
filter(duration >= 0)
|
||||
|
||||
ggplot(durations, aes(x = exposure, y = duration)) +
|
||||
geom_boxplot() +
|
||||
coord_flip() + ylab('Duration active in Days')
|
||||
```
|
||||
|
||||
The time of activity averages at only `r round(mean(durations$duration))` days,
|
||||
though there are boxes with `r round(max(durations$duration))` days of activity,
|
||||
spanning a large chunk of openSenseMap's existence.
|
||||
|
||||
## ...by grouptag
|
||||
```{r grouptag_duration, message=FALSE}
|
||||
durations = boxes %>%
|
||||
filter(!is.na(lastMeasurement)) %>%
|
||||
group_by(grouptag) %>%
|
||||
# only include grouptags with 20 or more members
|
||||
filter(length(grouptag) >= 15 & !is.na(grouptag) & !is.na(lastMeasurement)) %>%
|
||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
||||
filter(duration >= 0)
|
||||
|
||||
ggplot(durations, aes(x = grouptag, y = duration)) +
|
||||
geom_boxplot() +
|
||||
coord_flip() + ylab('Duration active in Days')
|
||||
|
||||
durations %>%
|
||||
summarize(
|
||||
duration_avg = round(mean(duration)),
|
||||
duration_min = round(min(duration)),
|
||||
duration_max = round(max(duration)),
|
||||
oldest_box = round(max(difftime(now(), locationtimestamp, units='days')))
|
||||
) %>%
|
||||
arrange(desc(duration_avg))
|
||||
```
|
||||
|
||||
The time of activity averages at only `r round(mean(durations$duration))` days,
|
||||
though there are boxes with `r round(max(durations$duration))` days of activity,
|
||||
spanning a large chunk of openSenseMap's existence.
|
||||
|
||||
## ...by year of registration
|
||||
This is less useful, as older boxes are active for a longer time by definition.
|
||||
If you have an idea how to compensate for that, please send a [Pull Request][PR]!
|
||||
|
||||
```{r year_duration, message=FALSE}
|
||||
# NOTE: boxes older than 2016 missing due to missing updatedAt in database
|
||||
duration = boxes %>%
|
||||
mutate(year = cut(as.Date(locationtimestamp), breaks = 'year')) %>%
|
||||
group_by(year) %>%
|
||||
filter(!is.na(lastMeasurement)) %>%
|
||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
||||
filter(duration >= 0)
|
||||
|
||||
ggplot(duration, aes(x = substr(as.character(year), 0, 4), y = duration)) +
|
||||
geom_boxplot() +
|
||||
coord_flip() + ylab('Duration active in Days') + xlab('Year of Registration')
|
||||
```
|
||||
|
||||
# More Visualisations
|
||||
Other visualisations come to mind, and are left as an exercise to the reader.
|
||||
If you implemented some, feel free to add them to this vignette via a [Pull Request][PR].
|
||||
|
||||
* growth by phenomenon
|
||||
* growth by location -> (interactive) map
|
||||
* set inactive rate in relation to total box count
|
||||
* filter timespans with big dips in growth rate, and extrapolate the amount of
|
||||
senseBoxes that could be on the platform today, assuming there were no production issues ;)
|
||||
|
||||
[PR]: https://github.com/sensebox/opensensmapr/pulls
|
||||
|
||||
|
|
@ -18,7 +18,7 @@ knitr::opts_chunk$set(echo = TRUE)
|
|||
```
|
||||
|
||||
This package provides data ingestion functions for almost any data stored on the
|
||||
open data platform for environemental sensordata <https://opensensemap.org>.
|
||||
open data platform for environmental sensordata <https://opensensemap.org>.
|
||||
Its main goals are to provide means for:
|
||||
|
||||
- big data analysis of the measurements stored on the platform
|
||||
|
@ -28,11 +28,12 @@ Its main goals are to provide means for:
|
|||
Before we look at actual observations, lets get a grasp of the openSenseMap
|
||||
datasets' structure.
|
||||
|
||||
```{r results = F}
|
||||
```{r results = FALSE}
|
||||
library(magrittr)
|
||||
library(opensensmapr)
|
||||
|
||||
all_sensors = osem_boxes()
|
||||
# all_sensors = osem_boxes(cache = '.')
|
||||
all_sensors = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
||||
```
|
||||
```{r}
|
||||
summary(all_sensors)
|
||||
|
@ -47,11 +48,7 @@ couple of minutes ago.
|
|||
Another feature of interest is the spatial distribution of the boxes: `plot()`
|
||||
can help us out here. This function requires a bunch of optional dependencies though.
|
||||
|
||||
```{r message=F, warning=F}
|
||||
if (!require('maps')) install.packages('maps')
|
||||
if (!require('maptools')) install.packages('maptools')
|
||||
if (!require('rgeos')) install.packages('rgeos')
|
||||
|
||||
```{r, message=FALSE, warning=FALSE}
|
||||
plot(all_sensors)
|
||||
```
|
||||
|
||||
|
@ -81,7 +78,7 @@ We should check how many sensor stations provide useful data: We want only those
|
|||
boxes with a PM2.5 sensor, that are placed outdoors and are currently submitting
|
||||
measurements:
|
||||
|
||||
```{r results = F}
|
||||
```{r results = FALSE, eval=FALSE}
|
||||
pm25_sensors = osem_boxes(
|
||||
exposure = 'outdoor',
|
||||
date = Sys.time(), # ±4 hours
|
||||
|
@ -89,6 +86,8 @@ pm25_sensors = osem_boxes(
|
|||
)
|
||||
```
|
||||
```{r}
|
||||
pm25_sensors = readRDS('pm25_sensors.rds') # read precomputed file to save resources
|
||||
|
||||
summary(pm25_sensors)
|
||||
plot(pm25_sensors)
|
||||
```
|
||||
|
@ -97,16 +96,20 @@ Thats still more than 200 measuring stations, we can work with that.
|
|||
|
||||
### Analyzing sensor data
|
||||
Having analyzed the available data sources, let's finally get some measurements.
|
||||
We could call `osem_measurements(pm25_sensors)` now, however we are focussing on
|
||||
We could call `osem_measurements(pm25_sensors)` now, however we are focusing on
|
||||
a restricted area of interest, the city of Berlin.
|
||||
Luckily we can get the measurements filtered by a bounding box:
|
||||
|
||||
```{r}
|
||||
```{r, results=FALSE, message=FALSE}
|
||||
library(sf)
|
||||
library(units)
|
||||
library(lubridate)
|
||||
library(dplyr)
|
||||
|
||||
```
|
||||
|
||||
Since the API takes quite long to response measurements, especially filtered on space and time, we do not run the following chunks for publication of the package on CRAN.
|
||||
```{r bbox, results = FALSE, eval=FALSE}
|
||||
# construct a bounding box: 12 kilometers around Berlin
|
||||
berlin = st_point(c(13.4034, 52.5120)) %>%
|
||||
st_sfc(crs = 4326) %>%
|
||||
|
@ -114,24 +117,26 @@ berlin = st_point(c(13.4034, 52.5120)) %>%
|
|||
st_buffer(set_units(12, km)) %>%
|
||||
st_transform(4326) %>% # the opensensemap expects WGS 84
|
||||
st_bbox()
|
||||
```
|
||||
```{r results = F}
|
||||
pm25 = osem_measurements(
|
||||
berlin,
|
||||
phenomenon = 'PM2.5',
|
||||
from = now() - days(20), # defaults to 2 days
|
||||
from = now() - days(3), # defaults to 2 days
|
||||
to = now()
|
||||
)
|
||||
|
||||
```
|
||||
|
||||
```{r}
|
||||
pm25 = readRDS('pm25_berlin.rds') # read precomputed file to save resources
|
||||
plot(pm25)
|
||||
```
|
||||
|
||||
Now we can get started with actual spatiotemporal data analysis.
|
||||
First, lets mask the seemingly uncalibrated sensors:
|
||||
|
||||
```{r}
|
||||
```{r, warning=FALSE}
|
||||
outliers = filter(pm25, value > 100)$sensorId
|
||||
bad_sensors = outliers[, drop = T] %>% levels()
|
||||
bad_sensors = outliers[, drop = TRUE] %>% levels()
|
||||
|
||||
pm25 = mutate(pm25, invalid = sensorId %in% bad_sensors)
|
||||
```
|
||||
|
@ -139,7 +144,7 @@ pm25 = mutate(pm25, invalid = sensorId %in% bad_sensors)
|
|||
Then plot the measuring locations, flagging the outliers:
|
||||
|
||||
```{r}
|
||||
st_as_sf(pm25) %>% st_geometry() %>% plot(col = factor(pm25$invalid), axes = T)
|
||||
st_as_sf(pm25) %>% st_geometry() %>% plot(col = factor(pm25$invalid), axes = TRUE)
|
||||
```
|
||||
|
||||
Removing these sensors yields a nicer time series plot:
|
||||
|
|
|
@ -10,7 +10,7 @@ vignette: >
|
|||
---
|
||||
|
||||
It may be useful to download data from openSenseMap only once.
|
||||
For reproducible results, the data could be saved to disk, and reloaded at a
|
||||
For reproducible results, the data should be saved to disk, and reloaded at a
|
||||
later point.
|
||||
|
||||
This avoids..
|
||||
|
@ -21,40 +21,49 @@ This avoids..
|
|||
- stress on the openSenseMap-server.
|
||||
|
||||
This vignette shows how to use this built in `opensensmapr` feature, and
|
||||
how to do it yourself, if you want to store to other data formats.
|
||||
how to do it yourself in case you want to save to other data formats.
|
||||
|
||||
## Using openSensMapr Caching Feature
|
||||
```{r setup, results='hide'}
|
||||
# this vignette requires:
|
||||
library(opensensmapr)
|
||||
library(jsonlite)
|
||||
library(readr)
|
||||
```
|
||||
|
||||
## Using the opensensmapr Caching Feature
|
||||
All data retrieval functions of `opensensmapr` have a built in caching feature,
|
||||
which serializes an API response to disk.
|
||||
Subsequent identical requests will then return the serialized data instead of making
|
||||
another request.
|
||||
To do so, each request is given a unique ID based on its parameters.
|
||||
|
||||
To use this feature, just add a path to a directory to the `cache` parameter:
|
||||
```{r cache}
|
||||
b = osem_boxes(cache = tempdir())
|
||||
list.files(tempdir(), pattern = 'osemcache\\..*\\.rds')
|
||||
b = osem_boxes(grouptag = 'ifgi', cache = tempdir())
|
||||
|
||||
# the next identical request will hit the cache only!
|
||||
b = osem_boxes(cache = tempdir())
|
||||
b = osem_boxes(grouptag = 'ifgi', cache = tempdir())
|
||||
|
||||
# requests without the cache parameter will still be performed normally
|
||||
b = osem_boxes()
|
||||
b = osem_boxes(grouptag = 'ifgi')
|
||||
```
|
||||
|
||||
You can maintain multiple caches simultaneously which allows to store only
|
||||
serialized data related to a script in its directory:
|
||||
Looking at the cache directory we can see one file for each request, which is identified through a hash of the request URL:
|
||||
```{r cachelisting}
|
||||
list.files(tempdir(), pattern = 'osemcache\\..*\\.rds')
|
||||
```
|
||||
|
||||
You can maintain multiple caches simultaneously which allows to only store data related to a script in the same directory:
|
||||
```{r cache_custom}
|
||||
cacheDir = getwd() # current working directory
|
||||
b = osem_boxes(cache = cacheDir)
|
||||
b = osem_boxes(grouptag = 'ifgi', cache = cacheDir)
|
||||
|
||||
# the next identical request will hit the cache only!
|
||||
b = osem_boxes(cache = cacheDir)
|
||||
b = osem_boxes(grouptag = 'ifgi', cache = cacheDir)
|
||||
```
|
||||
|
||||
To get fresh results again, just call `osem_clear_cache()` for the respective cache:
|
||||
```{r clearcache}
|
||||
osem_clear_cache() # clears default cache
|
||||
```{r clearcache, results='hide'}
|
||||
osem_clear_cache() # clears default cache
|
||||
osem_clear_cache(getwd()) # clears a custom cache
|
||||
```
|
||||
|
||||
|
@ -62,108 +71,36 @@ osem_clear_cache(getwd()) # clears a custom cache
|
|||
If you want to roll your own serialization method to support custom data formats,
|
||||
here's how:
|
||||
|
||||
```{r setup, results='hide'}
|
||||
# this section requires:
|
||||
library(opensensmapr)
|
||||
library(jsonlite)
|
||||
library(readr)
|
||||
|
||||
```{r data, results='hide', eval=FALSE}
|
||||
# first get our example data:
|
||||
boxes = osem_boxes(grouptag = 'ifgi')
|
||||
measurements = osem_measurements(boxes, phenomenon = 'PM10')
|
||||
measurements = osem_measurements('Windgeschwindigkeit')
|
||||
```
|
||||
|
||||
If you are paranoid and worry about `.rds` files not being decodable anymore
|
||||
in the (distant) future, you could serialize to a plain text format such as JSON.
|
||||
This of course comes at the cost of storage space and performance.
|
||||
```{r serialize_json}
|
||||
```{r serialize_json, eval=FALSE}
|
||||
# serializing senseBoxes to JSON, and loading from file again:
|
||||
write(jsonlite::serializeJSON(measurements), 'boxes.json')
|
||||
boxes_from_file = jsonlite::unserializeJSON(readr::read_file('boxes.json'))
|
||||
write(jsonlite::serializeJSON(measurements), 'measurements.json')
|
||||
measurements_from_file = jsonlite::unserializeJSON(readr::read_file('measurements.json'))
|
||||
class(measurements_from_file)
|
||||
```
|
||||
|
||||
Both methods also persist the R object metadata (classes, attributes).
|
||||
This method also persists the R object metadata (classes, attributes).
|
||||
If you were to use a serialization method that can't persist object metadata, you
|
||||
could re-apply it with the following functions:
|
||||
|
||||
```{r serialize_attrs}
|
||||
# note the toJSON call
|
||||
write(jsonlite::toJSON(measurements), 'boxes_bad.json')
|
||||
boxes_without_attrs = jsonlite::fromJSON('boxes_bad.json')
|
||||
```{r serialize_attrs, eval=FALSE}
|
||||
# note the toJSON call instead of serializeJSON
|
||||
write(jsonlite::toJSON(measurements), 'measurements_bad.json')
|
||||
measurements_without_attrs = jsonlite::fromJSON('measurements_bad.json')
|
||||
class(measurements_without_attrs)
|
||||
|
||||
boxes_with_attrs = osem_as_sensebox(boxes_without_attrs)
|
||||
class(boxes_with_attrs)
|
||||
measurements_with_attrs = osem_as_measurements(measurements_without_attrs)
|
||||
class(measurements_with_attrs)
|
||||
```
|
||||
The same goes for measurements via `osem_as_measurements()`.
|
||||
The same goes for boxes via `osem_as_sensebox()`.
|
||||
|
||||
## Workflow for reproducible code
|
||||
For truly reproducible code you want it to work and return the same results --
|
||||
no matter if you run it the first time or a consecutive time, and without making
|
||||
changes to it.
|
||||
|
||||
Therefore we need a wrapper around the save-to-file & load-from-file logic.
|
||||
The following examples show a way to do just that, and where inspired by
|
||||
[this reproducible analysis by Daniel Nuest](https://github.com/nuest/sensebox-binder).
|
||||
|
||||
```{r osem_offline}
|
||||
# offline logic
|
||||
osem_offline = function (func, file, format='rds', ...) {
|
||||
# deserialize if file exists, otherwise download and serialize
|
||||
if (file.exists(file)) {
|
||||
if (format == 'json')
|
||||
jsonlite::unserializeJSON(readr::read_file(file))
|
||||
else
|
||||
readRDS(file)
|
||||
} else {
|
||||
data = func(...)
|
||||
if (format == 'json')
|
||||
write(jsonlite::serializeJSON(data), file = file)
|
||||
else
|
||||
saveRDS(data, file)
|
||||
data
|
||||
}
|
||||
}
|
||||
|
||||
# wrappers for each download function
|
||||
osem_measurements_offline = function (file, ...) {
|
||||
osem_offline(opensensmapr::osem_measurements, file, ...)
|
||||
}
|
||||
osem_boxes_offline = function (file, ...) {
|
||||
osem_offline(opensensmapr::osem_boxes, file, ...)
|
||||
}
|
||||
osem_box_offline = function (file, ...) {
|
||||
osem_offline(opensensmapr::osem_box, file, ...)
|
||||
}
|
||||
osem_counts_offline = function (file, ...) {
|
||||
osem_offline(opensensmapr::osem_counts, file, ...)
|
||||
}
|
||||
```{r cleanup, include=FALSE, eval=FALSE}
|
||||
file.remove('measurements.json', 'measurements_bad.json')
|
||||
```
|
||||
|
||||
Thats it! Now let's try it out:
|
||||
|
||||
```{r test}
|
||||
# first run; will download and save to disk
|
||||
b1 = osem_boxes_offline('mobileboxes.rds', exposure='mobile')
|
||||
|
||||
# consecutive runs; will read from disk
|
||||
b2 = osem_boxes_offline('mobileboxes.rds', exposure='mobile')
|
||||
class(b1) == class(b2)
|
||||
|
||||
# we can even omit the arguments now (though thats not really the point here)
|
||||
b3 = osem_boxes_offline('mobileboxes.rds')
|
||||
nrow(b1) == nrow(b3)
|
||||
|
||||
# verify that the custom sensebox methods are still working
|
||||
summary(b2)
|
||||
plot(b3)
|
||||
```
|
||||
|
||||
To re-download the data, just clear the files that were created in the process:
|
||||
```{r cleanup, results='hide'}
|
||||
file.remove('mobileboxes.rds', 'boxes_bad.json', 'boxes.json', 'measurements.rds')
|
||||
```
|
||||
|
||||
A possible extension to this scheme comes to mind: Omit the specification of a
|
||||
filename, and assign a unique ID to the request instead.
|
||||
For example, one could calculate the SHA-1 hash of the parameters, and use it
|
||||
as filename.
|
||||
|
|
BIN
vignettes/pm25_berlin.rds
Normal file
BIN
vignettes/pm25_berlin.rds
Normal file
Binary file not shown.
BIN
vignettes/pm25_sensors.rds
Normal file
BIN
vignettes/pm25_sensors.rds
Normal file
Binary file not shown.
Loading…
Add table
Reference in a new issue