Mirror of https://github.com/sensebox/opensensmapr (synced 2025-04-08 16:00:29 +02:00)

Compare commits
No commits in common: "master" and "v0.4.0" have entirely different histories.
67 changed files with 1799 additions and 7358 deletions. The diffs below read from master (old, "-") to v0.4.0 (new, "+").

.Rbuildignore

@@ -1,11 +1,9 @@
 ^.*\.Rproj$
 ^\.Rproj\.user$
+^CHANGES\.md$
 ^tools*$
 ^\.travis\.yml$
 ^appveyor\.yml$
 ^CONDUCT\.md$
 ^codecov\.yml$
 ^\.lintr$
-^opensensmapr_.*\.tar\.gz$
-^cran-comments\.md$
-^CRAN-SUBMISSION$

.gitignore (1 changed line; vendored)

@@ -5,6 +5,5 @@
 .Ruserdata
 *.Rcheck
 *.log
-cran-comments.md

 opensensmapr_*.tar.gz

.lintr (2 changed lines)

@@ -1,4 +1,4 @@
-exclusions: list.files(path = 'inst/doc', full.names = TRUE)
+exclusions: list('inst/doc/osem-intro.R')
 linters: with_defaults(
   # we use snake case
   camel_case_linter = NULL,

.travis.yml (25 changed lines)

@@ -3,12 +3,18 @@
 language: R
 sudo: false
 cache: packages
-warnings_are_errors: true
+r:
+- release
+- devel

 r_github_packages:
 - r-lib/covr
 - jimhester/lintr

+r_build_args: "--no-build-vignettes"
+r_check_args: "--as-cran --no-vignettes"
+
 before_install:
 - sudo add-apt-repository ppa:ubuntugis/ubuntugis-unstable --yes
 - sudo apt-get --yes --force-yes update -qq
@@ -20,20 +26,3 @@ before_install:
 after_success:
 - Rscript -e 'covr::codecov()'
 - Rscript -e 'lintr::lint_package()'
-
-matrix:
-  include:
-    # fast build
-    - r: devel
-      r_build_args: "--no-build-vignettes"
-      r_check_args: "--no-vignettes --no-manual"
-      env: NOT_CRAN=true
-
-    # strict builds
-    - r: devel
-      r_check_args: "--as-cran"
-      env: NOT_CRAN=false
-    - r: release
-      r_check_args: "--as-cran"
-      env: NOT_CRAN=false
-

CHANGES.md

@@ -1,44 +1,6 @@
 # opensensmapr changelog
 This project does its best to adhere to semantic versioning.

-### 2023-03-06: v0.6.0
-- fix package bugs to pass CRAN tests after 4 years of maintenance break
-- updated hyperlinks
-- don't throw error for empty sensors
-- updated tests
-- updated maintainer
-- updated vignettes
-- use precomputed data to create vignettes
-- change archive url to 'https://archive.opensensemap.org/' and checking its availability before requesting data
-- new features:
-  - added param bbox for osem_boxes function
-  - support of multiple grouptags
-
-### 2019-02-09: v0.5.1
-- fix package to work with API v6
-- box$lastMeasurement may be missing now for long inactive boxes
-- add tests
-
-### 2018-10-20: v0.5.0
-- fix dynamic method export
-- add `osem_measurements_archive()` to fetch measurements from the archive (#23)
-- add `box$sensors` containing a data.frame with sensor metadata
-- add sensor-IDs to `box$phenomena`
-
-### 2018-09-21: v0.4.3
-- dynamically export S3 methods of foreign generics
-  for compatibility with upcoming R 3.6.0
-- add `readr` as default dependency
-
-### 2018-09-05: v0.4.2
-- move to sensebox GitHub organization
-- pass ... to plot.sensebox()
-
-### 2018-06-07: v0.4.1
-- fix `osem_as_measurements()` returning wrong classes
-- improve vignettes
-- be on CRAN eventually.. hopefully??
-
 ### 2018-05-25: v0.4.0
 - add caching feature for requests; see vignette osem-serialization
 - add vignette osem-serialization
@@ -76,7 +38,7 @@ This project does its best to adhere to semantic versioning.
 ### 2017-08-23: v0.2.0
 - add auto paging for `osem_measurements()`, allowing data retrieval for arbitrary time intervals (#2)
 - improve plots for `osem_measurements` & `sensebox` (#1)
-- add `sensorId` & `unit` column to `get_measurements()` output by default
+- add `sensorId` & `unit` colummn to `get_measurements()` output by default
 - show download progress info, hide readr output
 - shorten vignette `osem-intro`

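
For orientation, here is a short usage sketch of two features named in the changelog above. The box ID and the archive call are taken from the roxygen examples embedded in R/archive.R further down this diff; the date range is illustrative, and the osem_measurements() argument names are inferred from the from/to helpers in R/00utils.R rather than confirmed here.

    library(opensensmapr)

    # v0.2.0: auto paging lets osem_measurements() cover arbitrary time intervals
    # (phenomenon and from/to arguments as inferred above)
    m = osem_measurements('Temperatur',
                          from = as.POSIXct('2018-09-01'),
                          to   = as.POSIXct('2018-09-30'))

    # v0.5.0 (master only): day-wise CSV dumps from archive.opensensemap.org,
    # much faster for long time frames
    box = osem_box('593bcd656ccf3b0011791f5a')
    m2  = osem_measurements_archive(box, as.POSIXlt('2018-09-01'), as.POSIXlt('2018-09-30'))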

CONDUCT.md (27 changed lines)

@@ -1,20 +1,25 @@
 # Contributor Code of Conduct

 As contributors and maintainers of this project, we pledge to respect all people who
-contribute through any means.
+contribute through reporting issues, posting feature requests, updating documentation,
+submitting pull requests or patches, and other activities.

 We are committed to making participation in this project a harassment-free experience for
-everyone, regardless of their level of experience and personal or cultural traits.
+everyone, regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance, body size, race, ethnicity, age, or religion.

-Examples of unacceptable behavior by participants include derogatory comments,
-personal attacks, and trolling, both in public or private.
+Examples of unacceptable behavior by participants include the use of sexual language or
+imagery, derogatory comments or personal attacks, trolling, public or private harassment,
+insults, or other unprofessional conduct.

-Project maintainers have the right and responsibility to remove, edit, or reject any
-contributions that are not aligned to this Code of Conduct. Project maintainers who
-do not follow the Code of Conduct may be removed from the project team.
+Project maintainers have the right and responsibility to remove, edit, or reject comments,
+commits, code, wiki edits, issues, and other contributions that are not aligned to this
+Code of Conduct. Project maintainers who do not follow the Code of Conduct may be removed
+from the project team.

 Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
 opening an issue or contacting one or more of the project maintainers.

-This Code of Conduct is adapted from the [Contributor Covenant version 1.0.0](http://contributor-covenant.org/version/1/0/0/).
+This Code of Conduct is adapted from the Contributor Covenant
+(http:contributor-covenant.org), version 1.0.0, available at
+http://contributor-covenant.org/version/1/0/0/

DESCRIPTION (24 changed lines)

@@ -1,23 +1,18 @@
 Package: opensensmapr
 Type: Package
-Title: Client for the Data API of 'openSenseMap.org'
-Version: 0.6.0
-URL: https://github.com/sensebox/opensensmapR
-BugReports: https://github.com/sensebox/opensensmapR/issues
-Depends:
-    R (>= 3.5.0)
+Title: Client for the Data API of openSenseMap.org
+Version: 0.4.0
+URL: http://github.com/noerw/opensensmapR
+BugReports: http://github.com/noerw/opensensmapR/issues
 Imports:
     dplyr,
     httr,
     digest,
-    lazyeval,
-    readr,
-    purrr,
     magrittr
 Suggests:
     maps,
     maptools,
-    tibble,
+    readr,
     rgeos,
     sf,
     knitr,
@@ -30,9 +25,8 @@ Suggests:
     lintr,
     testthat,
     covr
-Authors@R: c(person("Norwin", "Roosen", role = c("aut"), email = "hello@nroo.de"),
-    person("Daniel", "Nuest", role = c("ctb"), email = "daniel.nuest@uni-muenster.de", comment = c(ORCID = "0000-0003-2392-6140")),
-    person("Jan", "Stenkamp", role = c("ctb", "cre"), email = "jan.stenkamp@uni-muenster.de"))
+Authors@R: c(person("Norwin", "Roosen", role = c("aut", "cre"), email = "hello@nroo.de"),
+    person("Daniel", "Nuest", role = c("ctb"), email = "daniel.nuest@uni-muenster.de", comment = c(ORCID = "0000-0003-2392-6140")))
 Description: Download environmental measurements and sensor station metadata
     from the API of open data sensor web platform <https://opensensemap.org> for
     analysis in R.
@@ -41,8 +35,8 @@ Description: Download environmental measurements and sensor station metadata
     phenomena.
     The package aims to be compatible with 'sf' and the 'Tidyverse', and provides
     several helper functions for data exploration and transformation.
-License: GPL (>= 2)
+License: GPL (>= 2) | file LICENSE
 Encoding: UTF-8
 LazyData: true
-RoxygenNote: 7.2.3
+RoxygenNote: 6.0.1
 VignetteBuilder: knitr
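
Both DESCRIPTION variants describe the same package under different homes; a hedged install sketch (the CRAN release is the one master prepares via the cran-comments/CRAN-SUBMISSION files ignored above, and remotes is just one convenient way to install from GitHub):

    # released version, if available on CRAN
    install.packages('opensensmapr')

    # development version from the repository named in the master DESCRIPTION
    # install.packages('remotes')
    remotes::install_github('sensebox/opensensmapR')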

LICENSE (new file, 339 added lines)

@@ -0,0 +1,339 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc., <http://fsf.org/>
+ 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+[... the remaining added lines are the unmodified standard GPL-2 text: Preamble,
+ Terms and Conditions for Copying, Distribution and Modification (sections 0-12,
+ including the NO WARRANTY sections), and the appendix "How to Apply These Terms
+ to Your New Programs" ending with the note on the GNU Lesser General Public License.]

NAMESPACE

@@ -5,14 +5,16 @@ S3method("[",sensebox)
 S3method(osem_measurements,bbox)
 S3method(osem_measurements,default)
 S3method(osem_measurements,sensebox)
-S3method(osem_measurements_archive,default)
-S3method(osem_measurements_archive,sensebox)
 S3method(osem_phenomena,sensebox)
 S3method(plot,osem_measurements)
 S3method(plot,sensebox)
 S3method(print,osem_measurements)
 S3method(print,sensebox)
 S3method(summary,sensebox)
+export(filter.osem_measurements)
+export(filter.sensebox)
+export(mutate.osem_measurements)
+export(mutate.sensebox)
 export(osem_as_measurements)
 export(osem_as_sensebox)
 export(osem_box)
@@ -21,8 +23,9 @@ export(osem_clear_cache)
 export(osem_counts)
 export(osem_endpoint)
 export(osem_measurements)
-export(osem_measurements_archive)
 export(osem_phenomena)
+export(st_as_sf.osem_measurements)
+export(st_as_sf.sensebox)
 importFrom(graphics,legend)
 importFrom(graphics,par)
 importFrom(graphics,plot)
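
v0.4.0 exports the dplyr and sf wrappers statically, while master registers them as S3 methods instead (see the v0.4.3 changelog entry above). Either way the intent is the same: dplyr verbs keep the sensebox class so plotting and printing still dispatch. A small sketch, with an illustrative filter value:

    library(opensensmapr)
    library(dplyr)

    b = osem_boxes()                                # a sensebox data.frame
    outdoor = b %>% filter(exposure == 'outdoor')   # 'outdoor' is an illustrative exposure
    inherits(outdoor, 'sensebox')                   # TRUE: the class survives the verb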

R/00utils.R (25 changed lines)

@@ -1,13 +1,13 @@
 # parses from/to params for get_measurements_ and get_boxes_
 parse_dateparams = function (from, to) {
-  from = date_as_utc(from)
-  to = date_as_utc(to)
+  from = utc_date(from)
+  to = utc_date(to)
   if (from - to > 0) stop('"from" must be earlier than "to"')
   c(date_as_isostring(from), date_as_isostring(to))
 }

 # NOTE: cannot handle mixed vectors of POSIXlt and POSIXct
-date_as_utc = function (date) {
+utc_date = function (date) {
   time = as.POSIXct(date)
   attr(time, 'tzone') = 'UTC'
   time
@@ -16,7 +16,14 @@ date_as_utc = function (date) {
 # NOTE: cannot handle mixed vectors of POSIXlt and POSIXct
 date_as_isostring = function (date) format.Date(date, format = '%FT%TZ')

-isostring_as_date = function (x) as.POSIXct(strptime(x, format = '%FT%T', tz = 'GMT'))
+#' Simple factory function meant to implement dplyr functions for other classes,
+#' which call an callback to attach the original class again after the fact.
+#'
+#' @param callback The function to call after the dplyr function
+#' @noRd
+dplyr_class_wrapper = function(callback) {
+  function(.data, ..., .dots) callback(NextMethod())
+}

 #' Checks for an interactive session using interactive() and a knitr process in
 #' the callstack. See https://stackoverflow.com/a/33108841
@@ -26,13 +33,3 @@ is_non_interactive = function () {
   ff = sapply(sys.calls(), function(f) as.character(f[1]))
   any(ff %in% c('knit2html', 'render')) || !interactive()
 }
-
-#' custom recursive lapply with better handling of NULL values
-#' from https://stackoverflow.com/a/38950304
-#' @noRd
-recursive_lapply = function(x, fn) {
-  if (is.list(x))
-    lapply(x, recursive_lapply, fn)
-  else
-    fn(x)
-}
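
To make the master-only helper above concrete: recursive_lapply() walks a nested list and applies a function to the leaves, which parse_senseboxdata() in R/box.R uses to replace NULLs with NA before bind_rows(). A self-contained sketch; the helper body is copied from the hunk above, the sensor record is an illustrative stand-in for an API response:

    recursive_lapply = function(x, fn) {
      if (is.list(x))
        lapply(x, recursive_lapply, fn)
      else
        fn(x)
    }

    # illustrative sensor entry with a missing unit
    sensor = list(title = 'Temperatur', unit = NULL, sensorType = 'HDC1080')
    str(recursive_lapply(sensor, function(x) if (is.null(x)) NA else x))
    # List of 3
    #  $ title     : chr "Temperatur"
    #  $ unit      : logi NA
    #  $ sensorType: chr "HDC1080"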

R/api.R (47 changed lines)

@@ -4,32 +4,11 @@
 # for CSV responses (get_measurements) the readr package is a hidden dependency
 # ==============================================================================

-default_api = 'https://api.opensensemap.org'
-
 #' Get the default openSenseMap API endpoint
 #' @export
 #' @return A character string with the HTTP URL of the openSenseMap API
-osem_endpoint = function() default_api
-
-#' Check if the given openSenseMap API endpoint is available
-#' @param endpoint The API base URL to check, defaulting to \code{\link{osem_endpoint}}
-#' @return \code{TRUE} if the API is available, otherwise \code{stop()} is called.
-osem_ensure_api_available = function(endpoint = osem_endpoint()) {
-  code = FALSE
-  try({
-    code = httr::status_code(httr::GET(endpoint, path='stats'))
-  }, silent = TRUE)
-
-  if (code == 200)
-    return(TRUE)
-
-  errtext = paste('The API at', endpoint, 'is currently not available.')
-  if (code != FALSE)
-    errtext = paste0(errtext, ' (HTTP code ', code, ')')
-  if (endpoint == default_api)
-    errtext = c(errtext, 'If the issue persists, please check back at https://status.sensebox.de/778247404 and notify support@sensebox.de')
-  stop(paste(errtext, collapse='\n '), call. = FALSE)
-  FALSE
+osem_endpoint = function() {
+  'https://api.opensensemap.org'
 }

 get_boxes_ = function (..., endpoint) {
@@ -45,9 +24,8 @@ get_boxes_ = function (..., endpoint) {
   df = dplyr::bind_rows(boxesList)
   df$exposure = df$exposure %>% as.factor()
   df$model = df$model %>% as.factor()
-  if (!is.null(df$grouptag)){
+  if (!is.null(df$grouptag))
     df$grouptag = df$grouptag %>% as.factor()
-  }
   df
 }

@@ -56,10 +34,12 @@ get_box_ = function (boxId, endpoint, ...) {
     parse_senseboxdata()
 }

-parse_measurement_csv = function (resText) {
+get_measurements_ = function (..., endpoint) {
+  result = osem_get_resource(endpoint, c('boxes', 'data'), ..., type = 'text')
+
   # parse the CSV response manually & mute readr
   suppressWarnings({
-    result = readr::read_csv(resText, col_types = readr::cols(
+    result = readr::read_csv(result, col_types = readr::cols(
       # factor as default would raise issues with concatenation of multiple requests
       .default = readr::col_character(),
       createdAt = readr::col_datetime(),
@@ -71,11 +51,6 @@ parse_measurement_csv = function (resText) {
   })

   osem_as_measurements(result)
-}
-
-get_measurements_ = function (..., endpoint) {
-  osem_get_resource(endpoint, c('boxes', 'data'), ..., type = 'text') %>%
-    parse_measurement_csv
 }

 get_stats_ = function (endpoint, cache) {
@@ -94,7 +69,7 @@ get_stats_ = function (endpoint, cache) {
 #' @param cache Optional path to a directory were responses will be cached. If not NA, no requests will be made when a request for the given is already cached.
 #' @return Result of a Request to openSenseMap API
 #' @noRd
-osem_get_resource = function (host, path, ..., type = 'parsed', progress = TRUE, cache = NA) {
+osem_get_resource = function (host, path, ..., type = 'parsed', progress = T, cache = NA) {
   query = list(...)
   if (!is.na(cache)) {
     filename = osem_cache_filename(path, query, host) %>% paste(cache, ., sep = '/')
@@ -121,12 +96,11 @@ osem_cache_filename = function (path, query = list(), host = osem_endpoint()) {
 #'
 #' @export
 #' @examples
-#' \dontrun{
+#' \donttest{
 #' osem_boxes(cache = tempdir())
 #' osem_clear_cache()
 #'
 #' cachedir = paste(getwd(), 'osemcache', sep = '/')
-#' dir.create(file.path(cachedir), showWarnings = FALSE)
 #' osem_boxes(cache = cachedir)
 #' osem_clear_cache(cachedir)
 #' }
@@ -138,9 +112,6 @@ osem_clear_cache = function (location = tempdir()) {
 }

 osem_request_ = function (host, path, query = list(), type = 'parsed', progress = TRUE) {
-  # stop() if API is not available
-  osem_ensure_api_available(host)
-
   progress = if (progress && !is_non_interactive()) httr::progress() else NULL
   res = httr::GET(host, progress, path = path, query = query)

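
A usage sketch of the response cache documented above, following the roxygen examples; the grouptag is an arbitrary illustration:

    library(opensensmapr)

    # the first call downloads and stores the response below tempdir() ...
    b = osem_boxes(grouptag = 'ifgi', cache = tempdir())
    # ... and an identical query is then answered from the cached file
    b = osem_boxes(grouptag = 'ifgi', cache = tempdir())

    osem_clear_cache()   # removes cached responses in tempdir() again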

R/archive.R (173 changed lines)

@@ -1,173 +0,0 @@
-# client for archive.opensensemap.org
-# in this archive, CSV files for measurements of each sensor per day is provided.
-
-default_archive_url = 'https://archive.opensensemap.org/'
-
-#' Returns the default endpoint for the archive *download*
-#' While the front end domain is archive.opensensemap.org, file downloads
-#' are provided via sciebo.
-osem_archive_endpoint = function () default_archive_url
-
-#' Fetch day-wise measurements for a single box from the openSenseMap archive.
-#'
-#' This function is significantly faster than \code{\link{osem_measurements}} for large
-#' time-frames, as daily CSV dumps for each sensor from
-#' \href{https://archive.opensensemap.org}{archive.opensensemap.org} are used.
-#' Note that the latest data available is from the previous day.
-#'
-#' By default, data for all sensors of a box is fetched, but you can select a
-#' subset with a \code{\link[dplyr]{dplyr}}-style NSE filter expression.
-#'
-#' The function will warn when no data is available in the selected period,
-#' but continue the remaining download.
-#'
-#' @param x A `sensebox data.frame` of a single box, as retrieved via \code{\link{osem_box}},
-#'   to download measurements for.
-#' @param ... see parameters below
-#' @param fromDate Start date for measurement download, must be convertable via `as.Date`.
-#' @param toDate End date for measurement download (inclusive).
-#' @param sensorFilter A NSE formula matching to \code{x$sensors}, selecting a subset of sensors.
-#' @param progress Whether to print download progress information, defaults to \code{TRUE}.
-#' @return A \code{tbl_df} containing observations of all selected sensors for each time stamp.
-#'
-#' @seealso \href{https://archive.opensensemap.org}{openSenseMap archive}
-#' @seealso \code{\link{osem_measurements}}
-#' @seealso \code{\link{osem_box}}
-#'
-#' @export
-osem_measurements_archive = function (x, ...) UseMethod('osem_measurements_archive')
-
-#' @export
-osem_measurements_archive.default = function (x, ...) {
-  # NOTE: to implement for a different class:
-  # in order to call `archive_fetch_measurements()`, `box` must be a dataframe
-  # with a single row and the columns `X_id` and `name`
-  stop(paste('not implemented for class', toString(class(x))))
-}
-
-
-# ==============================================================================
-#
-#' @describeIn osem_measurements_archive Get daywise measurements for one or more sensors of a single box.
-#' @export
-#' @examples
-#' \donttest{
-#' # fetch measurements for a single day
-#' box = osem_box('593bcd656ccf3b0011791f5a')
-#' m = osem_measurements_archive(box, as.POSIXlt('2018-09-13'))
-#'
-#' # fetch measurements for a date range and selected sensors
-#' sensors = ~ phenomenon %in% c('Temperatur', 'Beleuchtungsstärke')
-#' m = osem_measurements_archive(
-#'   box,
-#'   as.POSIXlt('2018-09-01'), as.POSIXlt('2018-09-30'),
-#'   sensorFilter = sensors
-#' )
-#' }
-osem_measurements_archive.sensebox = function (x, fromDate, toDate = fromDate, sensorFilter = ~ TRUE, ..., progress = TRUE) {
-  if (nrow(x) != 1)
-    stop('this function only works for exactly one senseBox!')
-
-  # filter sensors using NSE, for example: `~ phenomenon == 'Temperatur'`
-  sensors = x$sensors[[1]] %>%
-    dplyr::filter(lazyeval::f_eval(sensorFilter, .))
-
-  # fetch each sensor separately
-  dfs = by(sensors, 1:nrow(sensors), function (sensor) {
-    df = archive_fetch_measurements(x, sensor$id, fromDate, toDate, progress) %>%
-      dplyr::select(createdAt, value) %>%
-      #dplyr::mutate(unit = sensor$unit, sensor = sensor$sensor) %>% # inject sensor metadata
-      dplyr::rename_at(., 'value', function(v) sensor$phenomenon)
-  })
-
-  # merge all data.frames by timestamp
-  dfs %>% purrr::reduce(dplyr::full_join, 'createdAt')
-}
-
-#' fetch measurements from archive from a single box, and a single sensor
-#'
-#' @param box A sensebox data.frame with a single box
-#' @param sensorId Character specifying the sensor
-#' @param fromDate Start date for measurement download, must be convertable via `as.Date`.
-#' @param toDate End date for measurement download (inclusive).
-#' @param progress whether to print progress
-#' @return A \code{tbl_df} containing observations of all selected sensors for each time stamp.
-archive_fetch_measurements = function (box, sensorId, fromDate, toDate, progress) {
-  osem_ensure_archive_available()
-
-  dates = list()
-  from = fromDate
-  while (from <= toDate) {
-    dates = append(dates, list(from))
-    from = from + as.difftime(1, units = 'days')
-  }
-
-  http_handle = httr::handle(osem_archive_endpoint()) # reuse the http connection for speed!
-  progress = if (progress && !is_non_interactive()) httr::progress() else NULL
-
-  measurements = lapply(dates, function(date) {
-    url = build_archive_url(date, box, sensorId)
-    res = httr::GET(url, progress, handle = http_handle)
-
-    if (httr::http_error(res)) {
-      warning(paste(
-        httr::status_code(res),
-        'on day', format.Date(date, '%F'),
-        'for sensor', sensorId
-      ))
-
-      if (httr::status_code(res) == 404)
-        return(data.frame(createdAt = as.POSIXlt(x = integer(0), origin = date), value = double()))
-    }
-
-    measurements = httr::content(res, type = 'text', encoding = 'UTF-8') %>%
-      parse_measurement_csv
-  })
-
-  measurements %>% dplyr::bind_rows()
-}
-
-#' returns URL to fetch measurements from a sensor for a specific date,
-#' based on `osem_archive_endpoint()`
-#' @noRd
-build_archive_url = function (date, box, sensorId) {
-  d = format.Date(date, '%F')
-  format = 'csv'
-
-  paste(
-    osem_archive_endpoint(),
-    d,
-    osem_box_to_archivename(box),
-    paste(paste(sensorId, d, sep = '-'), format, sep = '.'),
-    sep = '/'
-  )
-}
-
-#' replace chars in box name according to archive script:
-#' https://github.com/sensebox/osem-archiver/blob/612e14b/helpers.sh#L66
-#'
-#' @param box A sensebox data.frame
-#' @return character with archive identifier for each box
-osem_box_to_archivename = function (box) {
-  name = gsub('[^A-Za-z0-9._-]', '_', box$name)
-  paste(box$X_id, name, sep = '-')
-}
-
-#' Check if the given openSenseMap archive endpoint is available
-#' @param endpoint The archive base URL to check, defaulting to \code{\link{osem_archive_endpoint}}
-#' @return \code{TRUE} if the archive is available, otherwise \code{stop()} is called.
-osem_ensure_archive_available = function(endpoint = osem_archive_endpoint()) {
-  code = FALSE
-  try({
-    code = httr::status_code(httr::GET(endpoint))
-  }, silent = TRUE)
-
-  if (code == 200)
-    return(TRUE)
-
-  errtext = paste('The archive at', endpoint, 'is currently not available.')
-  if (code != FALSE)
-    errtext = paste0(errtext, ' (HTTP code ', code, ')')
-  stop(paste(errtext, collapse='\n '), call. = FALSE)
-  FALSE
-}
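
For readers skimming the removed file: build_archive_url() assembles one CSV URL per sensor and day. A hedged sketch of that layout; the box name and sensor ID are placeholders (a real name is first sanitized by osem_box_to_archivename()), and the endpoint's trailing slash is ignored here:

    # <endpoint>/<YYYY-MM-DD>/<boxId>-<boxName>/<sensorId>-<YYYY-MM-DD>.csv
    date     = '2018-09-13'                   # date from the roxygen example above
    boxid    = '593bcd656ccf3b0011791f5a'     # box ID from the roxygen example above
    boxname  = 'my_senseBox'                  # placeholder box name
    sensorid = '593bcd656ccf3b0011791f5b'     # placeholder sensor ID
    paste('https://archive.opensensemap.org', date,
          paste(boxid, boxname, sep = '-'),
          paste0(sensorid, '-', date, '.csv'),
          sep = '/')
    # "https://archive.opensensemap.org/2018-09-13/593bcd656ccf3b0011791f5a-my_senseBox/593bcd656ccf3b0011791f5b-2018-09-13.csv"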
74
R/box.R
74
R/box.R
|
@ -18,10 +18,6 @@
|
||||||
#' @param to Only return boxes that were measuring earlier than this time
|
#' @param to Only return boxes that were measuring earlier than this time
|
||||||
#' @param phenomenon Only return boxes that measured the given phenomenon in the
|
#' @param phenomenon Only return boxes that measured the given phenomenon in the
|
||||||
#' time interval as specified through \code{date} or \code{from / to}
|
#' time interval as specified through \code{date} or \code{from / to}
|
||||||
#' @param bbox Only return boxes that are within the given boundingbox,
|
|
||||||
#' vector of 4 WGS84 coordinates.
|
|
||||||
#' Order is: longitude southwest, latitude southwest, longitude northeast, latitude northeast.
|
|
||||||
#' Minimal and maximal values are: -180, 180 for longitude and -90, 90 for latitude.
|
|
||||||
#' @param endpoint The URL of the openSenseMap API instance
|
#' @param endpoint The URL of the openSenseMap API instance
|
||||||
#' @param progress Whether to print download progress information, defaults to \code{TRUE}
|
#' @param progress Whether to print download progress information, defaults to \code{TRUE}
|
||||||
#' @param cache Whether to cache the result, defaults to false.
|
#' @param cache Whether to cache the result, defaults to false.
|
||||||
|
@ -37,7 +33,7 @@
|
||||||
#' @export
|
#' @export
|
||||||
#' @examples
|
#' @examples
|
||||||
#'
|
#'
|
||||||
#' \dontrun{
|
#' \donttest{
|
||||||
#' # get *all* boxes available on the API
|
#' # get *all* boxes available on the API
|
||||||
#' b = osem_boxes()
|
#' b = osem_boxes()
|
||||||
#'
|
#'
|
||||||
|
@ -71,8 +67,7 @@
|
||||||
#' b = osem_boxes(progress = FALSE)
|
#' b = osem_boxes(progress = FALSE)
|
||||||
#' }
|
#' }
|
||||||
osem_boxes = function (exposure = NA, model = NA, grouptag = NA,
|
osem_boxes = function (exposure = NA, model = NA, grouptag = NA,
|
||||||
date = NA, from = NA, to = NA, phenomenon = NA,
|
date = NA, from = NA, to = NA, phenomenon = NA,
|
||||||
bbox = NA,
|
|
||||||
endpoint = osem_endpoint(),
|
endpoint = osem_endpoint(),
|
||||||
progress = TRUE,
|
progress = TRUE,
|
||||||
cache = NA) {
|
cache = NA) {
|
||||||
|
@ -98,13 +93,11 @@ osem_boxes = function (exposure = NA, model = NA, grouptag = NA,
|
||||||
if (!is.na(model)) query$model = model
|
if (!is.na(model)) query$model = model
|
||||||
if (!is.na(grouptag)) query$grouptag = grouptag
|
if (!is.na(grouptag)) query$grouptag = grouptag
|
||||||
if (!is.na(phenomenon)) query$phenomenon = phenomenon
|
if (!is.na(phenomenon)) query$phenomenon = phenomenon
|
||||||
if (all(!is.na(bbox))) query$bbox = paste(bbox, collapse = ', ')
|
|
||||||
|
|
||||||
if (!is.na(to) && !is.na(from))
|
if (!is.na(to) && !is.na(from))
|
||||||
query$date = parse_dateparams(from, to) %>% paste(collapse = ',')
|
query$date = parse_dateparams(from, to) %>% paste(collapse = ',')
|
||||||
else if (!is.na(date))
|
else if (!is.na(date))
|
||||||
query$date = date_as_utc(date) %>% date_as_isostring()
|
query$date = utc_date(date) %>% date_as_isostring()
|
||||||
|
|
||||||
|
|
||||||
do.call(get_boxes_, query)
|
do.call(get_boxes_, query)
|
||||||
}
|
}
|
||||||
|
@ -125,7 +118,7 @@ osem_boxes = function (exposure = NA, model = NA, grouptag = NA,
|
||||||
#' @seealso \code{\link{osem_clear_cache}}
|
#' @seealso \code{\link{osem_clear_cache}}
|
||||||
#' @export
|
#' @export
|
||||||
#' @examples
|
#' @examples
|
||||||
#' \dontrun{
|
#' \donttest{
|
||||||
#' # get a specific box by ID
|
#' # get a specific box by ID
|
||||||
#' b = osem_box('57000b8745fd40c8196ad04c')
|
#' b = osem_box('57000b8745fd40c8196ad04c')
|
||||||
#'
|
#'
|
||||||
|
@@ -154,63 +147,32 @@ parse_senseboxdata = function (boxdata) {
   # to allow a simple data.frame structure
   sensors = boxdata$sensors
   location = boxdata$currentLocation
-  lastMeasurement = boxdata$lastMeasurementAt # rename for backwards compat < 0.5.1
-  grouptags = boxdata$grouptag
-  boxdata[c(
-    'loc', 'locations', 'currentLocation', 'sensors', 'image', 'boxType', 'lastMeasurementAt', 'grouptag'
-  )] = NULL
-  thebox = as.data.frame(boxdata, stringsAsFactors = FALSE)
+  boxdata[c('loc', 'locations', 'currentLocation', 'sensors', 'image', 'boxType')] = NULL
+  thebox = as.data.frame(boxdata, stringsAsFactors = F)

   # parse timestamps (updatedAt might be not defined)
-  thebox$createdAt = isostring_as_date(thebox$createdAt)
+  thebox$createdAt = as.POSIXct(strptime(thebox$createdAt, format = '%FT%T', tz = 'GMT'))
   if (!is.null(thebox$updatedAt))
-    thebox$updatedAt = isostring_as_date(thebox$updatedAt)
-  if (!is.null(lastMeasurement))
-    thebox$lastMeasurement = isostring_as_date(lastMeasurement)
-
-  # add empty sensortype to sensors without type
-  if (!('sensorType' %in% names(sensors[[1]]))) {
-    sensors[[1]]$sensorType <- NA
-  }
-
-  # create a dataframe of sensors
-  thebox$sensors = sensors %>%
-    recursive_lapply(function (x) if (is.null(x)) NA else x) %>% # replace NULLs with NA
-    lapply(as.data.frame, stringsAsFactors = FALSE) %>%
-    dplyr::bind_rows(.) %>%
-    dplyr::select(phenomenon = title, id = X_id, unit, sensor = sensorType) %>%
-    list
+    thebox$updatedAt = as.POSIXct(strptime(thebox$updatedAt, format = '%FT%T', tz = 'GMT'))

   # extract metadata from sensors
-  thebox$phenomena = sensors %>%
-    stats::setNames(lapply(., function (s) s$`_id`)) %>%
-    lapply(function(s) s$title) %>%
-    unlist %>% list # convert to vector
+  thebox$phenomena = lapply(sensors, function(s) s$title) %>% unlist %>% list
+  # FIXME: if one sensor has NA, max() returns bullshit
+  get_last_measurement = function(s) {
+    if (!is.null(s$lastMeasurement))
+      as.POSIXct(strptime(s$lastMeasurement$createdAt, format = '%FT%T', tz = 'GMT'))
+    else
+      NA
+  }
+  thebox$lastMeasurement = max(lapply(sensors, get_last_measurement)[[1]])

   # extract coordinates & transform to simple feature object
   thebox$lon = location$coordinates[[1]]
   thebox$lat = location$coordinates[[2]]
-  thebox$locationtimestamp = isostring_as_date(location$timestamp)
   if (length(location$coordinates) == 3)
     thebox$height = location$coordinates[[3]]

-  # extract grouptag(s) from box
-  if (length(grouptags) == 0)
-    thebox$grouptag = NULL
-  if (length(grouptags) > 0) {
-    # if box does not have grouptag dont set attribute
-    if (grouptags[[1]] == '') {
-      thebox$grouptag = NULL
-    }
-    else {
-      thebox$grouptag = grouptags[[1]]
-    }
-  }
-  if (length(grouptags) > 1)
-    thebox$grouptag2 = grouptags[[2]]
-  if (length(grouptags) > 2)
-    thebox$grouptag3 = grouptags[[3]]
-
   # attach a custom class for methods
   osem_as_sensebox(thebox)
 }
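To see what the parser above produces in practice, here is a small, hedged sketch (network access required; the box ID is the one used in the roxygen examples, and the `sensors` list-column only exists on the master side of this diff):

```r
library(opensensmapr)

# osem_box() fetches one station and runs parse_senseboxdata() internally
b = osem_box('57000b8745fd40c8196ad04c')

class(b)              # "sensebox" "data.frame"
b$phenomena[[1]]      # character vector of the sensor titles
b$sensors[[1]]        # data.frame with phenomenon, id, unit, sensor (master only)
b[, c('lon', 'lat', 'createdAt', 'lastMeasurement')]
```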
@@ -1,5 +1,14 @@
 #' @export
 plot.sensebox = function (x, ..., mar = c(2, 2, 1, 1)) {
+  if (
+    !requireNamespace('sf', quietly = TRUE) ||
+    !requireNamespace('maps', quietly = TRUE) ||
+    !requireNamespace('maptools', quietly = TRUE) ||
+    !requireNamespace('rgeos', quietly = TRUE)
+  ) {
+    stop('this functions requires additional packages. install them with
+          install.packages(c("sf", "maps", "maptools", "rgeos"))')
+  }

   geom = x %>%
     sf::st_as_sf() %>%
@@ -11,12 +20,12 @@ plot.sensebox = function (x, ..., mar = c(2, 2, 1, 1)) {
     sf::st_as_sf() %>%
     sf::st_geometry()

-  oldpar <- par(no.readonly = TRUE)
-  on.exit(par(oldpar))
+  oldpar = par()
   par(mar = mar)
-  plot(world, col = 'gray', xlim = bbox[c(1, 3)], ylim = bbox[c(2, 4)], axes = TRUE, ...)
-  plot(geom, add = TRUE, col = x$exposure, ...)
+  plot(world, col = 'gray', xlim = bbox[c(1, 3)], ylim = bbox[c(2, 4)], axes = T)
+  plot(geom, add = T, col = x$exposure)
   legend('left', legend = levels(x$exposure), col = 1:length(x$exposure), pch = 1)
+  par(mar = oldpar$mar)

   invisible(x)
 }
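A minimal usage sketch for the plot method above. Hedged: on the v0.4.0 side the four suggested packages must be installed first; the master side dropped the maptools/rgeos check, presumably because those packages were retired from CRAN.

```r
library(opensensmapr)

# suggested packages used by plot.sensebox(); uncomment if they are missing
# install.packages(c('sf', 'maps', 'maptools', 'rgeos'))

boxes = osem_boxes(grouptag = 'ifgi')   # any box query works here
plot(boxes)                             # world map, stations coloured by exposure
```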
@@ -30,14 +39,14 @@ print.sensebox = function(x, columns = c('name', 'exposure', 'lastMeasurement',

 #' @export
 summary.sensebox = function(object, ...) {
-  cat('boxes total:', nrow(object), fill = TRUE)
+  cat('boxes total:', nrow(object), fill = T)
   cat('\nboxes by exposure:')
   table(object$exposure) %>% print()
   cat('\nboxes by model:')
   table(object$model) %>% print()
   cat('\n')

-  diffNow = (date_as_utc(Sys.time()) - object$lastMeasurement) %>% as.numeric(unit = 'hours')
+  diffNow = (utc_date(Sys.time()) - object$lastMeasurement) %>% as.numeric(unit = 'hours')
   list(
     'last_measurement_within' = c(
       '1h' = nrow(dplyr::filter(object, diffNow <= 1)),
@@ -50,10 +59,10 @@ summary.sensebox = function(object, ...) {

   oldest = object[object$createdAt == min(object$createdAt), ]
   newest = object[object$createdAt == max(object$createdAt), ]
-  cat('oldest box:', format(oldest$createdAt, '%F %T'), paste0('(', oldest$name, ')'), fill = TRUE)
-  cat('newest box:', format(newest$createdAt, '%F %T'), paste0('(', newest$name, ')'), fill = TRUE)
+  cat('oldest box:', format(oldest$createdAt, '%F %T'), paste0('(', oldest$name, ')'), fill = T)
+  cat('newest box:', format(newest$createdAt, '%F %T'), paste0('(', newest$name, ')'), fill = T)

-  cat('\nsensors per box:', fill = TRUE)
+  cat('\nsensors per box:', fill = T)
   lapply(object$phenomena, length) %>%
     as.numeric() %>%
     summary() %>%
@@ -62,12 +71,52 @@ summary.sensebox = function(object, ...) {
   invisible(object)
 }
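For context, a short sketch of how the summary method above is typically called (network access assumed; the grouptag filter is only an example):

```r
library(opensensmapr)

b = osem_boxes(grouptag = 'ifgi')
summary(b)
# prints counts by exposure and model, how many boxes measured recently,
# the oldest and newest box, and sensors-per-box statistics
```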
|
|
||||||
|
# ==============================================================================
|
||||||
|
#
|
||||||
#' Converts a foreign object to a sensebox data.frame.
|
#' Converts a foreign object to a sensebox data.frame.
|
||||||
#' @param x A data.frame to attach the class to
|
#' @param x A data.frame to attach the class to
|
||||||
#' @return data.frame of class \code{sensebox}
|
|
||||||
#' @export
|
#' @export
|
||||||
osem_as_sensebox = function(x) {
|
osem_as_sensebox = function(x) {
|
||||||
ret = as.data.frame(x)
|
ret = as.data.frame(x)
|
||||||
class(ret) = c('sensebox', class(x))
|
class(ret) = c('sensebox', class(x))
|
||||||
ret
|
ret
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#' Return rows with matching conditions, while maintaining class & attributes
|
||||||
|
#' @param .data A sensebox data.frame to filter
|
||||||
|
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
||||||
|
#' @param ... other arguments
|
||||||
|
#' @seealso \code{\link[dplyr]{filter}}
|
||||||
|
#' @export
|
||||||
|
filter.sensebox = dplyr_class_wrapper(osem_as_sensebox)
|
||||||
|
|
||||||
|
#' Add new variables to the data, while maintaining class & attributes
|
||||||
|
#' @param .data A sensebox data.frame to mutate
|
||||||
|
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
||||||
|
#' @param ... other arguments
|
||||||
|
#' @seealso \code{\link[dplyr]{mutate}}
|
||||||
|
#' @export
|
||||||
|
mutate.sensebox = dplyr_class_wrapper(osem_as_sensebox)
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
#
|
||||||
|
#' maintains class / attributes after subsetting
|
||||||
|
#' @noRd
|
||||||
|
#' @export
|
||||||
|
`[.sensebox` = function(x, i, ...) {
|
||||||
|
s = NextMethod('[')
|
||||||
|
mostattributes(s) = attributes(s)
|
||||||
|
s
|
||||||
|
}
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
#
|
||||||
|
#' Convert a \code{sensebox} dataframe to an \code{\link[sf]{st_sf}} object.
|
||||||
|
#'
|
||||||
|
#' @param x The object to convert
|
||||||
|
#' @param ... maybe more objects to convert
|
||||||
|
#' @return The object with an st_geometry column attached.
|
||||||
|
#' @export
|
||||||
|
st_as_sf.sensebox = function (x, ...) {
|
||||||
|
NextMethod(x, ..., coords = c('lon', 'lat'), crs = 4326)
|
||||||
|
}
|
||||||
|
|
|
@ -1,126 +0,0 @@
|
||||||
# helpers for the dplyr & co related functions
|
|
||||||
# also delayed method registration
|
|
||||||
#
|
|
||||||
# Methods for external generics (except when from `base`) should be registered,
|
|
||||||
# but not exported: see https://github.com/klutometis/roxygen/issues/796
|
|
||||||
# Until roxygen supports this usecase properly, we're using a different
|
|
||||||
# workaround than suggested, copied from edzer's sf package:
|
|
||||||
# dynamically register the methods only when the related package is loaded as well.
|
|
||||||
|
|
||||||
|
|
||||||
# ====================== base generics =========================
|
|
||||||
|
|
||||||
#' maintains class / attributes after subsetting
|
|
||||||
#' @noRd
|
|
||||||
#' @export
|
|
||||||
`[.sensebox` = function(x, i, ...) {
|
|
||||||
s = NextMethod('[')
|
|
||||||
mostattributes(s) = attributes(s)
|
|
||||||
s
|
|
||||||
}
|
|
||||||
|
|
||||||
#' maintains class / attributes after subsetting
|
|
||||||
#' @noRd
|
|
||||||
#' @export
|
|
||||||
`[.osem_measurements` = function(x, i, ...) {
|
|
||||||
s = NextMethod()
|
|
||||||
mostattributes(s) = attributes(x)
|
|
||||||
s
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# ====================== dplyr generics =========================
|
|
||||||
|
|
||||||
#' Simple factory function meant to implement dplyr functions for other classes,
|
|
||||||
#' which call an callback to attach the original class again after the fact.
|
|
||||||
#'
|
|
||||||
#' @param callback The function to call after the dplyr function
|
|
||||||
#' @noRd
|
|
||||||
dplyr_class_wrapper = function(callback) {
|
|
||||||
function(.data, ..., .dots) callback(NextMethod())
|
|
||||||
}
|
|
||||||
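A small sketch of what the factory above achieves, assuming dplyr is installed and some boxes have been fetched: the wrapped verbs keep the `sensebox` class, so the package's print/plot/summary methods still apply afterwards.

```r
library(opensensmapr)
library(dplyr)

boxes = osem_boxes(exposure = 'mobile')
class(boxes)     # "sensebox" "data.frame"

# filter.sensebox = dplyr_class_wrapper(osem_as_sensebox), i.e. it calls the
# regular dplyr verb via NextMethod() and then re-attaches the sensebox class
recent = filter(boxes, createdAt > as.POSIXct('2022-01-01', tz = 'GMT'))
class(recent)    # still "sensebox" ...
```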
|
|
||||||
#' Return rows with matching conditions, while maintaining class & attributes
|
|
||||||
#' @param .data A sensebox data.frame to filter
|
|
||||||
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
|
||||||
#' @param ... other arguments
|
|
||||||
#' @seealso \code{\link[dplyr]{filter}}
|
|
||||||
filter.sensebox = dplyr_class_wrapper(osem_as_sensebox)
|
|
||||||
|
|
||||||
#' Add new variables to the data, while maintaining class & attributes
|
|
||||||
#' @param .data A sensebox data.frame to mutate
|
|
||||||
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
|
||||||
#' @param ... other arguments
|
|
||||||
#' @seealso \code{\link[dplyr]{mutate}}
|
|
||||||
mutate.sensebox = dplyr_class_wrapper(osem_as_sensebox)
|
|
||||||
|
|
||||||
#' Return rows with matching conditions, while maintaining class & attributes
|
|
||||||
#' @param .data A osem_measurements data.frame to filter
|
|
||||||
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
|
||||||
#' @param ... other arguments
|
|
||||||
#' @seealso \code{\link[dplyr]{filter}}
|
|
||||||
filter.osem_measurements = dplyr_class_wrapper(osem_as_measurements)
|
|
||||||
|
|
||||||
#' Add new variables to the data, while maintaining class & attributes
|
|
||||||
#' @param .data A osem_measurements data.frame to mutate
|
|
||||||
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
|
||||||
#' @param ... other arguments
|
|
||||||
#' @seealso \code{\link[dplyr]{mutate}}
|
|
||||||
mutate.osem_measurements = dplyr_class_wrapper(osem_as_measurements)
|
|
||||||
|
|
||||||
|
|
||||||
# ====================== sf generics =========================
|
|
||||||
|
|
||||||
#' Convert a \code{sensebox} dataframe to an \code{\link[sf]{st_sf}} object.
|
|
||||||
#'
|
|
||||||
#' @param x The object to convert
|
|
||||||
#' @param ... maybe more objects to convert
|
|
||||||
#' @return The object with an st_geometry column attached.
|
|
||||||
st_as_sf.sensebox = function (x, ...) {
|
|
||||||
NextMethod(x, ..., coords = c('lon', 'lat'), crs = 4326)
|
|
||||||
}
|
|
||||||
|
|
||||||
#' Convert a \code{osem_measurements} dataframe to an \code{\link[sf]{st_sf}} object.
|
|
||||||
#'
|
|
||||||
#' @param x The object to convert
|
|
||||||
#' @param ... maybe more objects to convert
|
|
||||||
#' @return The object with an st_geometry column attached.
|
|
||||||
st_as_sf.osem_measurements = function (x, ...) {
|
|
||||||
NextMethod(x, ..., coords = c('lon', 'lat'), crs = 4326)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# from: https://github.com/tidyverse/hms/blob/master/R/zzz.R
|
|
||||||
# Thu Apr 19 10:53:24 CEST 2018
|
|
||||||
register_s3_method <- function(pkg, generic, class, fun = NULL) {
|
|
||||||
stopifnot(is.character(pkg), length(pkg) == 1)
|
|
||||||
stopifnot(is.character(generic), length(generic) == 1)
|
|
||||||
stopifnot(is.character(class), length(class) == 1)
|
|
||||||
|
|
||||||
if (is.null(fun)) {
|
|
||||||
fun <- get(paste0(generic, ".", class), envir = parent.frame())
|
|
||||||
} else {
|
|
||||||
stopifnot(is.function(fun))
|
|
||||||
}
|
|
||||||
|
|
||||||
if (pkg %in% loadedNamespaces()) {
|
|
||||||
registerS3method(generic, class, fun, envir = asNamespace(pkg))
|
|
||||||
}
|
|
||||||
|
|
||||||
# Always register hook in case package is later unloaded & reloaded
|
|
||||||
setHook(
|
|
||||||
packageEvent(pkg, "onLoad"),
|
|
||||||
function(...) {
|
|
||||||
registerS3method(generic, class, fun, envir = asNamespace(pkg))
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
.onLoad = function(libname, pkgname) {
|
|
||||||
register_s3_method('dplyr', 'filter', 'sensebox')
|
|
||||||
register_s3_method('dplyr', 'mutate', 'sensebox')
|
|
||||||
register_s3_method('dplyr', 'filter', 'osem_measurements')
|
|
||||||
register_s3_method('dplyr', 'mutate', 'osem_measurements')
|
|
||||||
register_s3_method('sf', 'st_as_sf', 'sensebox')
|
|
||||||
register_s3_method('sf', 'st_as_sf', 'osem_measurements')
|
|
||||||
}
|
|
|
@ -1,6 +1,6 @@
|
||||||
# ==============================================================================
|
# ==============================================================================
|
||||||
#
|
#
|
||||||
#' Fetch the Measurements of a Phenomenon on opensensemap.org
|
#' Get the Measurements of a Phenomenon on opensensemap.org
|
||||||
#'
|
#'
|
||||||
#' Measurements can be retrieved either for a set of boxes, or through a spatial
|
#' Measurements can be retrieved either for a set of boxes, or through a spatial
|
||||||
#' bounding box filter. To get all measurements, the \code{default} function applies
|
#' bounding box filter. To get all measurements, the \code{default} function applies
|
||||||
|
@ -39,7 +39,7 @@ osem_measurements = function (x, ...) UseMethod('osem_measurements')
|
||||||
#' @describeIn osem_measurements Get measurements from \strong{all} senseBoxes.
|
#' @describeIn osem_measurements Get measurements from \strong{all} senseBoxes.
|
||||||
#' @export
|
#' @export
|
||||||
#' @examples
|
#' @examples
|
||||||
#' \dontrun{
|
#' \donttest{
|
||||||
#' # get measurements from all boxes on the phenomenon 'PM10' from the last 48h
|
#' # get measurements from all boxes on the phenomenon 'PM10' from the last 48h
|
||||||
#' m = osem_measurements('PM10')
|
#' m = osem_measurements('PM10')
|
||||||
#'
|
#'
|
||||||
|
@@ -72,7 +72,7 @@ osem_measurements.default = function (x, ...) {
 #' @describeIn osem_measurements Get measurements by a spatial filter.
 #' @export
 #' @examples
-#' \dontrun{
+#' \donttest{
 #' # get measurements from sensors within a custom WGS84 bounding box
 #' bbox = structure(c(7, 51, 8, 52), class = 'bbox')
 #' m = osem_measurements(bbox, 'Temperatur')
@@ -80,7 +80,6 @@ osem_measurements.default = function (x, ...) {
 #' # construct a bounding box 12km around berlin using the sf package,
 #' # and get measurements from stations within that box
 #' library(sf)
-#' library(units)
 #' bbox2 = st_point(c(13.4034, 52.5120)) %>%
 #'   st_sfc(crs = 4326) %>%
 #'   st_transform(3857) %>% # allow setting a buffer in meters
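Spelled out in full, the bbox workflow hinted at above might look like the following hedged sketch (the buffer is given in plain meters so no extra units handling is needed; phenomenon name and coordinates are just examples):

```r
library(opensensmapr)
library(sf)
library(magrittr)

# 12 km square around Berlin, built in a metric CRS and converted back
# to a WGS84 bbox object, which osem_measurements() dispatches on
bbox2 = st_point(c(13.4034, 52.5120)) %>%
  st_sfc(crs = 4326) %>%
  st_transform(3857) %>%   # pseudo-mercator, units are meters
  st_buffer(12000) %>%     # 12 km
  st_transform(4326) %>%
  st_bbox()

m = osem_measurements(bbox2, 'Temperatur', from = Sys.time() - 3600 * 24, to = Sys.time())
```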
|
||||||
|
@ -99,7 +98,7 @@ osem_measurements.bbox = function (x, phenomenon, exposure = NA,
|
||||||
from = NA, to = NA, columns = NA,
|
from = NA, to = NA, columns = NA,
|
||||||
...,
|
...,
|
||||||
endpoint = osem_endpoint(),
|
endpoint = osem_endpoint(),
|
||||||
progress = TRUE,
|
progress = T,
|
||||||
cache = NA) {
|
cache = NA) {
|
||||||
bbox = x
|
bbox = x
|
||||||
environment() %>%
|
environment() %>%
|
||||||
|
@ -137,7 +136,7 @@ osem_measurements.sensebox = function (x, phenomenon, exposure = NA,
|
||||||
from = NA, to = NA, columns = NA,
|
from = NA, to = NA, columns = NA,
|
||||||
...,
|
...,
|
||||||
endpoint = osem_endpoint(),
|
endpoint = osem_endpoint(),
|
||||||
progress = TRUE,
|
progress = T,
|
||||||
cache = NA) {
|
cache = NA) {
|
||||||
boxes = x
|
boxes = x
|
||||||
environment() %>%
|
environment() %>%
|
||||||
|
@ -181,8 +180,8 @@ parse_get_measurements_params = function (params) {
|
||||||
|
|
||||||
if (!is.na(params$from) && !is.na(params$to)) {
|
if (!is.na(params$from) && !is.na(params$to)) {
|
||||||
parse_dateparams(params$from, params$to) # only for validation sideeffect
|
parse_dateparams(params$from, params$to) # only for validation sideeffect
|
||||||
query$`from-date` = date_as_utc(params$from)
|
query$`from-date` = utc_date(params$from)
|
||||||
query$`to-date` = date_as_utc(params$to)
|
query$`to-date` = utc_date(params$to)
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!is.na(params$exposure)) query$exposure = params$exposure
|
if (!is.na(params$exposure)) query$exposure = params$exposure
|
||||||
|
|
|
@@ -1,9 +1,9 @@
 #' @export
 plot.osem_measurements = function (x, ..., mar = c(2, 4, 1, 1)) {
-  oldpar <- par(no.readonly = TRUE)
-  on.exit(par(oldpar))
+  oldpar = par()
   par(mar = mar)
   plot(value~createdAt, x, col = factor(x$sensorId), xlab = NA, ylab = x$unit[1], ...)
+  par(mar = oldpar$mar)
   invisible(x)
 }

@@ -14,12 +14,47 @@ print.osem_measurements = function (x, ...) {
 }

 #' Converts a foreign object to an osem_measurements data.frame.
-#' @param x A data.frame to attach the class to.
-#'   Should have at least a `value` and `createdAt` column.
-#' @return data.frame of class \code{osem_measurements}
+#' @param x A data.frame to attach the class to
 #' @export
 osem_as_measurements = function(x) {
-  ret = tibble::as_tibble(x)
-  class(ret) = c('osem_measurements', class(ret))
+  ret = as.data.frame(x)
+  class(ret) = c('osem_measurements', class(x))
   ret
 }
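A hedged sketch of why the converter above is useful: any table with the columns the plot method expects (here `value`, `createdAt`, `sensorId`, `unit`; the example data is made up) can be given the class, so the methods defined in this file apply to it.

```r
library(opensensmapr)

raw = data.frame(
  createdAt = as.POSIXct('2022-06-01', tz = 'GMT') + 3600 * 0:23,
  value     = rnorm(24, 20, 2),
  sensorId  = 'example-sensor',   # hypothetical ID, only used for colouring
  unit      = '°C',
  stringsAsFactors = FALSE
)

m = osem_as_measurements(raw)
class(m)   # "osem_measurements" ...
plot(m)    # dispatches to plot.osem_measurements() above
```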
|
||||||
|
|
||||||
|
#' Return rows with matching conditions, while maintaining class & attributes
|
||||||
|
#' @param .data A osem_measurements data.frame to filter
|
||||||
|
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
||||||
|
#' @param ... other arguments
|
||||||
|
#' @seealso \code{\link[dplyr]{filter}}
|
||||||
|
#' @export
|
||||||
|
filter.osem_measurements = dplyr_class_wrapper(osem_as_measurements)
|
||||||
|
|
||||||
|
#' Add new variables to the data, while maintaining class & attributes
|
||||||
|
#' @param .data A osem_measurements data.frame to mutate
|
||||||
|
#' @param .dots see corresponding function in package \code{\link{dplyr}}
|
||||||
|
#' @param ... other arguments
|
||||||
|
#' @seealso \code{\link[dplyr]{mutate}}
|
||||||
|
#' @export
|
||||||
|
mutate.osem_measurements = dplyr_class_wrapper(osem_as_measurements)
|
||||||
|
|
||||||
|
#' maintains class / attributes after subsetting
|
||||||
|
#' @noRd
|
||||||
|
#' @export
|
||||||
|
`[.osem_measurements` = function(x, i, ...) {
|
||||||
|
s = NextMethod()
|
||||||
|
mostattributes(s) = attributes(x)
|
||||||
|
s
|
||||||
|
}
|
||||||
|
|
||||||
|
# ==============================================================================
|
||||||
|
#
|
||||||
|
#' Convert a \code{osem_measurements} dataframe to an \code{\link[sf]{st_sf}} object.
|
||||||
|
#'
|
||||||
|
#' @param x The object to convert
|
||||||
|
#' @param ... maybe more objects to convert
|
||||||
|
#' @return The object with an st_geometry column attached.
|
||||||
|
#' @export
|
||||||
|
st_as_sf.osem_measurements = function (x, ...) {
|
||||||
|
NextMethod(x, ..., coords = c('lon', 'lat'), crs = 4326)
|
||||||
|
}
|
||||||
|
|
|
@@ -37,27 +37,16 @@
 #' }
 #'
 #' @section Retrieving measurements:
-#' There are two ways to retrieve measurements:
+#' Measurements can be retrieved through \code{\link{osem_measurements}} for a
+#' given phenomenon only. A subset of measurements may be selected by
+#'
 #' \itemize{
-#' \item \code{\link{osem_measurements_archive}}:
-#'   Downloads measurements for a \emph{single box} from the openSenseMap archive.
-#'   This function does not provide realtime data, but is suitable for long time frames.
-#'
-#' \item \code{\link{osem_measurements}}:
-#'   This function retrieves (realtime) measurements from the API. It works for a
-#'   \emph{single phenomenon} only, but provides various filters to select sensors by
-#'
-#'   \itemize{
-#'     \item a list of senseBoxes, previously retrieved through
-#'       \code{\link{osem_box}} or \code{\link{osem_boxes}}.
-#'     \item a geographic bounding box, which can be generated with the
-#'       \code{\link[sf]{sf}} package.
-#'     \item a time frame
-#'     \item an exposure type of the given box
-#'   }
-#'
-#'   Use this function with caution for long time frames, as the API becomes
-#'   quite slow and is limited to 10,000 measurements per 30-day interval.
+#'   \item a list of senseBoxes, previously retrieved through
+#'     \code{\link{osem_box}} or \code{\link{osem_boxes}}.
+#'   \item a geographic bounding box, which can be generated with the
+#'     \code{\link[sf]{sf}} package.
+#'   \item a time frame
+#'   \item an exposure type of the given box
 #' }
 #'
 #' Data is returned as \code{tibble} with the class \code{osem_measurements}.
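The two retrieval paths described above, as a hedged sketch. The exact argument layout of `osem_measurements_archive()` is assumed here from its description and may differ; the box ID and filters are placeholders.

```r
library(opensensmapr)

# (1) archive download: long time series for a single box, no realtime data
box  = osem_box('593bcd656ccf3b0011791f5a')
arch = osem_measurements_archive(box, as.Date('2022-06-01'), as.Date('2022-06-30'))  # argument layout assumed

# (2) live API: one phenomenon, filtered by boxes and a short time frame
m = osem_measurements(
  osem_boxes(grouptag = 'ifgi'),
  phenomenon = 'Temperatur',
  from = Sys.time() - 60 * 60 * 48,   # last 48 hours
  to   = Sys.time()
)
```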
|
||||||
|
@@ -65,14 +54,6 @@
 #' @section Retrieving statistics:
 #' Count statistics about the database are provided with \code{\link{osem_counts}}.
 #'
-#' @section Using a different API instance / endpoint:
-#' You can override the functions \code{osem_endpoint} and \code{osem_endpoint_archive}
-#' inside the package namespace:
-#'
-#' \code{
-#' assignInNamespace("osem_endpoint", function() "http://mynewosem.org", "opensensmapr")
-#' }
-#'
 #' @section Integration with other packages:
 #' The package aims to be compatible with the tidyverse.
 #' Helpers are implemented to ease the further usage of the retrieved data:
@@ -88,7 +69,7 @@
 #'   \code{\link{dplyr}}.
 #' }
 #'
-#' @seealso Report bugs at \url{https://github.com/sensebox/opensensmapR/issues}
+#' @seealso Report bugs at \url{https://github.com/noerw/opensensmapR/issues}
 #' @seealso openSenseMap API: \url{https://api.opensensemap.org/}
 #' @seealso official openSenseMap API documentation: \url{https://docs.opensensemap.org/}
 #' @docType package
@@ -100,13 +81,4 @@
 `%>%` = magrittr::`%>%`

 # just to make R CMD check happy, due to NSE (dplyr) functions
-globalVariables(c(
-  'createdAt',
-  'lastMeasurement',
-  'sensorType',
-  'title',
-  'unit',
-  'value',
-  'X_id',
-  '.'
-))
+globalVariables(c('lastMeasurement', '.'))
|
|
||||||
|
|
|
@@ -18,13 +18,13 @@ osem_phenomena = function (boxes) UseMethod('osem_phenomena')
 #' # get the phenomena for a single senseBox
 #' osem_phenomena(osem_box('593bcd656ccf3b0011791f5a'))
 #'
-#' \donttest{
 #' # get the phenomena for a group of senseBoxes
 #' osem_phenomena(
 #'   osem_boxes(grouptag = 'ifgi', exposure = 'outdoor', date = Sys.time())
 #' )
 #'
 #' # get phenomena with at least 30 sensors on opensensemap
+#' \donttest{
 #' phenoms = osem_phenomena(osem_boxes())
 #' names(phenoms[phenoms > 29])
 #' }
@@ -33,5 +33,5 @@ osem_phenomena.sensebox = function (boxes) {
     table() %>% # get count for each phenomenon
     as.list()

-  p[order(unlist(p), decreasing = TRUE)]
+  p[order(unlist(p), decreasing = T)]
 }
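To make the return value concrete, a short hedged sketch of working with the sorted, named list that `osem_phenomena()` produces (filters are placeholders):

```r
library(opensensmapr)

b = osem_boxes(grouptag = 'ifgi', exposure = 'outdoor', date = Sys.time())
p = osem_phenomena(b)

head(p, 3)        # the three most frequently observed phenomena in this group
names(p[p > 5])   # phenomena measured by more than five sensors
```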
|
|
README.md (128 lines changed)
@@ -1,44 +1,9 @@
 # opensensmapr

-[](https://cran.r-project.org/package=opensensmapr)
-[](https://travis-ci.org/sensebox/opensensmapR)
-[](https://ci.appveyor.com/project/noerw/opensensmapr/branch/master)
+[](https://cran.r-project.org/package=opensensmapr) [](https://travis-ci.org/noerw/opensensmapR) [](https://ci.appveyor.com/project/noerw/opensensmapR) [](https://codecov.io/github/noerw/opensensmapR?branch=master)

-This R package ingests data from the API of [opensensemap.org][osem] for analysis in R.
+This R package ingests data (environmental measurements, sensor stations) from the API of opensensemap.org for analysis in R.
+The package aims to be compatible with sf and the tidyverse.
-Features include:
-
-- `osem_boxes()`: fetch sensor station ("box") metadata, with various filters
-- `osem_measurements()`: fetch measurements by phenomenon, with various filters such as spatial extent, time range, sensor type, box, exposure, ...
-- no time frame limitation through request paging!
-- many helper functions to help understand the queried data
-- caching queries for reproducibility
-
-The package aims to be compatible with the [`tidyverse`][tidy] and [`sf`][sf],
-so it is easy to analyze or visualize the data with state of the art packages.
-
-[osem]: https://opensensemap.org/
-[sf]: https://github.com/r-spatial/sf
-[tidy]: https://www.tidyverse.org/
-
-## Usage
-
-Complete documentation is provided via the R help system:
-Each function's documentation can be viewed with `?<function-name>`.
-A comprehensive overview of all functions is given in `?opensensmapr`.
-
-There are also vignettes showcasing applications of this package:
-
-- [Visualising the History of openSenseMap.org][osem-history]: Showcase of `opensensmapr` with `dplyr` + `ggplot2`
-- [Exploring the openSenseMap dataset][osem-intro]: Showcase of included helper functions
-- [Caching openSenseMap Data for reproducibility][osem-serialization]
-
-[osem-intro]: https://sensebox.github.io/opensensmapR/inst/doc/osem-intro.html
-[osem-history]: https://sensebox.github.io/opensensmapR/inst/doc/osem-history.html
-[osem-serialization]: https://sensebox.github.io/opensensmapR/inst/doc/osem-serialization.html
-
-If you used this package for an analysis and think it could serve as a good
-example or showcase, feel free to add a vignette to the package via a [PR](#contribute)!

 ## Installation
|
||||||
|
|
||||||
|
@@ -48,73 +13,56 @@ The package is available on CRAN, install it via
 install.packages('opensensmapr')
 ```

-To install the very latest versions from GitHub, run:
+To install the very latest from GitHub, run:

 ```r
 install.packages('devtools')
-devtools::install_github('sensebox/opensensmapr@master') # latest stable version
-devtools::install_github('sensebox/opensensmapr@development') # bleeding edge version
+devtools::install_github('noerw/opensensmapr@master') # latest stable version
+devtools::install_github('noerw/opensensmapr@development') # bleeding edge version
 ```

+## Usage
+
+Each function's documentation can be viewed with `?<function-name>`.
+An overview is given in `?opensensmapr`.
+A verbose usage example is shown in the vignette [`osem-intro`](https://noerw.github.com/opensensmapR/inst/doc/osem-intro.html).
+
+In short, the following pseudocode shows the main functions for data retrieval:
+
+```r
+# retrieve a single box by id, or many boxes by some property-filters
+b = osem_box('boxId')
+b = osem_boxes(filter1, filter2, ...)
+
+# get the counts of observed phenomena for a list of boxes
+p = osem_phenomena(b)
+
+# get measurements for a phenomenon
+m = osem_measurements(phenomenon, filter1, ...)
+# get measurements for a phenomenon from selected boxes
+m = osem_measurements(b, phenomenon, filter1, ...)
+# get measurements for a phenomenon from a geographic bounding box
+m = osem_measurements(bbox, phenomenon, filter1, ...)
+
+# get general count statistics of the openSenseMap database
+osem_counts()
+```
+
+Additionally there are some helpers: `summary.sensebox(), plot.sensebox(), st_as_sf.sensebox(), osem_as_sensebox(), [.sensebox(), filter.sensebox(), mutate.sensebox(), ...`.
||||||
## Changelog
|
## Changelog
|
||||||
|
|
||||||
This project adheres to semantic versioning, for changes in recent versions please consult [NEWS.md](NEWS.md).
|
This project adheres to semantic versioning, for changes in recent versions please consult [CHANGES.md](CHANGES.md).
|
||||||
|
|
||||||
## Contributing & Development
|
## Contributing & Development
|
||||||
|
|
||||||
Contributions are very welcome!
|
Contributions are very welcome!
|
||||||
When submitting a patch, please follow the existing code stlye,
|
When submitting a patch, please follow the existing [code style](.lintr),
|
||||||
and run `R CMD check --no-vignettes .` on the package.
|
and run `R CMD check --no-vignettes .` on the package.
|
||||||
Where feasible, also add tests for the added / changed functionality in `tests/testthat`.
|
|
||||||
|
|
||||||
Please note that this project is released with a Contributor Code of Conduct.
|
Please note that this project is released with a [Contributor Code of Conduct](CONDUCT.md).
|
||||||
By participating in this project you agree to abide by its terms.
|
By participating in this project you agree to abide by its terms.
|
||||||
|
|
||||||
### development environment
|
|
||||||
|
|
||||||
To set up the development environment for testing and checking, all suggested packages should be installed.
|
|
||||||
On linux, these require some system dependencies:
|
|
||||||
```sh
|
|
||||||
# install dependencies for sf (see https://github.com/r-spatial/sf#installing)
|
|
||||||
sudo dnf install gdal-devel proj-devel proj-epsg proj-nad geos-devel udunits2-devel
|
|
||||||
|
|
||||||
# install suggested packages
|
|
||||||
R -e "install.packages(c('maps', 'maptools', 'tibble', 'rgeos', 'sf',
|
|
||||||
'knitr', 'rmarkdown', 'lubridate', 'units', 'jsonlite', 'ggplot2',
|
|
||||||
'zoo', 'lintr', 'testthat', 'covr')"
|
|
||||||
```
|
|
||||||
|
|
||||||
### build
|
|
||||||
|
|
||||||
To build the package, either use `devtools::build()` or run
|
|
||||||
```sh
|
|
||||||
R CMD build .
|
|
||||||
```
|
|
||||||
|
|
||||||
Next, run the **tests and checks**:
|
|
||||||
```sh
|
|
||||||
R CMD check --as-cran ../opensensmapr_*.tar.gz
|
|
||||||
# alternatively, if you're in a hurry:
|
|
||||||
R CMD check --no-vignettes ../opensensmapr_*.tar.gz
|
|
||||||
```
|
|
||||||
|
|
||||||
### release
|
|
||||||
|
|
||||||
To create a release:
|
|
||||||
|
|
||||||
0. make sure you are on master branch
|
|
||||||
1. run the tests and checks as described above
|
|
||||||
2. bump the version in `DESCRIPTION`
|
|
||||||
3. update `NEWS.md`
|
|
||||||
3. rebuild the documentation: `R -e 'devtools::document()'`
|
|
||||||
4. build the package again with the new version: `R CMD build . --no-build-vignettes`
|
|
||||||
5. tag the commit with the new version: `git tag v0.5.0`
|
|
||||||
6. push changes: `git push && git push --tags`
|
|
||||||
7. wait for *all* CI tests to complete successfully (helps in the next step)
|
|
||||||
8. [upload the new release to CRAN](https://cran.r-project.org/submit.html)
|
|
||||||
9. get back to the enjoyable parts of your life & hope you won't get bad mail next week.
|
|
||||||
|
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
GPL-2.0 - Norwin Roosen
|
GPL-2.0 - Norwin Roosen
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
## ----setup, results='hide', message=FALSE, warning=FALSE----------------------
|
## ----setup, results='hide', message=FALSE, warning=FALSE-----------------
|
||||||
# required packages:
|
# required packages:
|
||||||
library(opensensmapr) # data download
|
library(opensensmapr) # data download
|
||||||
library(dplyr) # data wrangling
|
library(dplyr) # data wrangling
|
||||||
|
@ -6,15 +6,12 @@ library(ggplot2) # plotting
|
||||||
library(lubridate) # date arithmetic
|
library(lubridate) # date arithmetic
|
||||||
library(zoo) # rollmean()
|
library(zoo) # rollmean()
|
||||||
|
|
||||||
## ----download-----------------------------------------------------------------
|
## ----download------------------------------------------------------------
|
||||||
# if you want to see results for a specific subset of boxes,
|
# if you want to see results for a specific subset of boxes,
|
||||||
# just specify a filter such as grouptag='ifgi' here
|
# just specify a filter such as grouptag='ifgi' here
|
||||||
|
boxes = osem_boxes()
|
||||||
|
|
||||||
# boxes = osem_boxes(cache = '.')
|
## ----exposure_counts, message=FALSE--------------------------------------
|
||||||
boxes = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
|
||||||
|
|
||||||
|
|
||||||
## ----exposure_counts, message=FALSE-------------------------------------------
|
|
||||||
exposure_counts = boxes %>%
|
exposure_counts = boxes %>%
|
||||||
group_by(exposure) %>%
|
group_by(exposure) %>%
|
||||||
mutate(count = row_number(createdAt))
|
mutate(count = row_number(createdAt))
|
||||||
|
@ -25,7 +22,7 @@ ggplot(exposure_counts, aes(x = createdAt, y = count, colour = exposure)) +
|
||||||
scale_colour_manual(values = exposure_colors) +
|
scale_colour_manual(values = exposure_colors) +
|
||||||
xlab('Registration Date') + ylab('senseBox count')
|
xlab('Registration Date') + ylab('senseBox count')
|
||||||
|
|
||||||
## ----exposure_summary---------------------------------------------------------
|
## ----exposure_summary----------------------------------------------------
|
||||||
exposure_counts %>%
|
exposure_counts %>%
|
||||||
summarise(
|
summarise(
|
||||||
oldest = min(createdAt),
|
oldest = min(createdAt),
|
||||||
|
@ -34,11 +31,11 @@ exposure_counts %>%
|
||||||
) %>%
|
) %>%
|
||||||
arrange(desc(count))
|
arrange(desc(count))
|
||||||
|
|
||||||
## ----grouptag_counts, message=FALSE-------------------------------------------
|
## ----grouptag_counts, message=FALSE--------------------------------------
|
||||||
grouptag_counts = boxes %>%
|
grouptag_counts = boxes %>%
|
||||||
group_by(grouptag) %>%
|
group_by(grouptag) %>%
|
||||||
# only include grouptags with 8 or more members
|
# only include grouptags with 8 or more members
|
||||||
filter(length(grouptag) >= 8 & !is.na(grouptag)) %>%
|
filter(length(grouptag) >= 8 && !is.na(grouptag)) %>%
|
||||||
mutate(count = row_number(createdAt))
|
mutate(count = row_number(createdAt))
|
||||||
|
|
||||||
# helper for sorting the grouptags by boxcount
|
# helper for sorting the grouptags by boxcount
|
||||||
|
@ -52,7 +49,7 @@ ggplot(grouptag_counts, aes(x = createdAt, y = count, colour = grouptag)) +
|
||||||
geom_line(aes(group = grouptag)) +
|
geom_line(aes(group = grouptag)) +
|
||||||
xlab('Registration Date') + ylab('senseBox count')
|
xlab('Registration Date') + ylab('senseBox count')
|
||||||
|
|
||||||
## ----grouptag_summary---------------------------------------------------------
|
## ----grouptag_summary----------------------------------------------------
|
||||||
grouptag_counts %>%
|
grouptag_counts %>%
|
||||||
summarise(
|
summarise(
|
||||||
oldest = min(createdAt),
|
oldest = min(createdAt),
|
||||||
|
@ -61,7 +58,7 @@ grouptag_counts %>%
|
||||||
) %>%
|
) %>%
|
||||||
arrange(desc(count))
|
arrange(desc(count))
|
||||||
|
|
||||||
## ----growthrate_registered, warning=FALSE, message=FALSE, results='hide'------
|
## ----growthrate_registered, warning=FALSE, message=FALSE, results='hide'----
|
||||||
bins = 'week'
|
bins = 'week'
|
||||||
mvavg_bins = 6
|
mvavg_bins = 6
|
||||||
|
|
||||||
|
@ -71,7 +68,7 @@ growth = boxes %>%
|
||||||
summarize(count = length(week)) %>%
|
summarize(count = length(week)) %>%
|
||||||
mutate(event = 'registered')
|
mutate(event = 'registered')
|
||||||
|
|
||||||
## ----growthrate_inactive, warning=FALSE, message=FALSE, results='hide'--------
|
## ----growthrate_inactive, warning=FALSE, message=FALSE, results='hide'----
|
||||||
inactive = boxes %>%
|
inactive = boxes %>%
|
||||||
# remove boxes that were updated in the last two days,
|
# remove boxes that were updated in the last two days,
|
||||||
# b/c any box becomes inactive at some point by definition of updatedAt
|
# b/c any box becomes inactive at some point by definition of updatedAt
|
||||||
|
@ -81,7 +78,7 @@ inactive = boxes %>%
|
||||||
summarize(count = length(week)) %>%
|
summarize(count = length(week)) %>%
|
||||||
mutate(event = 'inactive')
|
mutate(event = 'inactive')
|
||||||
|
|
||||||
## ----growthrate, warning=FALSE, message=FALSE, results='hide'-----------------
|
## ----growthrate, warning=FALSE, message=FALSE, results='hide'------------
|
||||||
boxes_by_date = bind_rows(growth, inactive) %>% group_by(event)
|
boxes_by_date = bind_rows(growth, inactive) %>% group_by(event)
|
||||||
|
|
||||||
ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
||||||
|
@ -92,7 +89,7 @@ ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
||||||
# moving average, make first and last value NA (to ensure identical length of vectors)
|
# moving average, make first and last value NA (to ensure identical length of vectors)
|
||||||
geom_line(aes(y = rollmean(count, mvavg_bins, fill = list(NA, NULL, NA))))
|
geom_line(aes(y = rollmean(count, mvavg_bins, fill = list(NA, NULL, NA))))
|
||||||
|
|
||||||
## ----exposure_duration, message=FALSE-----------------------------------------
|
## ----exposure_duration, message=FALSE------------------------------------
|
||||||
duration = boxes %>%
|
duration = boxes %>%
|
||||||
group_by(exposure) %>%
|
group_by(exposure) %>%
|
||||||
filter(!is.na(updatedAt)) %>%
|
filter(!is.na(updatedAt)) %>%
|
||||||
|
@ -102,11 +99,11 @@ ggplot(duration, aes(x = exposure, y = duration)) +
|
||||||
geom_boxplot() +
|
geom_boxplot() +
|
||||||
coord_flip() + ylab('Duration active in Days')
|
coord_flip() + ylab('Duration active in Days')
|
||||||
|
|
||||||
## ----grouptag_duration, message=FALSE-----------------------------------------
|
## ----grouptag_duration, message=FALSE------------------------------------
|
||||||
duration = boxes %>%
|
duration = boxes %>%
|
||||||
group_by(grouptag) %>%
|
group_by(grouptag) %>%
|
||||||
# only include grouptags with 8 or more members
|
# only include grouptags with 8 or more members
|
||||||
filter(length(grouptag) >= 8 & !is.na(grouptag) & !is.na(updatedAt)) %>%
|
filter(length(grouptag) >= 8 && !is.na(grouptag) && !is.na(updatedAt)) %>%
|
||||||
mutate(duration = difftime(updatedAt, createdAt, units='days'))
|
mutate(duration = difftime(updatedAt, createdAt, units='days'))
|
||||||
|
|
||||||
ggplot(duration, aes(x = grouptag, y = duration)) +
|
ggplot(duration, aes(x = grouptag, y = duration)) +
|
||||||
|
@ -122,7 +119,7 @@ duration %>%
|
||||||
) %>%
|
) %>%
|
||||||
arrange(desc(duration_avg))
|
arrange(desc(duration_avg))
|
||||||
|
|
||||||
## ----year_duration, message=FALSE---------------------------------------------
|
## ----year_duration, message=FALSE----------------------------------------
|
||||||
# NOTE: boxes older than 2016 missing due to missing updatedAt in database
|
# NOTE: boxes older than 2016 missing due to missing updatedAt in database
|
||||||
duration = boxes %>%
|
duration = boxes %>%
|
||||||
mutate(year = cut(as.Date(createdAt), breaks = 'year')) %>%
|
mutate(year = cut(as.Date(createdAt), breaks = 'year')) %>%
|
||||||
|
|
|
@ -43,10 +43,7 @@ So the first step is to retrieve *all the boxes*:
|
||||||
```{r download}
|
```{r download}
|
||||||
# if you want to see results for a specific subset of boxes,
|
# if you want to see results for a specific subset of boxes,
|
||||||
# just specify a filter such as grouptag='ifgi' here
|
# just specify a filter such as grouptag='ifgi' here
|
||||||
|
boxes = osem_boxes()
|
||||||
# boxes = osem_boxes(cache = '.')
|
|
||||||
boxes = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
|
||||||
|
|
||||||
```
|
```
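For reference, a hedged sketch of the two strategies used in this chunk: caching a live download on disk, versus shipping a precomputed `.rds` file with the vignette (the file name matches the one read above).

```r
library(opensensmapr)

# live download, cached in the working directory so re-knitting does not
# hit the API again
boxes = osem_boxes(cache = '.')

# or: precompute once and read the stored result when building the vignette
saveRDS(boxes, 'boxes_precomputed.rds')
boxes = readRDS('boxes_precomputed.rds')
```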
|
||||||
|
|
||||||
# Plot count of boxes by time {.tabset}
|
# Plot count of boxes by time {.tabset}
|
||||||
|
@ -71,7 +68,7 @@ ggplot(exposure_counts, aes(x = createdAt, y = count, colour = exposure)) +
|
||||||
Outdoor boxes are growing *fast*!
|
Outdoor boxes are growing *fast*!
|
||||||
We can also see the introduction of `mobile` sensor "stations" in 2017. While
|
We can also see the introduction of `mobile` sensor "stations" in 2017. While
|
||||||
mobile boxes are still few, we can expect a quick rise in 2018 once the new
|
mobile boxes are still few, we can expect a quick rise in 2018 once the new
|
||||||
senseBox MCU with GPS support is released.
|
[senseBox MCU with GPS support is released](https://sensebox.de/blog/2018-03-06-senseBox_MCU).
|
||||||
|
|
||||||
Let's have a quick summary:
|
Let's have a quick summary:
|
||||||
```{r exposure_summary}
|
```{r exposure_summary}
|
||||||
|
@ -96,7 +93,7 @@ inconsistent (`Luftdaten`, `luftdaten.info`, ...)
|
||||||
grouptag_counts = boxes %>%
|
grouptag_counts = boxes %>%
|
||||||
group_by(grouptag) %>%
|
group_by(grouptag) %>%
|
||||||
# only include grouptags with 8 or more members
|
# only include grouptags with 8 or more members
|
||||||
filter(length(grouptag) >= 8 & !is.na(grouptag)) %>%
|
filter(length(grouptag) >= 8 && !is.na(grouptag)) %>%
|
||||||
mutate(count = row_number(createdAt))
|
mutate(count = row_number(createdAt))
|
||||||
|
|
||||||
# helper for sorting the grouptags by boxcount
|
# helper for sorting the grouptags by boxcount
|
||||||
|
@ -166,7 +163,7 @@ ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
||||||
|
|
||||||
We see a sudden rise in early 2017, which lines up with the fast growing grouptag `Luftdaten`.
|
We see a sudden rise in early 2017, which lines up with the fast growing grouptag `Luftdaten`.
|
||||||
This was enabled by an integration of openSenseMap.org into the firmware of the
|
This was enabled by an integration of openSenseMap.org into the firmware of the
|
||||||
air quality monitoring project [luftdaten.info](https://sensor.community/de/).
|
air quality monitoring project [luftdaten.info](https://luftdaten.info).
|
||||||
The dips in mid 2017 and early 2018 could possibly be explained by production/delivery issues
|
The dips in mid 2017 and early 2018 could possibly be explained by production/delivery issues
|
||||||
of the senseBox hardware, but I have no data on the exact time frames to verify.
|
of the senseBox hardware, but I have no data on the exact time frames to verify.
|
||||||
|
|
||||||
|
@ -195,7 +192,7 @@ spanning a large chunk of openSenseMap's existence.
|
||||||
duration = boxes %>%
|
duration = boxes %>%
|
||||||
group_by(grouptag) %>%
|
group_by(grouptag) %>%
|
||||||
# only include grouptags with 8 or more members
|
# only include grouptags with 8 or more members
|
||||||
filter(length(grouptag) >= 8 & !is.na(grouptag) & !is.na(updatedAt)) %>%
|
filter(length(grouptag) >= 8 && !is.na(grouptag) && !is.na(updatedAt)) %>%
|
||||||
mutate(duration = difftime(updatedAt, createdAt, units='days'))
|
mutate(duration = difftime(updatedAt, createdAt, units='days'))
|
||||||
|
|
||||||
ggplot(duration, aes(x = grouptag, y = duration)) +
|
ggplot(duration, aes(x = grouptag, y = duration)) +
|
||||||
|
@ -243,4 +240,4 @@ If you implemented some, feel free to add them to this vignette via a [Pull Requ
|
||||||
* filter timespans with big dips in growth rate, and extrapolate the amount of
|
* filter timespans with big dips in growth rate, and extrapolate the amount of
|
||||||
senseBoxes that could be on the platform today, assuming there were no production issues ;)
|
senseBoxes that could be on the platform today, assuming there were no production issues ;)
|
||||||
|
|
||||||
[PR]: https://github.com/sensebox/opensensmapr/pulls
|
[PR]: https://github.com/noerw/opensensmapr/pulls
|
||||||
|
|
File diff suppressed because one or more lines are too long
|
@ -1,159 +0,0 @@
|
||||||
## ----setup, results='hide', message=FALSE, warning=FALSE----------------------
|
|
||||||
# required packages:
|
|
||||||
library(opensensmapr) # data download
|
|
||||||
library(dplyr) # data wrangling
|
|
||||||
library(ggplot2) # plotting
|
|
||||||
library(lubridate) # date arithmetic
|
|
||||||
library(zoo) # rollmean()
|
|
||||||
|
|
||||||
## ----download, results='hide', message=FALSE, warning=FALSE-------------------
|
|
||||||
# if you want to see results for a specific subset of boxes,
|
|
||||||
# just specify a filter such as grouptag='ifgi' here
|
|
||||||
|
|
||||||
# boxes = osem_boxes(cache = '.')
|
|
||||||
boxes = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
|
||||||
|
|
||||||
## -----------------------------------------------------------------------------
|
|
||||||
boxes = filter(boxes, locationtimestamp >= "2022-01-01" & locationtimestamp <="2022-12-31")
|
|
||||||
summary(boxes) -> summary.data.frame
|
|
||||||
|
|
||||||
## ---- message=FALSE, warning=FALSE--------------------------------------------
|
|
||||||
plot(boxes)
|
|
||||||
|
|
||||||
## -----------------------------------------------------------------------------
|
|
||||||
phenoms = osem_phenomena(boxes)
|
|
||||||
str(phenoms)
|
|
||||||
|
|
||||||
## -----------------------------------------------------------------------------
|
|
||||||
phenoms[phenoms > 50]
|
|
||||||
|
|
||||||
## ----exposure_counts, message=FALSE-------------------------------------------
|
|
||||||
exposure_counts = boxes %>%
|
|
||||||
group_by(exposure) %>%
|
|
||||||
mutate(count = row_number(locationtimestamp))
|
|
||||||
|
|
||||||
exposure_colors = c(indoor = 'red', outdoor = 'lightgreen', mobile = 'blue', unknown = 'darkgrey')
|
|
||||||
ggplot(exposure_counts, aes(x = locationtimestamp, y = count, colour = exposure)) +
|
|
||||||
geom_line() +
|
|
||||||
scale_colour_manual(values = exposure_colors) +
|
|
||||||
xlab('Registration Date') + ylab('senseBox count')
|
|
||||||
|
|
||||||
## ----exposure_summary---------------------------------------------------------
|
|
||||||
exposure_counts %>%
|
|
||||||
summarise(
|
|
||||||
oldest = min(locationtimestamp),
|
|
||||||
newest = max(locationtimestamp),
|
|
||||||
count = max(count)
|
|
||||||
) %>%
|
|
||||||
arrange(desc(count))
|
|
||||||
|
|
||||||
## ----grouptag_counts, message=FALSE-------------------------------------------
|
|
||||||
grouptag_counts = boxes %>%
|
|
||||||
group_by(grouptag) %>%
|
|
||||||
# only include grouptags with 15 or more members
|
|
||||||
filter(length(grouptag) >= 15 & !is.na(grouptag) & grouptag != '') %>%
|
|
||||||
mutate(count = row_number(locationtimestamp))
|
|
||||||
|
|
||||||
# helper for sorting the grouptags by boxcount
|
|
||||||
sortLvls = function(oldFactor, ascending = TRUE) {
|
|
||||||
lvls = table(oldFactor) %>% sort(., decreasing = !ascending) %>% names()
|
|
||||||
factor(oldFactor, levels = lvls)
|
|
||||||
}
|
|
||||||
grouptag_counts$grouptag = sortLvls(grouptag_counts$grouptag, ascending = FALSE)
|
|
||||||
|
|
||||||
ggplot(grouptag_counts, aes(x = locationtimestamp, y = count, colour = grouptag)) +
|
|
||||||
geom_line(aes(group = grouptag)) +
|
|
||||||
xlab('Registration Date') + ylab('senseBox count')
|
|
||||||
|
|
||||||
## ----grouptag_summary---------------------------------------------------------
|
|
||||||
grouptag_counts %>%
|
|
||||||
summarise(
|
|
||||||
oldest = min(locationtimestamp),
|
|
||||||
newest = max(locationtimestamp),
|
|
||||||
count = max(count)
|
|
||||||
) %>%
|
|
||||||
arrange(desc(count))
|
|
||||||
|
|
||||||
## ----growthrate_registered, warning=FALSE, message=FALSE, results='hide'------
|
|
||||||
bins = 'week'
|
|
||||||
mvavg_bins = 6
|
|
||||||
|
|
||||||
growth = boxes %>%
|
|
||||||
mutate(week = cut(as.Date(locationtimestamp), breaks = bins)) %>%
|
|
||||||
group_by(week) %>%
|
|
||||||
summarize(count = length(week)) %>%
|
|
||||||
mutate(event = 'registered')
|
|
||||||
|
|
||||||
## ----growthrate_inactive, warning=FALSE, message=FALSE, results='hide'--------
|
|
||||||
inactive = boxes %>%
|
|
||||||
# remove boxes that were updated in the last two days,
|
|
||||||
# b/c any box becomes inactive at some point by definition of updatedAt
|
|
||||||
filter(lastMeasurement < now() - days(2)) %>%
|
|
||||||
mutate(week = cut(as.Date(lastMeasurement), breaks = bins)) %>%
|
|
||||||
filter(as.Date(week) > as.Date("2021-12-31")) %>%
|
|
||||||
group_by(week) %>%
|
|
||||||
summarize(count = length(week)) %>%
|
|
||||||
mutate(event = 'inactive')
|
|
||||||
|
|
||||||
## ----growthrate, warning=FALSE, message=FALSE, results='hide'-----------------
|
|
||||||
boxes_by_date = bind_rows(growth, inactive) %>% group_by(event)
|
|
||||||
|
|
||||||
ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
|
||||||
xlab('Time') + ylab(paste('rate per ', bins)) +
|
|
||||||
scale_x_date(date_breaks="years", date_labels="%Y") +
|
|
||||||
scale_colour_manual(values = c(registered = 'lightgreen', inactive = 'grey')) +
|
|
||||||
geom_point(aes(y = count), size = 0.5) +
|
|
||||||
# moving average, make first and last value NA (to ensure identical length of vectors)
|
|
||||||
geom_line(aes(y = rollmean(count, mvavg_bins, fill = list(NA, NULL, NA))))
|
|
||||||
|
|
||||||
## ----table_mostregistrations--------------------------------------------------
|
|
||||||
boxes_by_date %>%
|
|
||||||
filter(count > 50) %>%
|
|
||||||
arrange(desc(count))
|
|
||||||
|
|
||||||
## ----exposure_duration, message=FALSE-----------------------------------------
|
|
||||||
durations = boxes %>%
|
|
||||||
group_by(exposure) %>%
|
|
||||||
filter(!is.na(lastMeasurement)) %>%
|
|
||||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
|
||||||
filter(duration >= 0)
|
|
||||||
|
|
||||||
ggplot(durations, aes(x = exposure, y = duration)) +
|
|
||||||
geom_boxplot() +
|
|
||||||
coord_flip() + ylab('Duration active in Days')
|
|
||||||
|
|
||||||
## ----grouptag_duration, message=FALSE-----------------------------------------
|
|
||||||
durations = boxes %>%
|
|
||||||
filter(!is.na(lastMeasurement)) %>%
|
|
||||||
group_by(grouptag) %>%
|
|
||||||
# only include grouptags with 20 or more members
|
|
||||||
filter(length(grouptag) >= 15 & !is.na(grouptag) & !is.na(lastMeasurement)) %>%
|
|
||||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
|
||||||
filter(duration >= 0)
|
|
||||||
|
|
||||||
ggplot(durations, aes(x = grouptag, y = duration)) +
|
|
||||||
geom_boxplot() +
|
|
||||||
coord_flip() + ylab('Duration active in Days')
|
|
||||||
|
|
||||||
durations %>%
|
|
||||||
summarize(
|
|
||||||
duration_avg = round(mean(duration)),
|
|
||||||
duration_min = round(min(duration)),
|
|
||||||
duration_max = round(max(duration)),
|
|
||||||
oldest_box = round(max(difftime(now(), locationtimestamp, units='days')))
|
|
||||||
) %>%
|
|
||||||
arrange(desc(duration_avg))
|
|
||||||
|
|
||||||
## ----year_duration, message=FALSE---------------------------------------------
|
|
||||||
# NOTE: boxes older than 2016 missing due to missing updatedAt in database
|
|
||||||
duration = boxes %>%
|
|
||||||
mutate(year = cut(as.Date(locationtimestamp), breaks = 'year')) %>%
|
|
||||||
group_by(year) %>%
|
|
||||||
filter(!is.na(lastMeasurement)) %>%
|
|
||||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
|
||||||
filter(duration >= 0)
|
|
||||||
|
|
||||||
ggplot(duration, aes(x = substr(as.character(year), 0, 4), y = duration)) +
|
|
||||||
geom_boxplot() +
|
|
||||||
coord_flip() + ylab('Duration active in Days') + xlab('Year of Registration')
|
|
||||||
|
|
|
@ -1,297 +0,0 @@
|
||||||
---
|
|
||||||
title: "Visualising the Development of openSenseMap.org in 2022"
|
|
||||||
author: "Jan Stenkamp"
|
|
||||||
date: '`r Sys.Date()`'
|
|
||||||
output:
|
|
||||||
html_document:
|
|
||||||
code_folding: hide
|
|
||||||
df_print: kable
|
|
||||||
theme: lumen
|
|
||||||
toc: yes
|
|
||||||
toc_float: yes
|
|
||||||
rmarkdown::html_vignette:
|
|
||||||
df_print: kable
|
|
||||||
fig_height: 5
|
|
||||||
fig_width: 7
|
|
||||||
toc: yes
|
|
||||||
vignette: >
|
|
||||||
%\VignetteIndexEntry{Visualising the Development of openSenseMap.org in 2022}
|
|
||||||
%\VignetteEncoding{UTF-8}
|
|
||||||
%\VignetteEngine{knitr::rmarkdown}
|
|
||||||
---
|
|
||||||
|
|
||||||
> This vignette serves as an example on data wrangling & visualization with
|
|
||||||
`opensensmapr`, `dplyr` and `ggplot2`.
|
|
||||||
|
|
||||||
```{r setup, results='hide', message=FALSE, warning=FALSE}
|
|
||||||
# required packages:
|
|
||||||
library(opensensmapr) # data download
|
|
||||||
library(dplyr) # data wrangling
|
|
||||||
library(ggplot2) # plotting
|
|
||||||
library(lubridate) # date arithmetic
|
|
||||||
library(zoo) # rollmean()
|
|
||||||
```
|
|
||||||
|
|
||||||
openSenseMap.org has grown quite a bit over the last years; it would be interesting
|
|
||||||
to see how we got to the current `r osem_counts()$boxes` sensor stations,
|
|
||||||
split up by various attributes of the boxes.
|
|
||||||
|
|
||||||
While `opensensmapr` provides extensive methods of filtering boxes by attributes
|
|
||||||
on the server, we do the filtering within R to save time and gain flexibility.
|
|
||||||
|
|
||||||
|
|
||||||
So the first step is to retrieve *all the boxes*.
|
|
||||||
|
|
||||||
```{r download, results='hide', message=FALSE, warning=FALSE}
|
|
||||||
# if you want to see results for a specific subset of boxes,
|
|
||||||
# just specify a filter such as grouptag='ifgi' here
|
|
||||||
|
|
||||||
# boxes = osem_boxes(cache = '.')
|
|
||||||
boxes = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
|
||||||
```
|
|
||||||
# Introduction
|
|
||||||
In the following we just want to have a look at the boxes created in 2022, so we filter for them.
|
|
||||||
|
|
||||||
```{r}
|
|
||||||
boxes = filter(boxes, locationtimestamp >= "2022-01-01" & locationtimestamp <="2022-12-31")
|
|
||||||
summary(boxes) -> summary.data.frame
|
|
||||||
```
|
|
||||||
|
|
||||||
<!-- This gives a good overview already: As of writing this, there are more than 11,000 -->
|
|
||||||
<!-- sensor stations, of which ~30% are currently running. Most of them are placed -->
|
|
||||||
<!-- outdoors and have around 5 sensors each. -->
|
|
||||||
<!-- The oldest station is from August 2016, while the latest station was registered a -->
|
|
||||||
<!-- couple of minutes ago. -->
|
|
||||||
|
|
||||||
Another feature of interest is the spatial distribution of the boxes: `plot()`
|
|
||||||
can help us out here. This function requires a bunch of optional dependencies though.
|
|
||||||
|
|
||||||
```{r, message=FALSE, warning=FALSE}
|
|
||||||
plot(boxes)
|
|
||||||
```
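If the optional dependencies are not installed yet, they can be added beforehand. This is only a convenience sketch; the packages assumed here (`maps`, `maptools`, `rgeos`) are the ones used for the same purpose in the package's other vignettes:

```{r install_plot_deps, eval=FALSE}
# install the optional plotting dependencies on demand (not evaluated here)
if (!require('maps')) install.packages('maps')
if (!require('maptools')) install.packages('maptools')
if (!require('rgeos')) install.packages('rgeos')
```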
|
|
||||||
|
|
||||||
But what do these sensor stations actually measure? Let's find out.
|
|
||||||
`osem_phenomena()` gives us a named list of the counts of each observed
|
|
||||||
phenomenon for the given set of sensor stations:
|
|
||||||
|
|
||||||
```{r}
|
|
||||||
phenoms = osem_phenomena(boxes)
|
|
||||||
str(phenoms)
|
|
||||||
```
|
|
||||||
|
|
||||||
That's quite some noise there, with many phenomena being measured by a single
|
|
||||||
sensor only, or many duplicated phenomena due to slightly different spellings.
|
|
||||||
We should clean that up, but for now let's just filter out the noise and find
|
|
||||||
those phenomena with high sensor numbers:
|
|
||||||
|
|
||||||
```{r}
|
|
||||||
phenoms[phenoms > 50]
|
|
||||||
```
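A first pass at merging the duplicated spellings could normalise the phenomenon names by case and surrounding whitespace before summing the counts. This is only a sketch (not evaluated here); a real cleanup would still need a curated mapping of spellings:

```{r phenoms_merged, eval=FALSE}
# merge counts of phenomena whose names differ only by case or whitespace
counts = unlist(phenoms)                               # named numeric vector
merged = tapply(counts, tolower(trimws(names(counts))), sum)
sort(merged[merged > 50], decreasing = TRUE)
```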
|
|
||||||
|
|
||||||
|
|
||||||
# Plot count of boxes by time {.tabset}
|
|
||||||
By looking at the `createdAt` attribute of each box we know the exact time a box
|
|
||||||
was registered. Because of some database migration issues the `createdAt` values are mostly wrong (about 80% of the boxes appear to have been created on 2022-03-30), so we use the `timestamp` attribute of the `currentlocation` instead, which should in most cases correspond to the creation date.
|
|
||||||
|
|
||||||
With this approach we have no information about boxes that were deleted in the
|
|
||||||
meantime, but that's okay for now.
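To see how widespread the migration artefact is, we can count the boxes whose `createdAt` falls on that date. The chunk below is only a sketch and is not evaluated; it assumes the parsed box metadata exposes a `createdAt` column next to `locationtimestamp`:

```{r createdat_check, eval=FALSE}
# how many boxes carry the suspicious migration date in createdAt?
boxes %>%
  summarise(
    migrated = sum(as.Date(createdAt) == as.Date('2022-03-30'), na.rm = TRUE),
    total = n()
  )
```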
|
|
||||||
|
|
||||||
## ...and exposure
|
|
||||||
```{r exposure_counts, message=FALSE}
|
|
||||||
exposure_counts = boxes %>%
|
|
||||||
group_by(exposure) %>%
|
|
||||||
mutate(count = row_number(locationtimestamp))
|
|
||||||
|
|
||||||
exposure_colors = c(indoor = 'red', outdoor = 'lightgreen', mobile = 'blue', unknown = 'darkgrey')
|
|
||||||
ggplot(exposure_counts, aes(x = locationtimestamp, y = count, colour = exposure)) +
|
|
||||||
geom_line() +
|
|
||||||
scale_colour_manual(values = exposure_colors) +
|
|
||||||
xlab('Registration Date') + ylab('senseBox count')
|
|
||||||
```
|
|
||||||
|
|
||||||
Outdoor boxes are growing *fast*!
|
|
||||||
We can also see the introduction of `mobile` sensor "stations" in 2017.
|
|
||||||
|
|
||||||
Let's have a quick summary:
|
|
||||||
```{r exposure_summary}
|
|
||||||
exposure_counts %>%
|
|
||||||
summarise(
|
|
||||||
oldest = min(locationtimestamp),
|
|
||||||
newest = max(locationtimestamp),
|
|
||||||
count = max(count)
|
|
||||||
) %>%
|
|
||||||
arrange(desc(count))
|
|
||||||
```
|
|
||||||
|
|
||||||
## ...and grouptag
|
|
||||||
We can try to find out where the increases in growth came from, by analysing the
|
|
||||||
box count by grouptag.
|
|
||||||
|
|
||||||
Caveats: Only a small subset of boxes has a grouptag, and we should assume
|
|
||||||
that these groups are actually bigger. Also, we can see that grouptag naming is
|
|
||||||
inconsistent (`Luftdaten`, `luftdaten.info`, ...)
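An illustrative way to fold the most obvious spelling variants into a single tag before counting is sketched below (not evaluated; the column name `grouptag_clean` and the single mapping rule are made up for the example):

```{r grouptag_normalise, eval=FALSE}
# normalise a few known spelling variants of the same group
boxes %>%
  mutate(grouptag_clean = case_when(
    grepl('luftdaten', grouptag, ignore.case = TRUE) ~ 'luftdaten.info',
    TRUE ~ as.character(grouptag)
  )) %>%
  count(grouptag_clean, sort = TRUE)
```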
|
|
||||||
|
|
||||||
```{r grouptag_counts, message=FALSE}
|
|
||||||
grouptag_counts = boxes %>%
|
|
||||||
group_by(grouptag) %>%
|
|
||||||
# only include grouptags with 15 or more members
|
|
||||||
filter(length(grouptag) >= 15 & !is.na(grouptag) & grouptag != '') %>%
|
|
||||||
mutate(count = row_number(locationtimestamp))
|
|
||||||
|
|
||||||
# helper for sorting the grouptags by boxcount
|
|
||||||
sortLvls = function(oldFactor, ascending = TRUE) {
|
|
||||||
lvls = table(oldFactor) %>% sort(., decreasing = !ascending) %>% names()
|
|
||||||
factor(oldFactor, levels = lvls)
|
|
||||||
}
|
|
||||||
grouptag_counts$grouptag = sortLvls(grouptag_counts$grouptag, ascending = FALSE)
|
|
||||||
|
|
||||||
ggplot(grouptag_counts, aes(x = locationtimestamp, y = count, colour = grouptag)) +
|
|
||||||
geom_line(aes(group = grouptag)) +
|
|
||||||
xlab('Registration Date') + ylab('senseBox count')
|
|
||||||
```
|
|
||||||
|
|
||||||
```{r grouptag_summary}
|
|
||||||
grouptag_counts %>%
|
|
||||||
summarise(
|
|
||||||
oldest = min(locationtimestamp),
|
|
||||||
newest = max(locationtimestamp),
|
|
||||||
count = max(count)
|
|
||||||
) %>%
|
|
||||||
arrange(desc(count))
|
|
||||||
```
|
|
||||||
|
|
||||||
# Plot rate of growth and inactivity per week
|
|
||||||
First we group the boxes by `locationtimestamp` into bins of one week:
|
|
||||||
```{r growthrate_registered, warning=FALSE, message=FALSE, results='hide'}
|
|
||||||
bins = 'week'
|
|
||||||
mvavg_bins = 6
|
|
||||||
|
|
||||||
growth = boxes %>%
|
|
||||||
mutate(week = cut(as.Date(locationtimestamp), breaks = bins)) %>%
|
|
||||||
group_by(week) %>%
|
|
||||||
summarize(count = length(week)) %>%
|
|
||||||
mutate(event = 'registered')
|
|
||||||
```
|
|
||||||
|
|
||||||
We can do the same for `updatedAt`, which informs us about the last change to
|
|
||||||
a box, including uploaded measurements. As a lot of boxes were "updated" by the database
|
|
||||||
migration, many of them are updated at 2022-03-30, so we try to use the `lastMeasurement`
|
|
||||||
attribute instead of `updatedAt`. This leads to fewer boxes but also automatically excludes
|
|
||||||
boxes which were created but never made a measurement.
|
|
||||||
|
|
||||||
This method of determining inactive boxes is fairly inaccurate and should be
|
|
||||||
considered an approximation, because we have no information about intermediate
|
|
||||||
inactive phases.
|
|
||||||
Also deleted boxes would probably have a big impact here.
|
|
||||||
```{r growthrate_inactive, warning=FALSE, message=FALSE, results='hide'}
|
|
||||||
inactive = boxes %>%
|
|
||||||
# remove boxes that were updated in the last two days,
|
|
||||||
# b/c any box becomes inactive at some point by definition of updatedAt
|
|
||||||
filter(lastMeasurement < now() - days(2)) %>%
|
|
||||||
mutate(week = cut(as.Date(lastMeasurement), breaks = bins)) %>%
|
|
||||||
filter(as.Date(week) > as.Date("2021-12-31")) %>%
|
|
||||||
group_by(week) %>%
|
|
||||||
summarize(count = length(week)) %>%
|
|
||||||
mutate(event = 'inactive')
|
|
||||||
```
|
|
||||||
|
|
||||||
Now we can combine both datasets for plotting:
|
|
||||||
```{r growthrate, warning=FALSE, message=FALSE, results='hide'}
|
|
||||||
boxes_by_date = bind_rows(growth, inactive) %>% group_by(event)
|
|
||||||
|
|
||||||
ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
|
||||||
xlab('Time') + ylab(paste('rate per ', bins)) +
|
|
||||||
scale_x_date(date_breaks="years", date_labels="%Y") +
|
|
||||||
scale_colour_manual(values = c(registered = 'lightgreen', inactive = 'grey')) +
|
|
||||||
geom_point(aes(y = count), size = 0.5) +
|
|
||||||
# moving average, make first and last value NA (to ensure identical length of vectors)
|
|
||||||
geom_line(aes(y = rollmean(count, mvavg_bins, fill = list(NA, NULL, NA))))
|
|
||||||
```
|
|
||||||
|
|
||||||
And see in which weeks the most boxes become (in)active:
|
|
||||||
```{r table_mostregistrations}
|
|
||||||
boxes_by_date %>%
|
|
||||||
filter(count > 50) %>%
|
|
||||||
arrange(desc(count))
|
|
||||||
```
|
|
||||||
|
|
||||||
# Plot duration of boxes being active {.tabset}
|
|
||||||
While we are looking at `locationtimestamp` and `lastMeasurement`, we can also extract the duration of activity
|
|
||||||
of each box, and look at metrics by exposure and grouptag once more:
|
|
||||||
|
|
||||||
## ...by exposure
|
|
||||||
```{r exposure_duration, message=FALSE}
|
|
||||||
durations = boxes %>%
|
|
||||||
group_by(exposure) %>%
|
|
||||||
filter(!is.na(lastMeasurement)) %>%
|
|
||||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
|
||||||
filter(duration >= 0)
|
|
||||||
|
|
||||||
ggplot(durations, aes(x = exposure, y = duration)) +
|
|
||||||
geom_boxplot() +
|
|
||||||
coord_flip() + ylab('Duration active in Days')
|
|
||||||
```
|
|
||||||
|
|
||||||
The time of activity averages at only `r round(mean(durations$duration))` days,
|
|
||||||
though there are boxes with `r round(max(durations$duration))` days of activity,
|
|
||||||
spanning a large chunk of openSenseMap's existence.
|
|
||||||
|
|
||||||
## ...by grouptag
|
|
||||||
```{r grouptag_duration, message=FALSE}
|
|
||||||
durations = boxes %>%
|
|
||||||
filter(!is.na(lastMeasurement)) %>%
|
|
||||||
group_by(grouptag) %>%
|
|
||||||
# only include grouptags with 15 or more members
|
|
||||||
filter(length(grouptag) >= 15 & !is.na(grouptag) & !is.na(lastMeasurement)) %>%
|
|
||||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
|
||||||
filter(duration >= 0)
|
|
||||||
|
|
||||||
ggplot(durations, aes(x = grouptag, y = duration)) +
|
|
||||||
geom_boxplot() +
|
|
||||||
coord_flip() + ylab('Duration active in Days')
|
|
||||||
|
|
||||||
durations %>%
|
|
||||||
summarize(
|
|
||||||
duration_avg = round(mean(duration)),
|
|
||||||
duration_min = round(min(duration)),
|
|
||||||
duration_max = round(max(duration)),
|
|
||||||
oldest_box = round(max(difftime(now(), locationtimestamp, units='days')))
|
|
||||||
) %>%
|
|
||||||
arrange(desc(duration_avg))
|
|
||||||
```
|
|
||||||
|
|
||||||
The time of activity averages at only `r round(mean(durations$duration))` days,
|
|
||||||
though there are boxes with `r round(max(durations$duration))` days of activity,
|
|
||||||
spanning a large chunk of openSenseMap's existence.
|
|
||||||
|
|
||||||
## ...by year of registration
|
|
||||||
This is less useful, as older boxes are active for a longer time by definition.
|
|
||||||
If you have an idea how to compensate for that, please send a [Pull Request][PR]!
|
|
||||||
|
|
||||||
```{r year_duration, message=FALSE}
|
|
||||||
# NOTE: boxes older than 2016 missing due to missing updatedAt in database
|
|
||||||
duration = boxes %>%
|
|
||||||
mutate(year = cut(as.Date(locationtimestamp), breaks = 'year')) %>%
|
|
||||||
group_by(year) %>%
|
|
||||||
filter(!is.na(lastMeasurement)) %>%
|
|
||||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
|
||||||
filter(duration >= 0)
|
|
||||||
|
|
||||||
ggplot(duration, aes(x = substr(as.character(year), 0, 4), y = duration)) +
|
|
||||||
geom_boxplot() +
|
|
||||||
coord_flip() + ylab('Duration active in Days') + xlab('Year of Registration')
|
|
||||||
```
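One idea for such a compensation, sketched below and not evaluated here, is to relate each box's active duration to the maximum duration it could have reached since registration, which gives a comparable share per year:

```{r relative_duration, eval=FALSE}
# relative duration: fraction of the possible lifetime a box was actually active
duration %>%
  mutate(
    max_possible = as.numeric(difftime(now(), locationtimestamp, units = 'days')),
    rel_duration = as.numeric(duration) / max_possible
  ) %>%
  group_by(year) %>%
  summarise(mean_rel_duration = mean(rel_duration))
```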
|
|
||||||
|
|
||||||
# More Visualisations
|
|
||||||
Other visualisations come to mind, and are left as an exercise to the reader.
|
|
||||||
If you implemented some, feel free to add them to this vignette via a [Pull Request][PR].
|
|
||||||
|
|
||||||
* growth by phenomenon
|
|
||||||
* growth by location -> (interactive) map
|
|
||||||
* set inactive rate in relation to total box count
|
|
||||||
* filter timespans with big dips in growth rate, and extrapolate the amount of
|
|
||||||
senseBoxes that could be on the platform today, assuming there were no production issues ;)
|
|
||||||
|
|
||||||
[PR]: https://github.com/sensebox/opensensmapr/pulls
|
|
||||||
|
|
||||||
|
|
File diff suppressed because one or more lines are too long
|
@ -1,75 +1,73 @@
|
||||||
## ----setup, include=FALSE-----------------------------------------------------
|
## ----setup, include=FALSE------------------------------------------------
|
||||||
knitr::opts_chunk$set(echo = TRUE)
|
knitr::opts_chunk$set(echo = TRUE)
|
||||||
|
|
||||||
## ----results = FALSE----------------------------------------------------------
|
## ----results = F---------------------------------------------------------
|
||||||
library(magrittr)
|
library(magrittr)
|
||||||
library(opensensmapr)
|
library(opensensmapr)
|
||||||
|
|
||||||
# all_sensors = osem_boxes(cache = '.')
|
all_sensors = osem_boxes()
|
||||||
all_sensors = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
|
||||||
|
|
||||||
## -----------------------------------------------------------------------------
|
## ------------------------------------------------------------------------
|
||||||
summary(all_sensors)
|
summary(all_sensors)
|
||||||
|
|
||||||
## ---- message=FALSE, warning=FALSE--------------------------------------------
|
## ----message=F, warning=F------------------------------------------------
|
||||||
|
if (!require('maps')) install.packages('maps')
|
||||||
|
if (!require('maptools')) install.packages('maptools')
|
||||||
|
if (!require('rgeos')) install.packages('rgeos')
|
||||||
|
|
||||||
plot(all_sensors)
|
plot(all_sensors)
|
||||||
|
|
||||||
## -----------------------------------------------------------------------------
|
## ------------------------------------------------------------------------
|
||||||
phenoms = osem_phenomena(all_sensors)
|
phenoms = osem_phenomena(all_sensors)
|
||||||
str(phenoms)
|
str(phenoms)
|
||||||
|
|
||||||
## -----------------------------------------------------------------------------
|
## ------------------------------------------------------------------------
|
||||||
phenoms[phenoms > 20]
|
phenoms[phenoms > 20]
|
||||||
|
|
||||||
## ----results = FALSE, eval=FALSE----------------------------------------------
|
## ----results = F---------------------------------------------------------
|
||||||
# pm25_sensors = osem_boxes(
|
pm25_sensors = osem_boxes(
|
||||||
# exposure = 'outdoor',
|
exposure = 'outdoor',
|
||||||
# date = Sys.time(), # ±4 hours
|
date = Sys.time(), # ±4 hours
|
||||||
# phenomenon = 'PM2.5'
|
phenomenon = 'PM2.5'
|
||||||
# )
|
)
|
||||||
|
|
||||||
## -----------------------------------------------------------------------------
|
|
||||||
pm25_sensors = readRDS('pm25_sensors.rds') # read precomputed file to save resources
|
|
||||||
|
|
||||||
|
## ------------------------------------------------------------------------
|
||||||
summary(pm25_sensors)
|
summary(pm25_sensors)
|
||||||
plot(pm25_sensors)
|
plot(pm25_sensors)
|
||||||
|
|
||||||
## ---- results=FALSE, message=FALSE--------------------------------------------
|
## ------------------------------------------------------------------------
|
||||||
library(sf)
|
library(sf)
|
||||||
library(units)
|
library(units)
|
||||||
library(lubridate)
|
library(lubridate)
|
||||||
library(dplyr)
|
library(dplyr)
|
||||||
|
|
||||||
|
# construct a bounding box: 12 kilometers around Berlin
|
||||||
|
berlin = st_point(c(13.4034, 52.5120)) %>%
|
||||||
|
st_sfc(crs = 4326) %>%
|
||||||
|
st_transform(3857) %>% # allow setting a buffer in meters
|
||||||
|
st_buffer(set_units(12, km)) %>%
|
||||||
|
st_transform(4326) %>% # the opensensemap expects WGS 84
|
||||||
|
st_bbox()
|
||||||
|
|
||||||
## ----bbox, results = FALSE, eval=FALSE----------------------------------------
|
## ----results = F---------------------------------------------------------
|
||||||
# # construct a bounding box: 12 kilometers around Berlin
|
pm25 = osem_measurements(
|
||||||
# berlin = st_point(c(13.4034, 52.5120)) %>%
|
berlin,
|
||||||
# st_sfc(crs = 4326) %>%
|
phenomenon = 'PM2.5',
|
||||||
# st_transform(3857) %>% # allow setting a buffer in meters
|
from = now() - days(20), # defaults to 2 days
|
||||||
# st_buffer(set_units(12, km)) %>%
|
to = now()
|
||||||
# st_transform(4326) %>% # the opensensemap expects WGS 84
|
)
|
||||||
# st_bbox()
|
|
||||||
# pm25 = osem_measurements(
|
|
||||||
# berlin,
|
|
||||||
# phenomenon = 'PM2.5',
|
|
||||||
# from = now() - days(3), # defaults to 2 days
|
|
||||||
# to = now()
|
|
||||||
# )
|
|
||||||
#
|
|
||||||
|
|
||||||
## -----------------------------------------------------------------------------
|
|
||||||
pm25 = readRDS('pm25_berlin.rds') # read precomputed file to save resources
|
|
||||||
plot(pm25)
|
plot(pm25)
|
||||||
|
|
||||||
## ---- warning=FALSE-----------------------------------------------------------
|
## ------------------------------------------------------------------------
|
||||||
outliers = filter(pm25, value > 100)$sensorId
|
outliers = filter(pm25, value > 100)$sensorId
|
||||||
bad_sensors = outliers[, drop = TRUE] %>% levels()
|
bad_sensors = outliers[, drop = T] %>% levels()
|
||||||
|
|
||||||
pm25 = mutate(pm25, invalid = sensorId %in% bad_sensors)
|
pm25 = mutate(pm25, invalid = sensorId %in% bad_sensors)
|
||||||
|
|
||||||
## -----------------------------------------------------------------------------
|
## ------------------------------------------------------------------------
|
||||||
st_as_sf(pm25) %>% st_geometry() %>% plot(col = factor(pm25$invalid), axes = TRUE)
|
st_as_sf(pm25) %>% st_geometry() %>% plot(col = factor(pm25$invalid), axes = T)
|
||||||
|
|
||||||
## -----------------------------------------------------------------------------
|
## ------------------------------------------------------------------------
|
||||||
pm25 %>% filter(invalid == FALSE) %>% plot()
|
pm25 %>% filter(invalid == FALSE) %>% plot()
|
||||||
|
|
||||||
|
|
|
@ -18,7 +18,7 @@ knitr::opts_chunk$set(echo = TRUE)
|
||||||
```
|
```
|
||||||
|
|
||||||
This package provides data ingestion functions for almost any data stored on the
|
This package provides data ingestion functions for almost any data stored on the
|
||||||
open data platform for environmental sensordata <https://opensensemap.org>.
|
open data platform for environemental sensordata <https://opensensemap.org>.
|
||||||
Its main goals are to provide means for:
|
Its main goals are to provide means for:
|
||||||
|
|
||||||
- big data analysis of the measurements stored on the platform
|
- big data analysis of the measurements stored on the platform
|
||||||
|
@ -28,12 +28,11 @@ Its main goals are to provide means for:
|
||||||
Before we look at actual observations, lets get a grasp of the openSenseMap
|
Before we look at actual observations, lets get a grasp of the openSenseMap
|
||||||
datasets' structure.
|
datasets' structure.
|
||||||
|
|
||||||
```{r results = FALSE}
|
```{r results = F}
|
||||||
library(magrittr)
|
library(magrittr)
|
||||||
library(opensensmapr)
|
library(opensensmapr)
|
||||||
|
|
||||||
# all_sensors = osem_boxes(cache = '.')
|
all_sensors = osem_boxes()
|
||||||
all_sensors = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
|
||||||
```
|
```
|
||||||
```{r}
|
```{r}
|
||||||
summary(all_sensors)
|
summary(all_sensors)
|
||||||
|
@ -48,7 +47,11 @@ couple of minutes ago.
|
||||||
Another feature of interest is the spatial distribution of the boxes: `plot()`
|
Another feature of interest is the spatial distribution of the boxes: `plot()`
|
||||||
can help us out here. This function requires a bunch of optional dependencies though.
|
can help us out here. This function requires a bunch of optional dependencies though.
|
||||||
|
|
||||||
```{r, message=FALSE, warning=FALSE}
|
```{r message=F, warning=F}
|
||||||
|
if (!require('maps')) install.packages('maps')
|
||||||
|
if (!require('maptools')) install.packages('maptools')
|
||||||
|
if (!require('rgeos')) install.packages('rgeos')
|
||||||
|
|
||||||
plot(all_sensors)
|
plot(all_sensors)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -78,7 +81,7 @@ We should check how many sensor stations provide useful data: We want only those
|
||||||
boxes with a PM2.5 sensor, that are placed outdoors and are currently submitting
|
boxes with a PM2.5 sensor, that are placed outdoors and are currently submitting
|
||||||
measurements:
|
measurements:
|
||||||
|
|
||||||
```{r results = FALSE, eval=FALSE}
|
```{r results = F}
|
||||||
pm25_sensors = osem_boxes(
|
pm25_sensors = osem_boxes(
|
||||||
exposure = 'outdoor',
|
exposure = 'outdoor',
|
||||||
date = Sys.time(), # ±4 hours
|
date = Sys.time(), # ±4 hours
|
||||||
|
@ -86,8 +89,6 @@ pm25_sensors = osem_boxes(
|
||||||
)
|
)
|
||||||
```
|
```
|
||||||
```{r}
|
```{r}
|
||||||
pm25_sensors = readRDS('pm25_sensors.rds') # read precomputed file to save resources
|
|
||||||
|
|
||||||
summary(pm25_sensors)
|
summary(pm25_sensors)
|
||||||
plot(pm25_sensors)
|
plot(pm25_sensors)
|
||||||
```
|
```
|
||||||
|
@ -96,20 +97,16 @@ Thats still more than 200 measuring stations, we can work with that.
|
||||||
|
|
||||||
### Analyzing sensor data
|
### Analyzing sensor data
|
||||||
Having analyzed the available data sources, let's finally get some measurements.
|
Having analyzed the available data sources, let's finally get some measurements.
|
||||||
We could call `osem_measurements(pm25_sensors)` now, however we are focusing on
|
We could call `osem_measurements(pm25_sensors)` now, however we are focussing on
|
||||||
a restricted area of interest, the city of Berlin.
|
a restricted area of interest, the city of Berlin.
|
||||||
Luckily we can get the measurements filtered by a bounding box:
|
Luckily we can get the measurements filtered by a bounding box:
|
||||||
|
|
||||||
```{r, results=FALSE, message=FALSE}
|
```{r}
|
||||||
library(sf)
|
library(sf)
|
||||||
library(units)
|
library(units)
|
||||||
library(lubridate)
|
library(lubridate)
|
||||||
library(dplyr)
|
library(dplyr)
|
||||||
|
|
||||||
```
|
|
||||||
|
|
||||||
Since the API can take quite a long time to respond with measurements, especially when filtering by space and time, we do not run the following chunks for the CRAN release of the package.
|
|
||||||
```{r bbox, results = FALSE, eval=FALSE}
|
|
||||||
# construct a bounding box: 12 kilometers around Berlin
|
# construct a bounding box: 12 kilometers around Berlin
|
||||||
berlin = st_point(c(13.4034, 52.5120)) %>%
|
berlin = st_point(c(13.4034, 52.5120)) %>%
|
||||||
st_sfc(crs = 4326) %>%
|
st_sfc(crs = 4326) %>%
|
||||||
|
@ -117,26 +114,24 @@ berlin = st_point(c(13.4034, 52.5120)) %>%
|
||||||
st_buffer(set_units(12, km)) %>%
|
st_buffer(set_units(12, km)) %>%
|
||||||
st_transform(4326) %>% # the opensensemap expects WGS 84
|
st_transform(4326) %>% # the opensensemap expects WGS 84
|
||||||
st_bbox()
|
st_bbox()
|
||||||
|
```
|
||||||
|
```{r results = F}
|
||||||
pm25 = osem_measurements(
|
pm25 = osem_measurements(
|
||||||
berlin,
|
berlin,
|
||||||
phenomenon = 'PM2.5',
|
phenomenon = 'PM2.5',
|
||||||
from = now() - days(3), # defaults to 2 days
|
from = now() - days(20), # defaults to 2 days
|
||||||
to = now()
|
to = now()
|
||||||
)
|
)
|
||||||
|
|
||||||
```
|
|
||||||
|
|
||||||
```{r}
|
|
||||||
pm25 = readRDS('pm25_berlin.rds') # read precomputed file to save resources
|
|
||||||
plot(pm25)
|
plot(pm25)
|
||||||
```
|
```
|
||||||
|
|
||||||
Now we can get started with actual spatiotemporal data analysis.
|
Now we can get started with actual spatiotemporal data analysis.
|
||||||
First, lets mask the seemingly uncalibrated sensors:
|
First, lets mask the seemingly uncalibrated sensors:
|
||||||
|
|
||||||
```{r, warning=FALSE}
|
```{r}
|
||||||
outliers = filter(pm25, value > 100)$sensorId
|
outliers = filter(pm25, value > 100)$sensorId
|
||||||
bad_sensors = outliers[, drop = TRUE] %>% levels()
|
bad_sensors = outliers[, drop = T] %>% levels()
|
||||||
|
|
||||||
pm25 = mutate(pm25, invalid = sensorId %in% bad_sensors)
|
pm25 = mutate(pm25, invalid = sensorId %in% bad_sensors)
|
||||||
```
|
```
|
||||||
|
@ -144,7 +139,7 @@ pm25 = mutate(pm25, invalid = sensorId %in% bad_sensors)
|
||||||
Then plot the measuring locations, flagging the outliers:
|
Then plot the measuring locations, flagging the outliers:
|
||||||
|
|
||||||
```{r}
|
```{r}
|
||||||
st_as_sf(pm25) %>% st_geometry() %>% plot(col = factor(pm25$invalid), axes = TRUE)
|
st_as_sf(pm25) %>% st_geometry() %>% plot(col = factor(pm25$invalid), axes = T)
|
||||||
```
|
```
|
||||||
|
|
||||||
Removing these sensors yields a nicer time series plot:
|
Removing these sensors yields a nicer time series plot:
|
||||||
|
|
File diff suppressed because one or more lines are too long
|
@ -1,51 +1,96 @@
|
||||||
## ----setup, results='hide'----------------------------------------------------
|
## ----cache---------------------------------------------------------------
|
||||||
# this vignette requires:
|
b = osem_boxes(cache = tempdir())
|
||||||
|
list.files(tempdir(), pattern = 'osemcache\\..*\\.rds')
|
||||||
|
|
||||||
|
# the next identical request will hit the cache only!
|
||||||
|
b = osem_boxes(cache = tempdir())
|
||||||
|
|
||||||
|
# requests without the cache parameter will still be performed normally
|
||||||
|
b = osem_boxes()
|
||||||
|
|
||||||
|
## ----cache_custom--------------------------------------------------------
|
||||||
|
cacheDir = getwd() # current working directory
|
||||||
|
b = osem_boxes(cache = cacheDir)
|
||||||
|
|
||||||
|
# the next identical request will hit the cache only!
|
||||||
|
b = osem_boxes(cache = cacheDir)
|
||||||
|
|
||||||
|
## ----clearcache----------------------------------------------------------
|
||||||
|
osem_clear_cache() # clears default cache
|
||||||
|
osem_clear_cache(getwd()) # clears a custom cache
|
||||||
|
|
||||||
|
## ----setup, results='hide'-----------------------------------------------
|
||||||
|
# this section requires:
|
||||||
library(opensensmapr)
|
library(opensensmapr)
|
||||||
library(jsonlite)
|
library(jsonlite)
|
||||||
library(readr)
|
library(readr)
|
||||||
|
|
||||||
## ----cache--------------------------------------------------------------------
|
# first get our example data:
|
||||||
b = osem_boxes(grouptag = 'ifgi', cache = tempdir())
|
boxes = osem_boxes(grouptag = 'ifgi')
|
||||||
|
measurements = osem_measurements(boxes, phenomenon = 'PM10')
|
||||||
|
|
||||||
# the next identical request will hit the cache only!
|
## ----serialize_json------------------------------------------------------
|
||||||
b = osem_boxes(grouptag = 'ifgi', cache = tempdir())
|
# serializing senseBoxes to JSON, and loading from file again:
|
||||||
|
write(jsonlite::serializeJSON(measurements), 'boxes.json')
|
||||||
|
boxes_from_file = jsonlite::unserializeJSON(readr::read_file('boxes.json'))
|
||||||
|
|
||||||
# requests without the cache parameter will still be performed normally
|
## ----serialize_attrs-----------------------------------------------------
|
||||||
b = osem_boxes(grouptag = 'ifgi')
|
# note the toJSON call
|
||||||
|
write(jsonlite::toJSON(measurements), 'boxes_bad.json')
|
||||||
|
boxes_without_attrs = jsonlite::fromJSON('boxes_bad.json')
|
||||||
|
|
||||||
## ----cachelisting-------------------------------------------------------------
|
boxes_with_attrs = osem_as_sensebox(boxes_without_attrs)
|
||||||
list.files(tempdir(), pattern = 'osemcache\\..*\\.rds')
|
class(boxes_with_attrs)
|
||||||
|
|
||||||
## ----cache_custom-------------------------------------------------------------
|
## ----osem_offline--------------------------------------------------------
|
||||||
cacheDir = getwd() # current working directory
|
# offline logic
|
||||||
b = osem_boxes(grouptag = 'ifgi', cache = cacheDir)
|
osem_offline = function (func, file, format='rds', ...) {
|
||||||
|
# deserialize if file exists, otherwise download and serialize
|
||||||
|
if (file.exists(file)) {
|
||||||
|
if (format == 'json')
|
||||||
|
jsonlite::unserializeJSON(readr::read_file(file))
|
||||||
|
else
|
||||||
|
readRDS(file)
|
||||||
|
} else {
|
||||||
|
data = func(...)
|
||||||
|
if (format == 'json')
|
||||||
|
write(jsonlite::serializeJSON(data), file = file)
|
||||||
|
else
|
||||||
|
saveRDS(data, file)
|
||||||
|
data
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
# the next identical request will hit the cache only!
|
# wrappers for each download function
|
||||||
b = osem_boxes(grouptag = 'ifgi', cache = cacheDir)
|
osem_measurements_offline = function (file, ...) {
|
||||||
|
osem_offline(opensensmapr::osem_measurements, file, ...)
|
||||||
|
}
|
||||||
|
osem_boxes_offline = function (file, ...) {
|
||||||
|
osem_offline(opensensmapr::osem_boxes, file, ...)
|
||||||
|
}
|
||||||
|
osem_box_offline = function (file, ...) {
|
||||||
|
osem_offline(opensensmapr::osem_box, file, ...)
|
||||||
|
}
|
||||||
|
osem_counts_offline = function (file, ...) {
|
||||||
|
osem_offline(opensensmapr::osem_counts, file, ...)
|
||||||
|
}
|
||||||
|
|
||||||
## ----clearcache, results='hide'-----------------------------------------------
|
## ----test----------------------------------------------------------------
|
||||||
osem_clear_cache() # clears default cache
|
# first run; will download and save to disk
|
||||||
osem_clear_cache(getwd()) # clears a custom cache
|
b1 = osem_boxes_offline('mobileboxes.rds', exposure='mobile')
|
||||||
|
|
||||||
## ----data, results='hide', eval=FALSE-----------------------------------------
|
# consecutive runs; will read from disk
|
||||||
# # first get our example data:
|
b2 = osem_boxes_offline('mobileboxes.rds', exposure='mobile')
|
||||||
# measurements = osem_measurements('Windgeschwindigkeit')
|
class(b1) == class(b2)
|
||||||
|
|
||||||
## ----serialize_json, eval=FALSE-----------------------------------------------
|
# we can even omit the arguments now (though thats not really the point here)
|
||||||
# # serializing senseBoxes to JSON, and loading from file again:
|
b3 = osem_boxes_offline('mobileboxes.rds')
|
||||||
# write(jsonlite::serializeJSON(measurements), 'measurements.json')
|
nrow(b1) == nrow(b3)
|
||||||
# measurements_from_file = jsonlite::unserializeJSON(readr::read_file('measurements.json'))
|
|
||||||
# class(measurements_from_file)
|
|
||||||
|
|
||||||
## ----serialize_attrs, eval=FALSE----------------------------------------------
|
# verify that the custom sensebox methods are still working
|
||||||
# # note the toJSON call instead of serializeJSON
|
summary(b2)
|
||||||
# write(jsonlite::toJSON(measurements), 'measurements_bad.json')
|
plot(b3)
|
||||||
# measurements_without_attrs = jsonlite::fromJSON('measurements_bad.json')
|
|
||||||
# class(measurements_without_attrs)
|
|
||||||
#
|
|
||||||
# measurements_with_attrs = osem_as_measurements(measurements_without_attrs)
|
|
||||||
# class(measurements_with_attrs)
|
|
||||||
|
|
||||||
## ----cleanup, include=FALSE, eval=FALSE---------------------------------------
|
## ----cleanup, results='hide'---------------------------------------------
|
||||||
# file.remove('measurements.json', 'measurements_bad.json')
|
file.remove('mobileboxes.rds', 'boxes_bad.json', 'boxes.json', 'measurements.rds')
|
||||||
|
|
||||||
|
|
|
@ -10,7 +10,7 @@ vignette: >
|
||||||
---
|
---
|
||||||
|
|
||||||
It may be useful to download data from openSenseMap only once.
|
It may be useful to download data from openSenseMap only once.
|
||||||
For reproducible results, the data should be saved to disk, and reloaded at a
|
For reproducible results, the data could be saved to disk, and reloaded at a
|
||||||
later point.
|
later point.
|
||||||
|
|
||||||
This avoids..
|
This avoids..
|
||||||
|
@ -21,49 +21,40 @@ This avoids..
|
||||||
- stress on the openSenseMap-server.
|
- stress on the openSenseMap-server.
|
||||||
|
|
||||||
This vignette shows how to use this built in `opensensmapr` feature, and
|
This vignette shows how to use this built in `opensensmapr` feature, and
|
||||||
how to do it yourself in case you want to save to other data formats.
|
how to do it yourself, if you want to store to other data formats.
|
||||||
|
|
||||||
```{r setup, results='hide'}
|
## Using openSensMapr Caching Feature
|
||||||
# this vignette requires:
|
|
||||||
library(opensensmapr)
|
|
||||||
library(jsonlite)
|
|
||||||
library(readr)
|
|
||||||
```
|
|
||||||
|
|
||||||
## Using the opensensmapr Caching Feature
|
|
||||||
All data retrieval functions of `opensensmapr` have a built in caching feature,
|
All data retrieval functions of `opensensmapr` have a built in caching feature,
|
||||||
which serializes an API response to disk.
|
which serializes an API response to disk.
|
||||||
Subsequent identical requests will then return the serialized data instead of making
|
Subsequent identical requests will then return the serialized data instead of making
|
||||||
another request.
|
another request.
|
||||||
|
To do so, each request is given a unique ID based on its parameters.
|
||||||
|
|
||||||
To use this feature, just add a path to a directory to the `cache` parameter:
|
To use this feature, just add a path to a directory to the `cache` parameter:
|
||||||
```{r cache}
|
```{r cache}
|
||||||
b = osem_boxes(grouptag = 'ifgi', cache = tempdir())
|
b = osem_boxes(cache = tempdir())
|
||||||
|
list.files(tempdir(), pattern = 'osemcache\\..*\\.rds')
|
||||||
|
|
||||||
# the next identical request will hit the cache only!
|
# the next identical request will hit the cache only!
|
||||||
b = osem_boxes(grouptag = 'ifgi', cache = tempdir())
|
b = osem_boxes(cache = tempdir())
|
||||||
|
|
||||||
# requests without the cache parameter will still be performed normally
|
# requests without the cache parameter will still be performed normally
|
||||||
b = osem_boxes(grouptag = 'ifgi')
|
b = osem_boxes()
|
||||||
```
|
```
|
||||||
|
|
||||||
Looking at the cache directory we can see one file for each request, which is identified through a hash of the request URL:
|
You can maintain multiple caches simultaneously which allows to store only
|
||||||
```{r cachelisting}
|
serialized data related to a script in its directory:
|
||||||
list.files(tempdir(), pattern = 'osemcache\\..*\\.rds')
|
|
||||||
```
|
|
||||||
|
|
||||||
You can maintain multiple caches simultaneously, which allows you to store only the data related to a script in the same directory:
|
|
||||||
```{r cache_custom}
|
```{r cache_custom}
|
||||||
cacheDir = getwd() # current working directory
|
cacheDir = getwd() # current working directory
|
||||||
b = osem_boxes(grouptag = 'ifgi', cache = cacheDir)
|
b = osem_boxes(cache = cacheDir)
|
||||||
|
|
||||||
# the next identical request will hit the cache only!
|
# the next identical request will hit the cache only!
|
||||||
b = osem_boxes(grouptag = 'ifgi', cache = cacheDir)
|
b = osem_boxes(cache = cacheDir)
|
||||||
```
|
```
|
||||||
|
|
||||||
To get fresh results again, just call `osem_clear_cache()` for the respective cache:
|
To get fresh results again, just call `osem_clear_cache()` for the respective cache:
|
||||||
```{r clearcache, results='hide'}
|
```{r clearcache}
|
||||||
osem_clear_cache() # clears default cache
|
osem_clear_cache() # clears default cache
|
||||||
osem_clear_cache(getwd()) # clears a custom cache
|
osem_clear_cache(getwd()) # clears a custom cache
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -71,36 +62,108 @@ osem_clear_cache(getwd()) # clears a custom cache
|
||||||
If you want to roll your own serialization method to support custom data formats,
|
If you want to roll your own serialization method to support custom data formats,
|
||||||
here's how:
|
here's how:
|
||||||
|
|
||||||
```{r data, results='hide', eval=FALSE}
|
```{r setup, results='hide'}
|
||||||
|
# this section requires:
|
||||||
|
library(opensensmapr)
|
||||||
|
library(jsonlite)
|
||||||
|
library(readr)
|
||||||
|
|
||||||
# first get our example data:
|
# first get our example data:
|
||||||
measurements = osem_measurements('Windgeschwindigkeit')
|
boxes = osem_boxes(grouptag = 'ifgi')
|
||||||
|
measurements = osem_measurements(boxes, phenomenon = 'PM10')
|
||||||
```
|
```
|
||||||
|
|
||||||
If you are paranoid and worry about `.rds` files not being decodable anymore
|
If you are paranoid and worry about `.rds` files not being decodable anymore
|
||||||
in the (distant) future, you could serialize to a plain text format such as JSON.
|
in the (distant) future, you could serialize to a plain text format such as JSON.
|
||||||
This of course comes at the cost of storage space and performance.
|
This of course comes at the cost of storage space and performance.
|
||||||
```{r serialize_json, eval=FALSE}
|
```{r serialize_json}
|
||||||
# serializing senseBoxes to JSON, and loading from file again:
|
# serializing senseBoxes to JSON, and loading from file again:
|
||||||
write(jsonlite::serializeJSON(measurements), 'measurements.json')
|
write(jsonlite::serializeJSON(measurements), 'boxes.json')
|
||||||
measurements_from_file = jsonlite::unserializeJSON(readr::read_file('measurements.json'))
|
boxes_from_file = jsonlite::unserializeJSON(readr::read_file('boxes.json'))
|
||||||
class(measurements_from_file)
|
|
||||||
```
|
```
|
||||||
|
|
||||||
This method also persists the R object metadata (classes, attributes).
|
Both methods also persist the R object metadata (classes, attributes).
|
||||||
If you were to use a serialization method that can't persist object metadata, you
|
If you were to use a serialization method that can't persist object metadata, you
|
||||||
could re-apply it with the following functions:
|
could re-apply it with the following functions:
|
||||||
|
|
||||||
```{r serialize_attrs, eval=FALSE}
|
```{r serialize_attrs}
|
||||||
# note the toJSON call instead of serializeJSON
|
# note the toJSON call
|
||||||
write(jsonlite::toJSON(measurements), 'measurements_bad.json')
|
write(jsonlite::toJSON(measurements), 'boxes_bad.json')
|
||||||
measurements_without_attrs = jsonlite::fromJSON('measurements_bad.json')
|
boxes_without_attrs = jsonlite::fromJSON('boxes_bad.json')
|
||||||
class(measurements_without_attrs)
|
|
||||||
|
|
||||||
measurements_with_attrs = osem_as_measurements(measurements_without_attrs)
|
boxes_with_attrs = osem_as_sensebox(boxes_without_attrs)
|
||||||
class(measurements_with_attrs)
|
class(boxes_with_attrs)
|
||||||
```
|
```
|
||||||
The same goes for boxes via `osem_as_sensebox()`.
|
The same goes for measurements via `osem_as_measurements()`.
|
||||||
|
|
||||||
```{r cleanup, include=FALSE, eval=FALSE}
|
## Workflow for reproducible code
|
||||||
file.remove('measurements.json', 'measurements_bad.json')
|
For truly reproducible code you want it to work and return the same results --
|
||||||
|
no matter if you run it the first time or a consecutive time, and without making
|
||||||
|
changes to it.
|
||||||
|
|
||||||
|
Therefore we need a wrapper around the save-to-file & load-from-file logic.
|
||||||
|
The following examples show a way to do just that, and where inspired by
|
||||||
|
[this reproducible analysis by Daniel Nuest](https://github.com/nuest/sensebox-binder).
|
||||||
|
|
||||||
|
```{r osem_offline}
|
||||||
|
# offline logic
|
||||||
|
osem_offline = function (func, file, format='rds', ...) {
|
||||||
|
# deserialize if file exists, otherwise download and serialize
|
||||||
|
if (file.exists(file)) {
|
||||||
|
if (format == 'json')
|
||||||
|
jsonlite::unserializeJSON(readr::read_file(file))
|
||||||
|
else
|
||||||
|
readRDS(file)
|
||||||
|
} else {
|
||||||
|
data = func(...)
|
||||||
|
if (format == 'json')
|
||||||
|
write(jsonlite::serializeJSON(data), file = file)
|
||||||
|
else
|
||||||
|
saveRDS(data, file)
|
||||||
|
data
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# wrappers for each download function
|
||||||
|
osem_measurements_offline = function (file, ...) {
|
||||||
|
osem_offline(opensensmapr::osem_measurements, file, ...)
|
||||||
|
}
|
||||||
|
osem_boxes_offline = function (file, ...) {
|
||||||
|
osem_offline(opensensmapr::osem_boxes, file, ...)
|
||||||
|
}
|
||||||
|
osem_box_offline = function (file, ...) {
|
||||||
|
osem_offline(opensensmapr::osem_box, file, ...)
|
||||||
|
}
|
||||||
|
osem_counts_offline = function (file, ...) {
|
||||||
|
osem_offline(opensensmapr::osem_counts, file, ...)
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Thats it! Now let's try it out:
|
||||||
|
|
||||||
|
```{r test}
|
||||||
|
# first run; will download and save to disk
|
||||||
|
b1 = osem_boxes_offline('mobileboxes.rds', exposure='mobile')
|
||||||
|
|
||||||
|
# consecutive runs; will read from disk
|
||||||
|
b2 = osem_boxes_offline('mobileboxes.rds', exposure='mobile')
|
||||||
|
class(b1) == class(b2)
|
||||||
|
|
||||||
|
# we can even omit the arguments now (though thats not really the point here)
|
||||||
|
b3 = osem_boxes_offline('mobileboxes.rds')
|
||||||
|
nrow(b1) == nrow(b3)
|
||||||
|
|
||||||
|
# verify that the custom sensebox methods are still working
|
||||||
|
summary(b2)
|
||||||
|
plot(b3)
|
||||||
|
```
|
||||||
|
|
||||||
|
To re-download the data, just clear the files that were created in the process:
|
||||||
|
```{r cleanup, results='hide'}
|
||||||
|
file.remove('mobileboxes.rds', 'boxes_bad.json', 'boxes.json', 'measurements.rds')
|
||||||
|
```
|
||||||
|
|
||||||
|
A possible extension to this scheme comes to mind: Omit the specification of a
|
||||||
|
filename, and assign a unique ID to the request instead.
|
||||||
|
For example, one could calculate the SHA-1 hash of the parameters, and use it
|
||||||
|
as filename.
|
||||||
|
|
File diff suppressed because one or more lines are too long
|
@ -1,25 +0,0 @@
|
||||||
% Generated by roxygen2: do not edit by hand
|
|
||||||
% Please edit documentation in R/archive.R
|
|
||||||
\name{archive_fetch_measurements}
|
|
||||||
\alias{archive_fetch_measurements}
|
|
||||||
\title{fetch measurements from archive from a single box, and a single sensor}
|
|
||||||
\usage{
|
|
||||||
archive_fetch_measurements(box, sensorId, fromDate, toDate, progress)
|
|
||||||
}
|
|
||||||
\arguments{
|
|
||||||
\item{box}{A sensebox data.frame with a single box}
|
|
||||||
|
|
||||||
\item{sensorId}{Character specifying the sensor}
|
|
||||||
|
|
||||||
\item{fromDate}{Start date for measurement download, must be convertible via `as.Date`.}
|
|
||||||
|
|
||||||
\item{toDate}{End date for measurement download (inclusive).}
|
|
||||||
|
|
||||||
\item{progress}{whether to print progress}
|
|
||||||
}
|
|
||||||
\value{
|
|
||||||
A \code{tbl_df} containing observations of all selected sensors for each time stamp.
|
|
||||||
}
|
|
||||||
\description{
|
|
||||||
fetch measurements from archive from a single box, and a single sensor
|
|
||||||
}
|
|
|
@ -1,5 +1,5 @@
|
||||||
% Generated by roxygen2: do not edit by hand
|
% Generated by roxygen2: do not edit by hand
|
||||||
% Please edit documentation in R/external_generics.R
|
% Please edit documentation in R/measurement_utils.R
|
||||||
\name{filter.osem_measurements}
|
\name{filter.osem_measurements}
|
||||||
\alias{filter.osem_measurements}
|
\alias{filter.osem_measurements}
|
||||||
\title{Return rows with matching conditions, while maintaining class & attributes}
|
\title{Return rows with matching conditions, while maintaining class & attributes}
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
% Generated by roxygen2: do not edit by hand
|
% Generated by roxygen2: do not edit by hand
|
||||||
% Please edit documentation in R/external_generics.R
|
% Please edit documentation in R/box_utils.R
|
||||||
\name{filter.sensebox}
|
\name{filter.sensebox}
|
||||||
\alias{filter.sensebox}
|
\alias{filter.sensebox}
|
||||||
\title{Return rows with matching conditions, while maintaining class & attributes}
|
\title{Return rows with matching conditions, while maintaining class & attributes}
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
% Generated by roxygen2: do not edit by hand
|
% Generated by roxygen2: do not edit by hand
|
||||||
% Please edit documentation in R/external_generics.R
|
% Please edit documentation in R/measurement_utils.R
|
||||||
\name{mutate.osem_measurements}
|
\name{mutate.osem_measurements}
|
||||||
\alias{mutate.osem_measurements}
|
\alias{mutate.osem_measurements}
|
||||||
\title{Add new variables to the data, while maintaining class & attributes}
|
\title{Add new variables to the data, while maintaining class & attributes}
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
% Generated by roxygen2: do not edit by hand
|
% Generated by roxygen2: do not edit by hand
|
||||||
% Please edit documentation in R/external_generics.R
|
% Please edit documentation in R/box_utils.R
|
||||||
\name{mutate.sensebox}
|
\name{mutate.sensebox}
|
||||||
\alias{mutate.sensebox}
|
\alias{mutate.sensebox}
|
||||||
\title{Add new variables to the data, while maintaining class & attributes}
|
\title{Add new variables to the data, while maintaining class & attributes}
|
||||||
|
|
|
@ -4,6 +4,7 @@
|
||||||
\name{opensensmapr}
|
\name{opensensmapr}
|
||||||
\alias{opensensmapr}
|
\alias{opensensmapr}
|
||||||
\alias{opensensmapr-package}
|
\alias{opensensmapr-package}
|
||||||
|
\alias{opensensmapr-package}
|
||||||
\title{opensensmapr: Get sensor data from opensensemap.org}
|
\title{opensensmapr: Get sensor data from opensensemap.org}
|
||||||
\description{
|
\description{
|
||||||
The opensensmapr package provides functions for
|
The opensensmapr package provides functions for
|
||||||
|
@ -46,27 +47,16 @@ implemented:
|
||||||
|
|
||||||
\section{Retrieving measurements}{
|
\section{Retrieving measurements}{
|
||||||
|
|
||||||
There are two ways to retrieve measurements:
|
Measurements can be retrieved through \code{\link{osem_measurements}} for a
|
||||||
|
given phenomenon only. A subset of measurements may be selected by
|
||||||
|
|
||||||
\itemize{
|
\itemize{
|
||||||
\item \code{\link{osem_measurements_archive}}:
|
\item a list of senseBoxes, previously retrieved through
|
||||||
Downloads measurements for a \emph{single box} from the openSenseMap archive.
|
\code{\link{osem_box}} or \code{\link{osem_boxes}}.
|
||||||
This function does not provide realtime data, but is suitable for long time frames.
|
\item a geographic bounding box, which can be generated with the
|
||||||
|
\code{\link[sf]{sf}} package.
|
||||||
\item \code{\link{osem_measurements}}:
|
\item a time frame
|
||||||
This function retrieves (realtime) measurements from the API. It works for a
|
\item a exposure type of the given box
|
||||||
\emph{single phenomenon} only, but provides various filters to select sensors by
|
|
||||||
|
|
||||||
\itemize{
|
|
||||||
\item a list of senseBoxes, previously retrieved through
|
|
||||||
\code{\link{osem_box}} or \code{\link{osem_boxes}}.
|
|
||||||
\item a geographic bounding box, which can be generated with the
|
|
||||||
\code{\link[sf]{sf}} package.
|
|
||||||
\item a time frame
|
|
||||||
\item a exposure type of the given box
|
|
||||||
}
|
|
||||||
|
|
||||||
Use this function with caution for long time frames, as the API becomes
|
|
||||||
quite slow and is limited to 10,000 measurements per 30-day interval.
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Data is returned as \code{tibble} with the class \code{osem_measurements}.
|
Data is returned as \code{tibble} with the class \code{osem_measurements}.
|
||||||
|
@ -77,16 +67,6 @@ Data is returned as \code{tibble} with the class \code{osem_measurements}.
|
||||||
Count statistics about the database are provided with \code{\link{osem_counts}}.
|
Count statistics about the database are provided with \code{\link{osem_counts}}.
|
||||||
}
|
}
|
||||||
|
|
||||||
\section{Using a different API instance / endpoint}{
|
|
||||||
|
|
||||||
You can override the functions \code{osem_endpoint} and \code{osem_endpoint_archive}
|
|
||||||
inside the package namespace:
|
|
||||||
|
|
||||||
\code{
|
|
||||||
assignInNamespace("osem_endpoint", function() "http://mynewosem.org", "opensensmapr")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
\section{Integration with other packages}{
|
\section{Integration with other packages}{
|
||||||
|
|
||||||
The package aims to be compatible with the tidyverse.
|
The package aims to be compatible with the tidyverse.
|
||||||
|
@ -105,23 +85,18 @@ Helpers are implemented to ease the further usage of the retrieved data:
|
||||||
}
|
}
|
||||||
|
|
||||||
\seealso{
|
\seealso{
|
||||||
Report bugs at \url{https://github.com/sensebox/opensensmapR/issues}
|
Report bugs at \url{https://github.com/noerw/opensensmapR/issues}
|
||||||
|
|
||||||
openSenseMap API: \url{https://api.opensensemap.org/}
|
openSenseMap API: \url{https://api.opensensemap.org/}
|
||||||
|
|
||||||
official openSenseMap API documentation: \url{https://docs.opensensemap.org/}
|
official openSenseMap API documentation: \url{https://docs.opensensemap.org/}
|
||||||
}
|
}
|
||||||
\author{
|
\author{
|
||||||
\strong{Maintainer}: Jan Stenkamp \email{jan.stenkamp@uni-muenster.de} [contributor]
|
\strong{Maintainer}: Norwin Roosen \email{hello@nroo.de}
|
||||||
|
|
||||||
Authors:
|
|
||||||
\itemize{
|
|
||||||
\item Norwin Roosen \email{hello@nroo.de}
|
|
||||||
}
|
|
||||||
|
|
||||||
Other contributors:
|
Other contributors:
|
||||||
\itemize{
|
\itemize{
|
||||||
\item Daniel Nuest \email{daniel.nuest@uni-muenster.de} (\href{https://orcid.org/0000-0003-2392-6140}{ORCID}) [contributor]
|
\item Daniel Nuest \email{daniel.nuest@uni-muenster.de} (0000-0003-2392-6140) [contributor]
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,15 +0,0 @@
|
||||||
% Generated by roxygen2: do not edit by hand
|
|
||||||
% Please edit documentation in R/archive.R
|
|
||||||
\name{osem_archive_endpoint}
|
|
||||||
\alias{osem_archive_endpoint}
|
|
||||||
\title{Returns the default endpoint for the archive *download*
|
|
||||||
While the front end domain is archive.opensensemap.org, file downloads
|
|
||||||
are provided via sciebo.}
|
|
||||||
\usage{
|
|
||||||
osem_archive_endpoint()
|
|
||||||
}
|
|
||||||
\description{
|
|
||||||
Returns the default endpoint for the archive *download*
|
|
||||||
While the front end domain is archive.opensensemap.org, file downloads
|
|
||||||
are provided via sciebo.
|
|
||||||
}
|
|
|
@ -7,11 +7,7 @@
|
||||||
osem_as_measurements(x)
|
osem_as_measurements(x)
|
||||||
}
|
}
|
||||||
\arguments{
|
\arguments{
|
||||||
\item{x}{A data.frame to attach the class to.
|
\item{x}{A data.frame to attach the class to}
|
||||||
Should have at least a `value` and `createdAt` column.}
|
|
||||||
}
|
|
||||||
\value{
|
|
||||||
data.frame of class \code{osem_measurements}
|
|
||||||
}
|
}
|
||||||
\description{
|
\description{
|
||||||
Converts a foreign object to an osem_measurements data.frame.
|
Converts a foreign object to an osem_measurements data.frame.
|
||||||
|
|
|
@ -9,9 +9,6 @@ osem_as_sensebox(x)
|
||||||
\arguments{
|
\arguments{
|
||||||
\item{x}{A data.frame to attach the class to}
|
\item{x}{A data.frame to attach the class to}
|
||||||
}
|
}
|
||||||
\value{
|
|
||||||
data.frame of class \code{sensebox}
|
|
||||||
}
|
|
||||||
\description{
|
\description{
|
||||||
Converts a foreign object to a sensebox data.frame.
|
Converts a foreign object to a sensebox data.frame.
|
||||||
}
|
}
|
||||||
|
|
|
@ -21,7 +21,7 @@ A \code{sensebox data.frame} containing a box in each row
|
||||||
Get a single senseBox by its ID
|
Get a single senseBox by its ID
|
||||||
}
|
}
|
||||||
\examples{
|
\examples{
|
||||||
\dontrun{
|
\donttest{
|
||||||
# get a specific box by ID
|
# get a specific box by ID
|
||||||
b = osem_box('57000b8745fd40c8196ad04c')
|
b = osem_box('57000b8745fd40c8196ad04c')
|
||||||
|
|
||||||
|
|
|
@ -1,19 +0,0 @@
|
||||||
% Generated by roxygen2: do not edit by hand
|
|
||||||
% Please edit documentation in R/archive.R
|
|
||||||
\name{osem_box_to_archivename}
|
|
||||||
\alias{osem_box_to_archivename}
|
|
||||||
\title{replace chars in box name according to archive script:
|
|
||||||
https://github.com/sensebox/osem-archiver/blob/612e14b/helpers.sh#L66}
|
|
||||||
\usage{
|
|
||||||
osem_box_to_archivename(box)
|
|
||||||
}
|
|
||||||
\arguments{
|
|
||||||
\item{box}{A sensebox data.frame}
|
|
||||||
}
|
|
||||||
\value{
|
|
||||||
character with archive identifier for each box
|
|
||||||
}
|
|
||||||
\description{
|
|
||||||
replace chars in box name according to archive script:
|
|
||||||
https://github.com/sensebox/osem-archiver/blob/612e14b/helpers.sh#L66
|
|
||||||
}
|
|
|
@ -4,19 +4,9 @@
|
||||||
\alias{osem_boxes}
|
\alias{osem_boxes}
|
||||||
\title{Get a set of senseBoxes from the openSenseMap}
|
\title{Get a set of senseBoxes from the openSenseMap}
|
||||||
\usage{
|
\usage{
|
||||||
osem_boxes(
|
osem_boxes(exposure = NA, model = NA, grouptag = NA, date = NA,
|
||||||
exposure = NA,
|
from = NA, to = NA, phenomenon = NA, endpoint = osem_endpoint(),
|
||||||
model = NA,
|
progress = TRUE, cache = NA)
|
||||||
grouptag = NA,
|
|
||||||
date = NA,
|
|
||||||
from = NA,
|
|
||||||
to = NA,
|
|
||||||
phenomenon = NA,
|
|
||||||
bbox = NA,
|
|
||||||
endpoint = osem_endpoint(),
|
|
||||||
progress = TRUE,
|
|
||||||
cache = NA
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
\arguments{
|
\arguments{
|
||||||
\item{exposure}{Only return boxes with the given exposure ('indoor', 'outdoor', 'mobile')}
|
\item{exposure}{Only return boxes with the given exposure ('indoor', 'outdoor', 'mobile')}
|
||||||
|
@ -34,11 +24,6 @@ osem_boxes(
|
||||||
\item{phenomenon}{Only return boxes that measured the given phenomenon in the
|
\item{phenomenon}{Only return boxes that measured the given phenomenon in the
|
||||||
time interval as specified through \code{date} or \code{from / to}}
|
time interval as specified through \code{date} or \code{from / to}}
|
||||||
|
|
||||||
\item{bbox}{Only return boxes that are within the given boundingbox,
|
|
||||||
vector of 4 WGS84 coordinates.
|
|
||||||
Order is: longitude southwest, latitude southwest, longitude northeast, latitude northeast.
|
|
||||||
Minimal and maximal values are: -180, 180 for longitude and -90, 90 for latitude.}
|
|
||||||
|
|
||||||
\item{endpoint}{The URL of the openSenseMap API instance}
|
\item{endpoint}{The URL of the openSenseMap API instance}
|
||||||
|
|
||||||
\item{progress}{Whether to print download progress information, defaults to \code{TRUE}}
|
\item{progress}{Whether to print download progress information, defaults to \code{TRUE}}
|
||||||
|
@ -61,7 +46,7 @@ Note that some filters do not work together:
|
||||||
}
|
}
|
||||||
\examples{
|
\examples{
|
||||||
|
|
||||||
\dontrun{
|
\donttest{
|
||||||
# get *all* boxes available on the API
|
# get *all* boxes available on the API
|
||||||
b = osem_boxes()
|
b = osem_boxes()
|
||||||
|
|
||||||
|
|
|
@ -17,12 +17,11 @@ Boolean whether the deletion was successful
|
||||||
Purge cached responses from the given cache directory
|
Purge cached responses from the given cache directory
|
||||||
}
|
}
|
||||||
\examples{
|
\examples{
|
||||||
\dontrun{
|
\donttest{
|
||||||
osem_boxes(cache = tempdir())
|
osem_boxes(cache = tempdir())
|
||||||
osem_clear_cache()
|
osem_clear_cache()
|
||||||
|
|
||||||
cachedir = paste(getwd(), 'osemcache', sep = '/')
|
cachedir = paste(getwd(), 'osemcache', sep = '/')
|
||||||
dir.create(file.path(cachedir), showWarnings = FALSE)
|
|
||||||
osem_boxes(cache = cachedir)
|
osem_boxes(cache = cachedir)
|
||||||
osem_clear_cache(cachedir)
|
osem_clear_cache(cachedir)
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,17 +0,0 @@
|
||||||
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/api.R
\name{osem_ensure_api_available}
\alias{osem_ensure_api_available}
\title{Check if the given openSenseMap API endpoint is available}
\usage{
osem_ensure_api_available(endpoint = osem_endpoint())
}
\arguments{
\item{endpoint}{The API base URL to check, defaulting to \code{\link{osem_endpoint}}}
}
\value{
\code{TRUE} if the API is available, otherwise \code{stop()} is called.
}
\description{
Check if the given openSenseMap API endpoint is available
}
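A hedged sketch of how this check can guard a longer download script; the `tryCatch()` wrapper is our addition, and if the helper is not exported in your installed version it can be reached via `opensensmapr:::osem_ensure_api_available()`. An analogous check, `osem_ensure_archive_available()`, exists for the archive endpoint (next file):

```r
library(opensensmapr)

# returns TRUE when the API responds, otherwise stop()s with a message
ok = tryCatch(
  osem_ensure_api_available(),
  error = function(e) { message(conditionMessage(e)); FALSE }
)

if (ok) {
  boxes = osem_boxes(grouptag = 'ifgi')
}
```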
@ -1,17 +0,0 @@
|
||||||
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/archive.R
\name{osem_ensure_archive_available}
\alias{osem_ensure_archive_available}
\title{Check if the given openSenseMap archive endpoint is available}
\usage{
osem_ensure_archive_available(endpoint = osem_archive_endpoint())
}
\arguments{
\item{endpoint}{The archive base URL to check, defaulting to \code{\link{osem_archive_endpoint}}}
}
\value{
\code{TRUE} if the archive is available, otherwise \code{stop()} is called.
}
\description{
Check if the given openSenseMap archive endpoint is available
}
@ -5,37 +5,19 @@
|
||||||
\alias{osem_measurements.default}
|
\alias{osem_measurements.default}
|
||||||
\alias{osem_measurements.bbox}
|
\alias{osem_measurements.bbox}
|
||||||
\alias{osem_measurements.sensebox}
|
\alias{osem_measurements.sensebox}
|
||||||
\title{Fetch the Measurements of a Phenomenon on opensensemap.org}
|
\title{Get the Measurements of a Phenomenon on opensensemap.org}
|
||||||
\usage{
|
\usage{
|
||||||
osem_measurements(x, ...)
|
osem_measurements(x, ...)
|
||||||
|
|
||||||
\method{osem_measurements}{default}(x, ...)
|
\method{osem_measurements}{default}(x, ...)
|
||||||
|
|
||||||
\method{osem_measurements}{bbox}(
|
\method{osem_measurements}{bbox}(x, phenomenon, exposure = NA, from = NA,
|
||||||
x,
|
to = NA, columns = NA, ..., endpoint = osem_endpoint(), progress = T,
|
||||||
phenomenon,
|
cache = NA)
|
||||||
exposure = NA,
|
|
||||||
from = NA,
|
|
||||||
to = NA,
|
|
||||||
columns = NA,
|
|
||||||
...,
|
|
||||||
endpoint = osem_endpoint(),
|
|
||||||
progress = TRUE,
|
|
||||||
cache = NA
|
|
||||||
)
|
|
||||||
|
|
||||||
\method{osem_measurements}{sensebox}(
|
\method{osem_measurements}{sensebox}(x, phenomenon, exposure = NA,
|
||||||
x,
|
from = NA, to = NA, columns = NA, ..., endpoint = osem_endpoint(),
|
||||||
phenomenon,
|
progress = T, cache = NA)
|
||||||
exposure = NA,
|
|
||||||
from = NA,
|
|
||||||
to = NA,
|
|
||||||
columns = NA,
|
|
||||||
...,
|
|
||||||
endpoint = osem_endpoint(),
|
|
||||||
progress = TRUE,
|
|
||||||
cache = NA
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
\arguments{
|
\arguments{
|
||||||
\item{x}{Depending on the method, either
|
\item{x}{Depending on the method, either
|
||||||
|
@ -76,15 +58,15 @@ a bounding box spanning the whole world.
|
||||||
}
|
}
|
||||||
\section{Methods (by class)}{
|
\section{Methods (by class)}{
|
||||||
\itemize{
|
\itemize{
|
||||||
\item \code{osem_measurements(default)}: Get measurements from \strong{all} senseBoxes.
|
\item \code{default}: Get measurements from \strong{all} senseBoxes.
|
||||||
|
|
||||||
\item \code{osem_measurements(bbox)}: Get measurements by a spatial filter.
|
\item \code{bbox}: Get measurements by a spatial filter.
|
||||||
|
|
||||||
\item \code{osem_measurements(sensebox)}: Get measurements from a set of senseBoxes.
|
|
||||||
|
|
||||||
|
\item \code{sensebox}: Get measurements from a set of senseBoxes.
|
||||||
}}
|
}}
|
||||||
|
|
||||||
\examples{
|
\examples{
|
||||||
\dontrun{
|
\donttest{
|
||||||
# get measurements from all boxes on the phenomenon 'PM10' from the last 48h
|
# get measurements from all boxes on the phenomenon 'PM10' from the last 48h
|
||||||
m = osem_measurements('PM10')
|
m = osem_measurements('PM10')
|
||||||
|
|
||||||
|
@ -107,7 +89,7 @@ a bounding box spanning the whole world.
|
||||||
'height'
|
'height'
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
\dontrun{
|
\donttest{
|
||||||
# get measurements from sensors within a custom WGS84 bounding box
|
# get measurements from sensors within a custom WGS84 bounding box
|
||||||
bbox = structure(c(7, 51, 8, 52), class = 'bbox')
|
bbox = structure(c(7, 51, 8, 52), class = 'bbox')
|
||||||
m = osem_measurements(bbox, 'Temperatur')
|
m = osem_measurements(bbox, 'Temperatur')
|
||||||
|
@ -115,7 +97,6 @@ a bounding box spanning the whole world.
|
||||||
# construct a bounding box 12km around berlin using the sf package,
|
# construct a bounding box 12km around berlin using the sf package,
|
||||||
# and get measurements from stations within that box
|
# and get measurements from stations within that box
|
||||||
library(sf)
|
library(sf)
|
||||||
library(units)
|
|
||||||
bbox2 = st_point(c(13.4034, 52.5120)) \%>\%
|
bbox2 = st_point(c(13.4034, 52.5120)) \%>\%
|
||||||
st_sfc(crs = 4326) \%>\%
|
st_sfc(crs = 4326) \%>\%
|
||||||
st_transform(3857) \%>\% # allow setting a buffer in meters
|
st_transform(3857) \%>\% # allow setting a buffer in meters
|
||||||
|
|
|
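The diff hunk above truncates the `sf` example mid-pipeline; for readability, a complete version of the same pattern as a sketch (assembled from the package's intro vignette, with `magrittr` added for the pipe):

```r
library(sf)
library(units)
library(magrittr)
library(opensensmapr)

# construct a bounding box 12 km around Berlin and fetch measurements inside it
bbox2 = st_point(c(13.4034, 52.5120)) %>%
  st_sfc(crs = 4326) %>%
  st_transform(3857) %>%          # allow setting a buffer in meters
  st_buffer(set_units(12, km)) %>%
  st_transform(4326) %>%          # the openSenseMap API expects WGS 84
  st_bbox()

m = osem_measurements(bbox2, 'Temperatur')
```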
@ -1,75 +0,0 @@
|
||||||
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/archive.R
\name{osem_measurements_archive}
\alias{osem_measurements_archive}
\alias{osem_measurements_archive.sensebox}
\title{Fetch day-wise measurements for a single box from the openSenseMap archive.}
\usage{
osem_measurements_archive(x, ...)

\method{osem_measurements_archive}{sensebox}(
  x,
  fromDate,
  toDate = fromDate,
  sensorFilter = ~TRUE,
  ...,
  progress = TRUE
)
}
\arguments{
\item{x}{A `sensebox data.frame` of a single box, as retrieved via \code{\link{osem_box}},
to download measurements for.}

\item{...}{see parameters below}

\item{fromDate}{Start date for measurement download, must be convertable via `as.Date`.}

\item{toDate}{End date for measurement download (inclusive).}

\item{sensorFilter}{A NSE formula matching to \code{x$sensors}, selecting a subset of sensors.}

\item{progress}{Whether to print download progress information, defaults to \code{TRUE}.}
}
\value{
A \code{tbl_df} containing observations of all selected sensors for each time stamp.
}
\description{
This function is significantly faster than \code{\link{osem_measurements}} for large
time-frames, as daily CSV dumps for each sensor from
\href{https://archive.opensensemap.org}{archive.opensensemap.org} are used.
Note that the latest data available is from the previous day.
}
\details{
By default, data for all sensors of a box is fetched, but you can select a
subset with a \code{\link[dplyr]{dplyr}}-style NSE filter expression.

The function will warn when no data is available in the selected period,
but continue the remaining download.
}
\section{Methods (by class)}{
\itemize{
\item \code{osem_measurements_archive(sensebox)}: Get daywise measurements for one or more sensors of a single box.

}}
\examples{
\donttest{
# fetch measurements for a single day
box = osem_box('593bcd656ccf3b0011791f5a')
m = osem_measurements_archive(box, as.POSIXlt('2018-09-13'))

# fetch measurements for a date range and selected sensors
sensors = ~ phenomenon \%in\% c('Temperatur', 'Beleuchtungsstärke')
m = osem_measurements_archive(
  box,
  as.POSIXlt('2018-09-01'), as.POSIXlt('2018-09-30'),
  sensorFilter = sensors
)
}
}
\seealso{
\href{https://archive.opensensemap.org}{openSenseMap archive}

\code{\link{osem_measurements}}

\code{\link{osem_box}}
}
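Beyond the documented examples, a short hedged sketch of a typical follow-up step; the daily summary uses `dplyr` and does not depend on the exact sensor column names, only on the `createdAt` column noted in the package tests:

```r
library(opensensmapr)
library(dplyr)

box = osem_box('593bcd656ccf3b0011791f5a')
m = osem_measurements_archive(
  box,
  as.POSIXlt('2018-09-01'), as.POSIXlt('2018-09-07'),
  sensorFilter = ~ phenomenon == 'Temperatur'
)

# one column per selected sensor plus createdAt -> aggregate to daily means
m %>%
  mutate(day = as.Date(createdAt)) %>%
  group_by(day) %>%
  summarise(across(where(is.numeric), ~ mean(.x, na.rm = TRUE)))
```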
@ -21,21 +21,21 @@ Get the counts of sensors for each observed phenomenon.
|
||||||
}
|
}
|
||||||
\section{Methods (by class)}{
|
\section{Methods (by class)}{
|
||||||
\itemize{
|
\itemize{
|
||||||
\item \code{osem_phenomena(sensebox)}: Get counts of sensors observing each phenomenon
|
\item \code{sensebox}: Get counts of sensors observing each phenomenon
|
||||||
from a set of senseBoxes.
|
from a set of senseBoxes.
|
||||||
|
|
||||||
}}
|
}}
|
||||||
|
|
||||||
\examples{
|
\examples{
|
||||||
# get the phenomena for a single senseBox
|
# get the phenomena for a single senseBox
|
||||||
osem_phenomena(osem_box('593bcd656ccf3b0011791f5a'))
|
osem_phenomena(osem_box('593bcd656ccf3b0011791f5a'))
|
||||||
|
|
||||||
\donttest{
|
# get the phenomena for a group of senseBoxes
|
||||||
# get the phenomena for a group of senseBoxes
|
osem_phenomena(
|
||||||
osem_phenomena(
|
osem_boxes(grouptag = 'ifgi', exposure = 'outdoor', date = Sys.time())
|
||||||
osem_boxes(grouptag = 'ifgi', exposure = 'outdoor', date = Sys.time())
|
)
|
||||||
)
|
|
||||||
|
|
||||||
# get phenomena with at least 30 sensors on opensensemap
|
# get phenomena with at least 30 sensors on opensensemap
|
||||||
|
\donttest{
|
||||||
phenoms = osem_phenomena(osem_boxes())
|
phenoms = osem_phenomena(osem_boxes())
|
||||||
names(phenoms[phenoms > 29])
|
names(phenoms[phenoms > 29])
|
||||||
}
|
}
|
||||||
|
|
|
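Since `osem_phenomena()` returns a named list of sensor counts, a small sketch for turning it into a sortable table (the grouptag and ordering are arbitrary example choices):

```r
library(opensensmapr)

boxes = osem_boxes(grouptag = 'ifgi', exposure = 'outdoor', date = Sys.time())
phenoms = osem_phenomena(boxes)

# named list -> data.frame, sorted by sensor count
phenoms_df = data.frame(
  phenomenon = names(phenoms),
  sensors = unlist(phenoms),
  row.names = NULL
)
phenoms_df[order(-phenoms_df$sensors), ]
```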
@ -1,5 +1,5 @@
|
||||||
% Generated by roxygen2: do not edit by hand
|
% Generated by roxygen2: do not edit by hand
|
||||||
% Please edit documentation in R/external_generics.R
|
% Please edit documentation in R/measurement_utils.R
|
||||||
\name{st_as_sf.osem_measurements}
|
\name{st_as_sf.osem_measurements}
|
||||||
\alias{st_as_sf.osem_measurements}
|
\alias{st_as_sf.osem_measurements}
|
||||||
\title{Convert a \code{osem_measurements} dataframe to an \code{\link[sf]{st_sf}} object.}
|
\title{Convert a \code{osem_measurements} dataframe to an \code{\link[sf]{st_sf}} object.}
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
% Generated by roxygen2: do not edit by hand
|
% Generated by roxygen2: do not edit by hand
|
||||||
% Please edit documentation in R/external_generics.R
|
% Please edit documentation in R/box_utils.R
|
||||||
\name{st_as_sf.sensebox}
|
\name{st_as_sf.sensebox}
|
||||||
\alias{st_as_sf.sensebox}
|
\alias{st_as_sf.sensebox}
|
||||||
\title{Convert a \code{sensebox} dataframe to an \code{\link[sf]{st_sf}} object.}
|
\title{Convert a \code{sensebox} dataframe to an \code{\link[sf]{st_sf}} object.}
|
||||||
|
|
|
@ -1,7 +0,0 @@
|
||||||
context('API error handling')

test_that('unavailable API yields informative error message', {
  expect_error({
    osem_boxes(endpoint = 'example.zip')
  }, 'The API at example.zip is currently not available')
})
@ -1,66 +0,0 @@
|
||||||
source('testhelpers.R')
|
|
||||||
|
|
||||||
context('osem_box_to_archivename()')
|
|
||||||
|
|
||||||
try({
|
|
||||||
boxes = osem_boxes(grouptag = 'ifgi')
|
|
||||||
box = osem_box('593bcd656ccf3b0011791f5a')
|
|
||||||
})
|
|
||||||
|
|
||||||
test_that('osem_box_to_archive_name does the correct character replacements', {
|
|
||||||
b = data.frame(
|
|
||||||
name = 'aA1._- äß!"?$%&/',
|
|
||||||
X_id = 'UUID'
|
|
||||||
)
|
|
||||||
|
|
||||||
archivename = opensensmapr:::osem_box_to_archivename(b)
|
|
||||||
expect_equal(archivename, 'UUID-aA1._-__________')
|
|
||||||
})
|
|
||||||
|
|
||||||
test_that('osem_box_to_archive_name works for one box', {
|
|
||||||
check_api()
|
|
||||||
if (is.null(box)) skip('no box data could be fetched')
|
|
||||||
|
|
||||||
archivename = opensensmapr:::osem_box_to_archivename(box)
|
|
||||||
expect_length(archivename, 1)
|
|
||||||
expect_type(archivename, 'character')
|
|
||||||
})
|
|
||||||
|
|
||||||
test_that('osem_box_to_archive_name works for multiple boxes', {
|
|
||||||
check_api()
|
|
||||||
if (is.null(boxes)) skip('no box data available')
|
|
||||||
|
|
||||||
archivename = opensensmapr:::osem_box_to_archivename(boxes)
|
|
||||||
expect_length(archivename, nrow(boxes))
|
|
||||||
expect_type(archivename, 'character')
|
|
||||||
})
|
|
||||||
|
|
||||||
context('osem_measurements_archive()')
|
|
||||||
|
|
||||||
test_that('osem_measurements_archive works for one box', {
|
|
||||||
check_api()
|
|
||||||
if (is.null(box)) skip('no box data could be fetched')
|
|
||||||
|
|
||||||
m = osem_measurements_archive(box, as.POSIXlt('2018-08-08'))
|
|
||||||
expect_length(m, nrow(box$sensors[[1]]) + 1) # one column for each sensor + createdAt
|
|
||||||
expect_s3_class(m, c('data.frame'))
|
|
||||||
})
|
|
||||||
|
|
||||||
test_that('osem_measurements_archive sensorFilter works for one box', {
|
|
||||||
check_api()
|
|
||||||
if (is.null(box)) skip('no box data could be fetched')
|
|
||||||
|
|
||||||
m = osem_measurements_archive(box, as.POSIXlt('2018-08-08'), sensorFilter = ~ phenomenon == 'Temperatur')
|
|
||||||
expect_length(m, 2) # one column for Temperatur + createdAt
|
|
||||||
expect_s3_class(m, c('data.frame'))
|
|
||||||
})
|
|
||||||
|
|
||||||
test_that('osem_measurements_archive fails for multiple boxes', {
|
|
||||||
check_api()
|
|
||||||
if (is.null(boxes)) skip('no box data available')
|
|
||||||
|
|
||||||
expect_error(
|
|
||||||
osem_measurements_archive(boxes, as.POSIXlt('2018-08-08')),
|
|
||||||
'this function only works for exactly one senseBox!'
|
|
||||||
)
|
|
||||||
})
|
|
|
@ -2,44 +2,20 @@ source('testhelpers.R')
|
||||||
context('box')
|
context('box')
|
||||||
|
|
||||||
try({
|
try({
|
||||||
|
boxes = osem_boxes()
|
||||||
box = osem_box('57000b8745fd40c8196ad04c')
|
box = osem_box('57000b8745fd40c8196ad04c')
|
||||||
})
|
})
|
||||||
|
|
||||||
|
test_that('a single box can be retrieved by ID', {
|
||||||
test_that('required box attributes are correctly parsed', {
|
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
expect_is(box$X_id, 'character')
|
|
||||||
expect_is(box$name, 'character')
|
|
||||||
expect_is(box$exposure, 'character')
|
|
||||||
expect_is(box$model, 'character')
|
|
||||||
expect_is(box$lat, 'numeric')
|
|
||||||
expect_is(box$lon, 'numeric')
|
|
||||||
expect_is(box$createdAt, 'POSIXct')
|
|
||||||
})
|
|
||||||
|
|
||||||
test_that('optional box attributes are correctly parsed', {
|
box = osem_box(boxes$X_id[[1]])
|
||||||
check_api()
|
|
||||||
|
expect_true('sensebox' %in% class(box))
|
||||||
completebox = osem_box('5a676e49411a790019290f94') # all fields populated
|
expect_true('data.frame' %in% class(box))
|
||||||
expect_is(completebox$description, 'character')
|
expect_true(nrow(box) == 1)
|
||||||
expect_is(completebox$grouptag, 'character')
|
expect_true(box$X_id == boxes$X_id[[1]])
|
||||||
expect_is(completebox$weblink, 'character')
|
expect_silent(osem_box(boxes$X_id[[1]]))
|
||||||
expect_is(completebox$updatedAt, 'POSIXct')
|
|
||||||
expect_is(completebox$lastMeasurement, 'POSIXct')
|
|
||||||
expect_is(completebox$height, c('numeric', 'integer'))
|
|
||||||
expect_is(completebox$phenomena, 'list')
|
|
||||||
expect_is(completebox$phenomena[[1]], 'character')
|
|
||||||
expect_is(completebox$sensors, 'list')
|
|
||||||
expect_is(completebox$sensors[[1]], 'data.frame')
|
|
||||||
|
|
||||||
# box with older schema, not recently updated..
|
|
||||||
oldbox = osem_box('539fec94a8341554157931d7')
|
|
||||||
expect_null(oldbox$description)
|
|
||||||
expect_null(oldbox$grouptag)
|
|
||||||
expect_null(oldbox$weblink)
|
|
||||||
expect_null(oldbox$height)
|
|
||||||
expect_null(oldbox$lastMeasurement)
|
|
||||||
})
|
})
|
||||||
|
|
||||||
test_that('unknown box throws', {
|
test_that('unknown box throws', {
|
||||||
|
@ -49,10 +25,13 @@ test_that('unknown box throws', {
|
||||||
expect_error(osem_box('57000b8745fd40c800000000'), 'not found')
|
expect_error(osem_box('57000b8745fd40c800000000'), 'not found')
|
||||||
})
|
})
|
||||||
|
|
||||||
|
test_that('[.sensebox maintains attributes', {
|
||||||
|
check_api()
|
||||||
|
|
||||||
|
expect_true(all(attributes(boxes[1:nrow(boxes), ]) %in% attributes(boxes)))
|
||||||
|
})
|
||||||
|
|
||||||
test_that("print.sensebox filters important attributes for a single box", {
|
test_that("print.sensebox filters important attributes for a single box", {
|
||||||
check_api()
|
|
||||||
|
|
||||||
msg = capture.output({
|
msg = capture.output({
|
||||||
print(box)
|
print(box)
|
||||||
})
|
})
|
||||||
|
@ -60,8 +39,6 @@ test_that("print.sensebox filters important attributes for a single box", {
|
||||||
})
|
})
|
||||||
|
|
||||||
test_that("summary.sensebox outputs all metrics for a single box", {
|
test_that("summary.sensebox outputs all metrics for a single box", {
|
||||||
check_api()
|
|
||||||
|
|
||||||
msg = capture.output({
|
msg = capture.output({
|
||||||
summary(box)
|
summary(box)
|
||||||
})
|
})
|
||||||
|
|
|
@ -1,17 +1,14 @@
|
||||||
source('testhelpers.R')
|
source('testhelpers.R')
|
||||||
context('boxes')
|
context('boxes')
|
||||||
|
|
||||||
try({
|
|
||||||
boxes = osem_boxes()
|
|
||||||
})
|
|
||||||
|
|
||||||
test_that('a list of all boxes can be retrieved and returns a sensebox data.frame', {
|
test_that('a list of all boxes can be retrieved and returns a sensebox data.frame', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
|
boxes = osem_boxes()
|
||||||
expect_true(is.data.frame(boxes))
|
expect_true(is.data.frame(boxes))
|
||||||
expect_true(is.factor(boxes$model))
|
expect_true(is.factor(boxes$model))
|
||||||
expect_true(is.character(boxes$name))
|
expect_true(is.character(boxes$name))
|
||||||
expect_length(names(boxes), 18)
|
expect_length(names(boxes), 14)
|
||||||
expect_true(any('sensebox' %in% class(boxes)))
|
expect_true(any('sensebox' %in% class(boxes)))
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -23,60 +20,53 @@ test_that('both from and to are required when requesting boxes, error otherwise'
|
||||||
test_that('a list of boxes with phenomenon filter returns only the requested phenomenon', {
|
test_that('a list of boxes with phenomenon filter returns only the requested phenomenon', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
boxes_phen = osem_boxes(phenomenon = 'Temperatur', date = Sys.time())
|
boxes = osem_boxes(phenomenon = 'Temperatur', date=Sys.time())
|
||||||
expect_true(all(grep('Temperatur', boxes_phen$phenomena)))
|
expect_true(all(grep('Temperatur', boxes$phenomena)))
|
||||||
})
|
})
|
||||||
|
|
||||||
test_that('a list of boxes with exposure filter returns only the requested exposure', {
|
test_that('a list of boxes with exposure filter returns only the requested exposure', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
boxes_exp = osem_boxes(exposure = 'mobile')
|
boxes = osem_boxes(exposure = 'mobile')
|
||||||
expect_true(all(boxes_exp$exposure == 'mobile'))
|
expect_true(all(boxes$exposure == 'mobile'))
|
||||||
})
|
})
|
||||||
|
|
||||||
test_that('a list of boxes with model filter returns only the requested model', {
|
test_that('a list of boxes with model filter returns only the requested model', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
boxes_mod = osem_boxes(model = 'homeWifi')
|
boxes = osem_boxes(model = 'homeWifi')
|
||||||
expect_true(all(boxes_mod$model == 'homeWifi'))
|
expect_true(all(boxes$model == 'homeWifi'))
|
||||||
})
|
})
|
||||||
|
|
||||||
test_that('box query can combine exposure and model filter', {
|
test_that('box query can combine exposure and model filter', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
boxes_com = osem_boxes(exposure = 'mobile', model = 'homeWifi')
|
boxes = osem_boxes(exposure = 'mobile', model = 'homeWifi')
|
||||||
expect_true(all(boxes_com$model == 'homeWifi'))
|
expect_true(all(boxes$model == 'homeWifi'))
|
||||||
expect_true(all(boxes_com$exposure == 'mobile'))
|
expect_true(all(boxes$exposure == 'mobile'))
|
||||||
})
|
})
|
||||||
|
|
||||||
test_that('a list of boxes with grouptype returns only boxes of that group', {
|
test_that('a list of boxes with grouptype returns only boxes of that group', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
boxes_gro = osem_boxes(grouptag = 'codeformuenster')
|
boxes = osem_boxes(grouptag = 'codeformuenster')
|
||||||
expect_true(all(boxes_gro$grouptag == 'codeformuenster'))
|
expect_true(all(boxes$grouptag == 'codeformuenster'))
|
||||||
})
|
|
||||||
|
|
||||||
test_that('a list of boxes within a bbox only returns boxes within that bbox', {
|
|
||||||
check_api()
|
|
||||||
|
|
||||||
boxes_box = osem_boxes(bbox = c(7.8, 51.8, 8.0, 52.0))
|
|
||||||
expect_true(all(boxes_box$lon > 7.8 & boxes_box$lon < 8.0 & boxes_box$lat > 51.8 & boxes_box$lat < 52.0))
|
|
||||||
})
|
})
|
||||||
|
|
||||||
test_that('endpoint can be (mis)configured', {
|
test_that('endpoint can be (mis)configured', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
expect_error(osem_boxes(endpoint = 'http://not.the.opensensemap.org'), 'The API at http://not.the.opensensemap.org is currently not available.')
|
expect_error(osem_boxes(endpoint = 'http://not.the.opensensemap.org'), 'resolve host')
|
||||||
})
|
})
|
||||||
|
|
||||||
test_that('a response with no matches returns empty sensebox data.frame', {
|
test_that('a response with no matches returns empty sensebox data.frame', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
suppressWarnings({
|
suppressWarnings({
|
||||||
boxes_gro = osem_boxes(grouptag = 'does_not_exist')
|
boxes = osem_boxes(grouptag = 'does_not_exist')
|
||||||
})
|
})
|
||||||
expect_true(is.data.frame(boxes_gro))
|
expect_true(is.data.frame(boxes))
|
||||||
expect_true(any('sensebox' %in% class(boxes_gro)))
|
expect_true(any('sensebox' %in% class(boxes)))
|
||||||
})
|
})
|
||||||
|
|
||||||
test_that('a response with no matches gives a warning', {
|
test_that('a response with no matches gives a warning', {
|
||||||
|
@ -93,7 +83,7 @@ test_that('data.frame can be converted to sensebox data.frame', {
|
||||||
test_that('boxes can be converted to sf object', {
|
test_that('boxes can be converted to sf object', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
# boxes = osem_boxes()
|
boxes = osem_boxes()
|
||||||
boxes_sf = sf::st_as_sf(boxes)
|
boxes_sf = sf::st_as_sf(boxes)
|
||||||
|
|
||||||
expect_true(all(sf::st_is_simple(boxes_sf)))
|
expect_true(all(sf::st_is_simple(boxes_sf)))
|
||||||
|
@ -103,7 +93,7 @@ test_that('boxes can be converted to sf object', {
|
||||||
test_that('boxes converted to sf object keep all attributes', {
|
test_that('boxes converted to sf object keep all attributes', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
# boxes = osem_boxes()
|
boxes = osem_boxes()
|
||||||
boxes_sf = sf::st_as_sf(boxes)
|
boxes_sf = sf::st_as_sf(boxes)
|
||||||
|
|
||||||
# coord columns get removed!
|
# coord columns get removed!
|
||||||
|
@ -127,7 +117,7 @@ test_that('box retrieval does not give progress information in non-interactive m
|
||||||
test_that('print.sensebox filters important attributes for a set of boxes', {
|
test_that('print.sensebox filters important attributes for a set of boxes', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
# boxes = osem_boxes()
|
boxes = osem_boxes()
|
||||||
msg = capture.output({
|
msg = capture.output({
|
||||||
print(boxes)
|
print(boxes)
|
||||||
})
|
})
|
||||||
|
@ -137,7 +127,7 @@ test_that('print.sensebox filters important attributes for a set of boxes', {
|
||||||
test_that('summary.sensebox outputs all metrics for a set of boxes', {
|
test_that('summary.sensebox outputs all metrics for a set of boxes', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
# boxes = osem_boxes()
|
boxes = osem_boxes()
|
||||||
msg = capture.output({
|
msg = capture.output({
|
||||||
summary(boxes)
|
summary(boxes)
|
||||||
})
|
})
|
||||||
|
@ -175,45 +165,3 @@ test_that('requests can be cached', {
|
||||||
osem_clear_cache()
|
osem_clear_cache()
|
||||||
expect_length(list.files(tempdir(), pattern = 'osemcache\\..*\\.rds'), 0)
|
expect_length(list.files(tempdir(), pattern = 'osemcache\\..*\\.rds'), 0)
|
||||||
})
|
})
|
||||||
|
|
||||||
context('single box from boxes')
|
|
||||||
test_that('a single box can be retrieved by ID', {
|
|
||||||
check_api()
|
|
||||||
|
|
||||||
box = osem_box(boxes$X_id[[1]])
|
|
||||||
|
|
||||||
expect_true('sensebox' %in% class(box))
|
|
||||||
expect_true('data.frame' %in% class(box))
|
|
||||||
expect_true(nrow(box) == 1)
|
|
||||||
expect_true(box$X_id == boxes$X_id[[1]])
|
|
||||||
expect_silent(osem_box(boxes$X_id[[1]]))
|
|
||||||
})
|
|
||||||
|
|
||||||
test_that('[.sensebox maintains attributes', {
|
|
||||||
check_api()
|
|
||||||
|
|
||||||
expect_true(all(attributes(boxes[1:nrow(boxes), ]) %in% attributes(boxes)))
|
|
||||||
})
|
|
||||||
|
|
||||||
context('measurements boxes')
|
|
||||||
test_that('measurements of specific boxes can be retrieved for one phenomenon and returns a measurements data.frame', {
|
|
||||||
check_api()
|
|
||||||
|
|
||||||
# fix for subsetting
|
|
||||||
class(boxes) = c('data.frame')
|
|
||||||
three_boxes = boxes[1:3, ]
|
|
||||||
class(boxes) = c('sensebox', 'data.frame')
|
|
||||||
three_boxes = osem_as_sensebox(three_boxes)
|
|
||||||
phens = names(osem_phenomena(three_boxes))
|
|
||||||
|
|
||||||
measurements = osem_measurements(x = three_boxes, phenomenon = phens[[1]])
|
|
||||||
expect_true(is.data.frame(measurements))
|
|
||||||
expect_true('osem_measurements' %in% class(measurements))
|
|
||||||
})
|
|
||||||
|
|
||||||
test_that('phenomenon is required when requesting measurements, error otherwise', {
|
|
||||||
check_api()
|
|
||||||
|
|
||||||
expect_error(osem_measurements(boxes), 'Parameter "phenomenon" is required')
|
|
||||||
})
|
|
||||||
|
|
||||||
|
|
|
@ -1,13 +1,15 @@
|
||||||
source('testhelpers.R')
|
source('testhelpers.R')
|
||||||
context('measurements')
|
context('measurements')
|
||||||
|
|
||||||
|
try({
|
||||||
|
boxes = osem_boxes()
|
||||||
|
})
|
||||||
|
|
||||||
test_that('measurements can be retrieved for a phenomenon', {
|
test_that('measurements can be retrieved for a phenomenon', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
measurements = osem_measurements('Windgeschwindigkeit')
|
|
||||||
measurements = osem_measurements(x = 'Windgeschwindigkeit')
|
measurements = osem_measurements(x = 'Windgeschwindigkeit')
|
||||||
expect_true(tibble::is_tibble(measurements))
|
expect_true(is.data.frame(measurements))
|
||||||
expect_true('osem_measurements' %in% class(measurements))
|
expect_true('osem_measurements' %in% class(measurements))
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -25,7 +27,12 @@ test_that('measurement retrieval does not give progress information in non-inter
|
||||||
test_that('a response with no matching senseBoxes gives an error', {
|
test_that('a response with no matching senseBoxes gives an error', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
expect_error(osem_measurements(x = 'foobar', exposure = 'indoor'), 'No senseBoxes found')
|
expect_error(osem_measurements(x = 'Windgeschwindigkeit', exposure = 'indoor'), 'No senseBoxes found')
|
||||||
|
})
|
||||||
|
|
||||||
|
test_that('data.frame can be converted to measurements data.frame', {
|
||||||
|
df = osem_as_measurements(data.frame(c(1, 2), c('a', 'b')))
|
||||||
|
expect_equal(class(df), c('osem_measurements', 'data.frame'))
|
||||||
})
|
})
|
||||||
|
|
||||||
test_that('columns can be specified for phenomena', {
|
test_that('columns can be specified for phenomena', {
|
||||||
|
@ -44,6 +51,20 @@ test_that('measurements can be retrieved for a phenomenon and exposure', {
|
||||||
expect_equal(nrow(measurements), 0)
|
expect_equal(nrow(measurements), 0)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
test_that('measurements of specific boxes can be retrieved for one phenomenon and returns a measurements data.frame', {
|
||||||
|
check_api()
|
||||||
|
|
||||||
|
# fix for subsetting
|
||||||
|
class(boxes) = c('data.frame')
|
||||||
|
three_boxes = boxes[1:3, ]
|
||||||
|
class(boxes) = c('sensebox', 'data.frame')
|
||||||
|
three_boxes = osem_as_sensebox(three_boxes)
|
||||||
|
phens = names(osem_phenomena(three_boxes))
|
||||||
|
|
||||||
|
measurements = osem_measurements(x = three_boxes, phenomenon = phens[[1]])
|
||||||
|
expect_true(is.data.frame(measurements))
|
||||||
|
expect_true('osem_measurements' %in% class(measurements))
|
||||||
|
})
|
||||||
|
|
||||||
test_that('measurements can be retrieved for a bounding box', {
|
test_that('measurements can be retrieved for a bounding box', {
|
||||||
check_api()
|
check_api()
|
||||||
|
@ -87,7 +108,8 @@ test_that('both from and to are required when requesting measurements, error oth
|
||||||
test_that('phenomenon is required when requesting measurements, error otherwise', {
|
test_that('phenomenon is required when requesting measurements, error otherwise', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
expect_error(osem_measurements())
|
expect_error(osem_measurements(), 'missing, with no default')
|
||||||
|
expect_error(osem_measurements(boxes), 'Parameter "phenomenon" is required')
|
||||||
|
|
||||||
sfc = sf::st_sfc(sf::st_linestring(x = matrix(data = c(7, 8, 50, 51), ncol = 2)), crs = 4326)
|
sfc = sf::st_sfc(sf::st_linestring(x = matrix(data = c(7, 8, 50, 51), ncol = 2)), crs = 4326)
|
||||||
bbox = sf::st_bbox(sfc)
|
bbox = sf::st_bbox(sfc)
|
||||||
|
@ -104,13 +126,6 @@ test_that('[.osem_measurements maintains attributes', {
|
||||||
expect_true(all(attributes(m[1:nrow(m), ]) %in% attributes(m)))
|
expect_true(all(attributes(m[1:nrow(m), ]) %in% attributes(m)))
|
||||||
})
|
})
|
||||||
|
|
||||||
test_that('data.frame can be converted to measurements data.frame', {
|
|
||||||
check_api()
|
|
||||||
m = osem_measurements('Windrichtung')
|
|
||||||
df = osem_as_measurements(data.frame(c(1, 2), c('a', 'b')))
|
|
||||||
expect_equal(class(df), class(m))
|
|
||||||
})
|
|
||||||
|
|
||||||
test_that('requests can be cached', {
|
test_that('requests can be cached', {
|
||||||
check_api()
|
check_api()
|
||||||
|
|
||||||
|
|
|
@ -25,8 +25,6 @@ test_that('phenomena from boxes has all phenomena', {
|
||||||
})
|
})
|
||||||
|
|
||||||
test_that('phenomena from a not sensebox data.frame returns error', {
|
test_that('phenomena from a not sensebox data.frame returns error', {
|
||||||
check_api()
|
|
||||||
|
|
||||||
expect_error(osem_phenomena(list()), 'no applicable method')
|
expect_error(osem_phenomena(list()), 'no applicable method')
|
||||||
expect_error(osem_phenomena(data.frame()), 'no applicable method')
|
expect_error(osem_phenomena(data.frame()), 'no applicable method')
|
||||||
boxes_df = boxes
|
boxes_df = boxes
|
||||||
|
|
|
@ -7,7 +7,7 @@ RUN apt-get update && \
|
||||||
RUN Rscript -e 'install.packages("sf")'
|
RUN Rscript -e 'install.packages("sf")'
|
||||||
RUN Rscript -e 'install.packages("magrittr")'
|
RUN Rscript -e 'install.packages("magrittr")'
|
||||||
RUN Rscript -e 'install.packages("devtools")'
|
RUN Rscript -e 'install.packages("devtools")'
|
||||||
RUN Rscript -e 'devtools::install_github("sensebox/opensensmapR")'
|
RUN Rscript -e 'devtools::install_github("noerw/opensensmapR")'
|
||||||
|
|
||||||
# install crontab
|
# install crontab
|
||||||
COPY crontab /crontab
|
COPY crontab /crontab
|
||||||
|
|
|
@ -21,7 +21,7 @@ docker run -v $(pwd)/data:/script/data osem-monitr
|
||||||
```bash
|
```bash
|
||||||
# install dependencies once
|
# install dependencies once
|
||||||
Rscript -e 'install.packages(c("dplyr", "magrittr", "devtools"))'
|
Rscript -e 'install.packages(c("dplyr", "magrittr", "devtools"))'
|
||||||
Rscript -e 'devtools::install_github("sensebox/opensensmapR")'
|
Rscript -e 'devtools::install_github("noerw/opensensmapR")'
|
||||||
|
|
||||||
Rscript --save --restore get-counts.R
|
Rscript --save --restore get-counts.R
|
||||||
Rscript --save --restore get-boxes.R
|
Rscript --save --restore get-boxes.R
|
||||||
|
|
Binary file not shown.
|
@ -43,10 +43,7 @@ So the first step is to retrieve *all the boxes*:
|
||||||
```{r download}
|
```{r download}
|
||||||
# if you want to see results for a specific subset of boxes,
|
# if you want to see results for a specific subset of boxes,
|
||||||
# just specify a filter such as grouptag='ifgi' here
|
# just specify a filter such as grouptag='ifgi' here
|
||||||
|
boxes = osem_boxes()
|
||||||
# boxes = osem_boxes(cache = '.')
|
|
||||||
boxes = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
# Plot count of boxes by time {.tabset}
|
# Plot count of boxes by time {.tabset}
|
||||||
|
@ -71,7 +68,7 @@ ggplot(exposure_counts, aes(x = createdAt, y = count, colour = exposure)) +
|
||||||
Outdoor boxes are growing *fast*!
|
Outdoor boxes are growing *fast*!
|
||||||
We can also see the introduction of `mobile` sensor "stations" in 2017. While
|
We can also see the introduction of `mobile` sensor "stations" in 2017. While
|
||||||
mobile boxes are still few, we can expect a quick rise in 2018 once the new
|
mobile boxes are still few, we can expect a quick rise in 2018 once the new
|
||||||
senseBox MCU with GPS support is released.
|
[senseBox MCU with GPS support is released](https://sensebox.de/blog/2018-03-06-senseBox_MCU).
|
||||||
|
|
||||||
Let's have a quick summary:
|
Let's have a quick summary:
|
||||||
```{r exposure_summary}
|
```{r exposure_summary}
|
||||||
|
@ -96,7 +93,7 @@ inconsistent (`Luftdaten`, `luftdaten.info`, ...)
|
||||||
grouptag_counts = boxes %>%
|
grouptag_counts = boxes %>%
|
||||||
group_by(grouptag) %>%
|
group_by(grouptag) %>%
|
||||||
# only include grouptags with 8 or more members
|
# only include grouptags with 8 or more members
|
||||||
filter(length(grouptag) >= 8 & !is.na(grouptag)) %>%
|
filter(length(grouptag) >= 8 && !is.na(grouptag)) %>%
|
||||||
mutate(count = row_number(createdAt))
|
mutate(count = row_number(createdAt))
|
||||||
|
|
||||||
# helper for sorting the grouptags by boxcount
|
# helper for sorting the grouptags by boxcount
|
||||||
|
@ -166,7 +163,7 @@ ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
||||||
|
|
||||||
We see a sudden rise in early 2017, which lines up with the fast growing grouptag `Luftdaten`.
|
We see a sudden rise in early 2017, which lines up with the fast growing grouptag `Luftdaten`.
|
||||||
This was enabled by an integration of openSenseMap.org into the firmware of the
|
This was enabled by an integration of openSenseMap.org into the firmware of the
|
||||||
air quality monitoring project [luftdaten.info](https://sensor.community/de/).
|
air quality monitoring project [luftdaten.info](https://luftdaten.info).
|
||||||
The dips in mid 2017 and early 2018 could possibly be explained by production/delivery issues
|
The dips in mid 2017 and early 2018 could possibly be explained by production/delivery issues
|
||||||
of the senseBox hardware, but I have no data on the exact time frames to verify.
|
of the senseBox hardware, but I have no data on the exact time frames to verify.
|
||||||
|
|
||||||
|
@ -195,7 +192,7 @@ spanning a large chunk of openSenseMap's existence.
|
||||||
duration = boxes %>%
|
duration = boxes %>%
|
||||||
group_by(grouptag) %>%
|
group_by(grouptag) %>%
|
||||||
# only include grouptags with 8 or more members
|
# only include grouptags with 8 or more members
|
||||||
filter(length(grouptag) >= 8 & !is.na(grouptag) & !is.na(updatedAt)) %>%
|
filter(length(grouptag) >= 8 && !is.na(grouptag) && !is.na(updatedAt)) %>%
|
||||||
mutate(duration = difftime(updatedAt, createdAt, units='days'))
|
mutate(duration = difftime(updatedAt, createdAt, units='days'))
|
||||||
|
|
||||||
ggplot(duration, aes(x = grouptag, y = duration)) +
|
ggplot(duration, aes(x = grouptag, y = duration)) +
|
||||||
|
@ -243,4 +240,4 @@ If you implemented some, feel free to add them to this vignette via a [Pull Requ
|
||||||
* filter timespans with big dips in growth rate, and extrapolate the amount of
|
* filter timespans with big dips in growth rate, and extrapolate the amount of
|
||||||
senseBoxes that could be on the platform today, assuming there were no production issues ;)
|
senseBoxes that could be on the platform today, assuming there were no production issues ;)
|
||||||
|
|
||||||
[PR]: https://github.com/sensebox/opensensmapr/pulls
|
[PR]: https://github.com/noerw/opensensmapr/pulls
|
||||||
|
|
|
@ -1,297 +0,0 @@
|
||||||
---
|
|
||||||
title: "Visualising the Development of openSenseMap.org in 2022"
|
|
||||||
author: "Jan Stenkamp"
|
|
||||||
date: '`r Sys.Date()`'
|
|
||||||
output:
|
|
||||||
html_document:
|
|
||||||
code_folding: hide
|
|
||||||
df_print: kable
|
|
||||||
theme: lumen
|
|
||||||
toc: yes
|
|
||||||
toc_float: yes
|
|
||||||
rmarkdown::html_vignette:
|
|
||||||
df_print: kable
|
|
||||||
fig_height: 5
|
|
||||||
fig_width: 7
|
|
||||||
toc: yes
|
|
||||||
vignette: >
|
|
||||||
%\VignetteIndexEntry{Visualising the Development of openSenseMap.org in 2022}
|
|
||||||
%\VignetteEncoding{UTF-8}
|
|
||||||
%\VignetteEngine{knitr::rmarkdown}
|
|
||||||
---
|
|
||||||
|
|
||||||
> This vignette serves as an example on data wrangling & visualization with
|
|
||||||
`opensensmapr`, `dplyr` and `ggplot2`.
|
|
||||||
|
|
||||||
```{r setup, results='hide', message=FALSE, warning=FALSE}
|
|
||||||
# required packages:
|
|
||||||
library(opensensmapr) # data download
|
|
||||||
library(dplyr) # data wrangling
|
|
||||||
library(ggplot2) # plotting
|
|
||||||
library(lubridate) # date arithmetic
|
|
||||||
library(zoo) # rollmean()
|
|
||||||
```
|
|
||||||
|
|
||||||
openSenseMap.org has grown quite a bit in the last years; it would be interesting
|
|
||||||
to see how we got to the current `r osem_counts()$boxes` sensor stations,
|
|
||||||
split up by various attributes of the boxes.
|
|
||||||
|
|
||||||
While `opensensmapr` provides extensive methods of filtering boxes by attributes
|
|
||||||
on the server, we do the filtering within R to save time and gain flexibility.
|
|
||||||
|
|
||||||
|
|
||||||
So the first step is to retrieve *all the boxes*.
|
|
||||||
|
|
||||||
```{r download, results='hide', message=FALSE, warning=FALSE}
|
|
||||||
# if you want to see results for a specific subset of boxes,
|
|
||||||
# just specify a filter such as grouptag='ifgi' here
|
|
||||||
|
|
||||||
# boxes = osem_boxes(cache = '.')
|
|
||||||
boxes = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
|
||||||
```
|
|
||||||
# Introduction
|
|
||||||
In the following we just want to have a look at the boxes created in 2022, so we filter for them.
|
|
||||||
|
|
||||||
```{r}
|
|
||||||
boxes = filter(boxes, locationtimestamp >= "2022-01-01" & locationtimestamp <="2022-12-31")
|
|
||||||
summary(boxes) -> summary.data.frame
|
|
||||||
```
|
|
||||||
|
|
||||||
<!-- This gives a good overview already: As of writing this, there are more than 11,000 -->
|
|
||||||
<!-- sensor stations, of which ~30% are currently running. Most of them are placed -->
|
|
||||||
<!-- outdoors and have around 5 sensors each. -->
|
|
||||||
<!-- The oldest station is from August 2016, while the latest station was registered a -->
|
|
||||||
<!-- couple of minutes ago. -->
|
|
||||||
|
|
||||||
Another feature of interest is the spatial distribution of the boxes: `plot()`
|
|
||||||
can help us out here. This function requires a bunch of optional dependencies though.
|
|
||||||
|
|
||||||
```{r, message=FALSE, warning=FALSE}
|
|
||||||
plot(boxes)
|
|
||||||
```
|
|
||||||
|
|
||||||
But what do these sensor stations actually measure? Lets find out.
|
|
||||||
`osem_phenomena()` gives us a named list of of the counts of each observed
|
|
||||||
phenomenon for the given set of sensor stations:
|
|
||||||
|
|
||||||
```{r}
|
|
||||||
phenoms = osem_phenomena(boxes)
|
|
||||||
str(phenoms)
|
|
||||||
```
|
|
||||||
|
|
||||||
Thats quite some noise there, with many phenomena being measured by a single
|
|
||||||
sensor only, or many duplicated phenomena due to slightly different spellings.
|
|
||||||
We should clean that up, but for now let's just filter out the noise and find
|
|
||||||
those phenomena with high sensor numbers:
|
|
||||||
|
|
||||||
```{r}
|
|
||||||
phenoms[phenoms > 50]
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
# Plot count of boxes by time {.tabset}
|
|
||||||
By looking at the `createdAt` attribute of each box we know the exact time a box
|
|
||||||
was registered. Because of some database migration issues the `createdAt` values are mostly wrong (~80% of boxes created 2022-03-30), so we are using the `timestamp` attribute of the `currentlocation` which should in most cases correspond to the creation date.
|
|
||||||
|
|
||||||
With this approach we have no information about boxes that were deleted in the
|
|
||||||
meantime, but that's okay for now.
|
|
||||||
|
|
||||||
## ...and exposure
|
|
||||||
```{r exposure_counts, message=FALSE}
|
|
||||||
exposure_counts = boxes %>%
|
|
||||||
group_by(exposure) %>%
|
|
||||||
mutate(count = row_number(locationtimestamp))
|
|
||||||
|
|
||||||
exposure_colors = c(indoor = 'red', outdoor = 'lightgreen', mobile = 'blue', unknown = 'darkgrey')
|
|
||||||
ggplot(exposure_counts, aes(x = locationtimestamp, y = count, colour = exposure)) +
|
|
||||||
geom_line() +
|
|
||||||
scale_colour_manual(values = exposure_colors) +
|
|
||||||
xlab('Registration Date') + ylab('senseBox count')
|
|
||||||
```
|
|
||||||
|
|
||||||
Outdoor boxes are growing *fast*!
|
|
||||||
We can also see the introduction of `mobile` sensor "stations" in 2017.
|
|
||||||
|
|
||||||
Let's have a quick summary:
|
|
||||||
```{r exposure_summary}
|
|
||||||
exposure_counts %>%
|
|
||||||
summarise(
|
|
||||||
oldest = min(locationtimestamp),
|
|
||||||
newest = max(locationtimestamp),
|
|
||||||
count = max(count)
|
|
||||||
) %>%
|
|
||||||
arrange(desc(count))
|
|
||||||
```
|
|
||||||
|
|
||||||
## ...and grouptag
|
|
||||||
We can try to find out where the increases in growth came from, by analysing the
|
|
||||||
box count by grouptag.
|
|
||||||
|
|
||||||
Caveats: Only a small subset of boxes has a grouptag, and we should assume
|
|
||||||
that these groups are actually bigger. Also, we can see that grouptag naming is
|
|
||||||
inconsistent (`Luftdaten`, `luftdaten.info`, ...)
|
|
||||||
|
|
||||||
```{r grouptag_counts, message=FALSE}
|
|
||||||
grouptag_counts = boxes %>%
|
|
||||||
group_by(grouptag) %>%
|
|
||||||
# only include grouptags with 15 or more members
|
|
||||||
filter(length(grouptag) >= 15 & !is.na(grouptag) & grouptag != '') %>%
|
|
||||||
mutate(count = row_number(locationtimestamp))
|
|
||||||
|
|
||||||
# helper for sorting the grouptags by boxcount
|
|
||||||
sortLvls = function(oldFactor, ascending = TRUE) {
|
|
||||||
lvls = table(oldFactor) %>% sort(., decreasing = !ascending) %>% names()
|
|
||||||
factor(oldFactor, levels = lvls)
|
|
||||||
}
|
|
||||||
grouptag_counts$grouptag = sortLvls(grouptag_counts$grouptag, ascending = FALSE)
|
|
||||||
|
|
||||||
ggplot(grouptag_counts, aes(x = locationtimestamp, y = count, colour = grouptag)) +
|
|
||||||
geom_line(aes(group = grouptag)) +
|
|
||||||
xlab('Registration Date') + ylab('senseBox count')
|
|
||||||
```
|
|
||||||
|
|
||||||
```{r grouptag_summary}
|
|
||||||
grouptag_counts %>%
|
|
||||||
summarise(
|
|
||||||
oldest = min(locationtimestamp),
|
|
||||||
newest = max(locationtimestamp),
|
|
||||||
count = max(count)
|
|
||||||
) %>%
|
|
||||||
arrange(desc(count))
|
|
||||||
```
|
|
||||||
|
|
||||||
# Plot rate of growth and inactivity per week
|
|
||||||
First we group the boxes by `locationtimestamp` into bins of one week:
|
|
||||||
```{r growthrate_registered, warning=FALSE, message=FALSE, results='hide'}
|
|
||||||
bins = 'week'
|
|
||||||
mvavg_bins = 6
|
|
||||||
|
|
||||||
growth = boxes %>%
|
|
||||||
mutate(week = cut(as.Date(locationtimestamp), breaks = bins)) %>%
|
|
||||||
group_by(week) %>%
|
|
||||||
summarize(count = length(week)) %>%
|
|
||||||
mutate(event = 'registered')
|
|
||||||
```
|
|
||||||
|
|
||||||
We can do the same for `updatedAt`, which informs us about the last change to
|
|
||||||
a box, including uploaded measurements. As a lot of boxes were "updated" by the database
|
|
||||||
migration, many of them are updated at 2022-03-30, so we try to use the `lastMeasurement`
|
|
||||||
attribute instead of `updatedAt`. This leads to fewer boxes but also automatically excludes
|
|
||||||
boxes which were created but never made a measurement.
|
|
||||||
|
|
||||||
This method of determining inactive boxes is fairly inaccurate and should be
|
|
||||||
considered an approximation, because we have no information about intermediate
|
|
||||||
inactive phases.
|
|
||||||
Also deleted boxes would probably have a big impact here.
|
|
||||||
```{r growthrate_inactive, warning=FALSE, message=FALSE, results='hide'}
|
|
||||||
inactive = boxes %>%
|
|
||||||
# remove boxes that were updated in the last two days,
|
|
||||||
# b/c any box becomes inactive at some point by definition of updatedAt
|
|
||||||
filter(lastMeasurement < now() - days(2)) %>%
|
|
||||||
mutate(week = cut(as.Date(lastMeasurement), breaks = bins)) %>%
|
|
||||||
filter(as.Date(week) > as.Date("2021-12-31")) %>%
|
|
||||||
group_by(week) %>%
|
|
||||||
summarize(count = length(week)) %>%
|
|
||||||
mutate(event = 'inactive')
|
|
||||||
```
|
|
||||||
|
|
||||||
Now we can combine both datasets for plotting:
|
|
||||||
```{r growthrate, warning=FALSE, message=FALSE, results='hide'}
|
|
||||||
boxes_by_date = bind_rows(growth, inactive) %>% group_by(event)
|
|
||||||
|
|
||||||
ggplot(boxes_by_date, aes(x = as.Date(week), colour = event)) +
|
|
||||||
xlab('Time') + ylab(paste('rate per ', bins)) +
|
|
||||||
scale_x_date(date_breaks="years", date_labels="%Y") +
|
|
||||||
scale_colour_manual(values = c(registered = 'lightgreen', inactive = 'grey')) +
|
|
||||||
geom_point(aes(y = count), size = 0.5) +
|
|
||||||
# moving average, make first and last value NA (to ensure identical length of vectors)
|
|
||||||
geom_line(aes(y = rollmean(count, mvavg_bins, fill = list(NA, NULL, NA))))
|
|
||||||
```
|
|
||||||
|
|
||||||
And see in which weeks the most boxes become (in)active:
|
|
||||||
```{r table_mostregistrations}
|
|
||||||
boxes_by_date %>%
|
|
||||||
filter(count > 50) %>%
|
|
||||||
arrange(desc(count))
|
|
||||||
```
|
|
||||||
|
|
||||||
# Plot duration of boxes being active {.tabset}
|
|
||||||
While we are looking at `locationtimestamp` and `lastMeasurement`, we can also extract the duration of activity
|
|
||||||
of each box, and look at metrics by exposure and grouptag once more:
|
|
||||||
|
|
||||||
## ...by exposure
|
|
||||||
```{r exposure_duration, message=FALSE}
|
|
||||||
durations = boxes %>%
|
|
||||||
group_by(exposure) %>%
|
|
||||||
filter(!is.na(lastMeasurement)) %>%
|
|
||||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
|
||||||
filter(duration >= 0)
|
|
||||||
|
|
||||||
ggplot(durations, aes(x = exposure, y = duration)) +
|
|
||||||
geom_boxplot() +
|
|
||||||
coord_flip() + ylab('Duration active in Days')
|
|
||||||
```
|
|
||||||
|
|
||||||
The time of activity averages at only `r round(mean(durations$duration))` days,
|
|
||||||
though there are boxes with `r round(max(durations$duration))` days of activity,
|
|
||||||
spanning a large chunk of openSenseMap's existence.
|
|
||||||
|
|
||||||
## ...by grouptag
|
|
||||||
```{r grouptag_duration, message=FALSE}
|
|
||||||
durations = boxes %>%
|
|
||||||
filter(!is.na(lastMeasurement)) %>%
|
|
||||||
group_by(grouptag) %>%
|
|
||||||
# only include grouptags with 20 or more members
|
|
||||||
filter(length(grouptag) >= 15 & !is.na(grouptag) & !is.na(lastMeasurement)) %>%
|
|
||||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
|
||||||
filter(duration >= 0)
|
|
||||||
|
|
||||||
ggplot(durations, aes(x = grouptag, y = duration)) +
|
|
||||||
geom_boxplot() +
|
|
||||||
coord_flip() + ylab('Duration active in Days')
|
|
||||||
|
|
||||||
durations %>%
|
|
||||||
summarize(
|
|
||||||
duration_avg = round(mean(duration)),
|
|
||||||
duration_min = round(min(duration)),
|
|
||||||
duration_max = round(max(duration)),
|
|
||||||
oldest_box = round(max(difftime(now(), locationtimestamp, units='days')))
|
|
||||||
) %>%
|
|
||||||
arrange(desc(duration_avg))
|
|
||||||
```
|
|
||||||
|
|
||||||
The time of activity averages at only `r round(mean(durations$duration))` days,
|
|
||||||
though there are boxes with `r round(max(durations$duration))` days of activity,
|
|
||||||
spanning a large chunk of openSenseMap's existence.
|
|
||||||
|
|
||||||
## ...by year of registration
|
|
||||||
This is less useful, as older boxes are active for a longer time by definition.
|
|
||||||
If you have an idea how to compensate for that, please send a [Pull Request][PR]!
|
|
||||||
|
|
||||||
```{r year_duration, message=FALSE}
|
|
||||||
# NOTE: boxes older than 2016 missing due to missing updatedAt in database
|
|
||||||
duration = boxes %>%
|
|
||||||
mutate(year = cut(as.Date(locationtimestamp), breaks = 'year')) %>%
|
|
||||||
group_by(year) %>%
|
|
||||||
filter(!is.na(lastMeasurement)) %>%
|
|
||||||
mutate(duration = difftime(lastMeasurement, locationtimestamp, units='days')) %>%
|
|
||||||
filter(duration >= 0)
|
|
||||||
|
|
||||||
ggplot(duration, aes(x = substr(as.character(year), 0, 4), y = duration)) +
|
|
||||||
geom_boxplot() +
|
|
||||||
coord_flip() + ylab('Duration active in Days') + xlab('Year of Registration')
|
|
||||||
```
|
|
||||||
|
|
||||||
# More Visualisations
|
|
||||||
Other visualisations come to mind, and are left as an exercise to the reader.
|
|
||||||
If you implemented some, feel free to add them to this vignette via a [Pull Request][PR].
|
|
||||||
|
|
||||||
* growth by phenomenon
|
|
||||||
* growth by location -> (interactive) map
|
|
||||||
* set inactive rate in relation to total box count
|
|
||||||
* filter timespans with big dips in growth rate, and extrapolate the amount of
|
|
||||||
senseBoxes that could be on the platform today, assuming there were no production issues ;)
|
|
||||||
|
|
||||||
[PR]: https://github.com/sensebox/opensensmapr/pulls
|
|
||||||
|
|
||||||
|
|
|
@ -18,7 +18,7 @@ knitr::opts_chunk$set(echo = TRUE)
|
||||||
```
|
```
|
||||||
|
|
||||||
This package provides data ingestion functions for almost any data stored on the
|
This package provides data ingestion functions for almost any data stored on the
|
||||||
open data platform for environmental sensordata <https://opensensemap.org>.
|
open data platform for environemental sensordata <https://opensensemap.org>.
|
||||||
Its main goals are to provide means for:
|
Its main goals are to provide means for:
|
||||||
|
|
||||||
- big data analysis of the measurements stored on the platform
|
- big data analysis of the measurements stored on the platform
|
||||||
|
@ -28,12 +28,11 @@ Its main goals are to provide means for:
|
||||||
Before we look at actual observations, lets get a grasp of the openSenseMap
|
Before we look at actual observations, lets get a grasp of the openSenseMap
|
||||||
datasets' structure.
|
datasets' structure.
|
||||||
|
|
||||||
```{r results = FALSE}
|
```{r results = F}
|
||||||
library(magrittr)
|
library(magrittr)
|
||||||
library(opensensmapr)
|
library(opensensmapr)
|
||||||
|
|
||||||
# all_sensors = osem_boxes(cache = '.')
|
all_sensors = osem_boxes()
|
||||||
all_sensors = readRDS('boxes_precomputed.rds') # read precomputed file to save resources
|
|
||||||
```
|
```
|
||||||
```{r}
|
```{r}
|
||||||
summary(all_sensors)
|
summary(all_sensors)
|
||||||
|
@ -48,7 +47,11 @@ couple of minutes ago.
|
||||||
Another feature of interest is the spatial distribution of the boxes: `plot()`
|
Another feature of interest is the spatial distribution of the boxes: `plot()`
|
||||||
can help us out here. This function requires a bunch of optional dependencies though.
|
can help us out here. This function requires a bunch of optional dependencies though.
|
||||||
|
|
||||||
```{r, message=FALSE, warning=FALSE}
|
```{r message=F, warning=F}
|
||||||
|
if (!require('maps')) install.packages('maps')
|
||||||
|
if (!require('maptools')) install.packages('maptools')
|
||||||
|
if (!require('rgeos')) install.packages('rgeos')
|
||||||
|
|
||||||
plot(all_sensors)
|
plot(all_sensors)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -78,7 +81,7 @@ We should check how many sensor stations provide useful data: We want only those
|
||||||
boxes with a PM2.5 sensor, that are placed outdoors and are currently submitting
|
boxes with a PM2.5 sensor, that are placed outdoors and are currently submitting
|
||||||
measurements:
|
measurements:
|
||||||
|
|
||||||
```{r results = FALSE, eval=FALSE}
|
```{r results = F}
|
||||||
pm25_sensors = osem_boxes(
|
pm25_sensors = osem_boxes(
|
||||||
exposure = 'outdoor',
|
exposure = 'outdoor',
|
||||||
date = Sys.time(), # ±4 hours
|
date = Sys.time(), # ±4 hours
|
||||||
|
@ -86,8 +89,6 @@ pm25_sensors = osem_boxes(
|
||||||
)
|
)
|
||||||
```
|
```
|
||||||
```{r}
|
```{r}
|
||||||
pm25_sensors = readRDS('pm25_sensors.rds') # read precomputed file to save resources
|
|
||||||
|
|
||||||
summary(pm25_sensors)
|
summary(pm25_sensors)
|
||||||
plot(pm25_sensors)
|
plot(pm25_sensors)
|
||||||
```
|
```
|
||||||
|
@@ -96,20 +97,16 @@ Thats still more than 200 measuring stations, we can work with that.
 
 ### Analyzing sensor data
 Having analyzed the available data sources, let's finally get some measurements.
-We could call `osem_measurements(pm25_sensors)` now, however we are focusing on
+We could call `osem_measurements(pm25_sensors)` now, however we are focussing on
 a restricted area of interest, the city of Berlin.
 Luckily we can get the measurements filtered by a bounding box:
 
-```{r, results=FALSE, message=FALSE}
+```{r}
 library(sf)
 library(units)
 library(lubridate)
 library(dplyr)
 
-```
-
-Since the API takes quite long to response measurements, especially filtered on space and time, we do not run the following chunks for publication of the package on CRAN.
-```{r bbox, results = FALSE, eval=FALSE}
 # construct a bounding box: 12 kilometers around Berlin
 berlin = st_point(c(13.4034, 52.5120)) %>%
   st_sfc(crs = 4326) %>%
@@ -117,26 +114,24 @@ berlin = st_point(c(13.4034, 52.5120)) %>%
   st_buffer(set_units(12, km)) %>%
   st_transform(4326) %>% # the opensensemap expects WGS 84
   st_bbox()
+```
+```{r results = F}
 pm25 = osem_measurements(
   berlin,
   phenomenon = 'PM2.5',
-  from = now() - days(3), # defaults to 2 days
+  from = now() - days(20), # defaults to 2 days
   to = now()
 )
 
-```
-
-```{r}
-pm25 = readRDS('pm25_berlin.rds') # read precomputed file to save resources
 plot(pm25)
 ```
 
 Now we can get started with actual spatiotemporal data analysis.
 First, lets mask the seemingly uncalibrated sensors:
 
-```{r, warning=FALSE}
+```{r}
 outliers = filter(pm25, value > 100)$sensorId
-bad_sensors = outliers[, drop = TRUE] %>% levels()
+bad_sensors = outliers[, drop = T] %>% levels()
 
 pm25 = mutate(pm25, invalid = sensorId %in% bad_sensors)
 ```
@@ -144,7 +139,7 @@ pm25 = mutate(pm25, invalid = sensorId %in% bad_sensors)
 Then plot the measuring locations, flagging the outliers:
 
 ```{r}
-st_as_sf(pm25) %>% st_geometry() %>% plot(col = factor(pm25$invalid), axes = TRUE)
+st_as_sf(pm25) %>% st_geometry() %>% plot(col = factor(pm25$invalid), axes = T)
 ```
 
 Removing these sensors yields a nicer time series plot:
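The chunk that actually draws that time series sits just past the end of the hunk above, so it is not shown here. As a rough sketch only, assuming the `pm25` measurements and the `invalid` flag computed above (with the `value`, `createdAt` and `sensorId` columns returned by `osem_measurements()`); the column choices and plot styling below are illustrative, not taken from the vignette:

```r
# illustrative sketch: PM2.5 over time without the flagged sensors
library(dplyr)

valid_pm25 = filter(pm25, !invalid)          # drop the seemingly uncalibrated sensors
plot(value ~ createdAt, data = valid_pm25,   # POSIXct timestamps on the x axis
     cex = 0.2, xlab = 'time', ylab = 'PM2.5')
```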
@@ -10,7 +10,7 @@ vignette: >
 ---
 
 It may be useful to download data from openSenseMap only once.
-For reproducible results, the data should be saved to disk, and reloaded at a
+For reproducible results, the data could be saved to disk, and reloaded at a
 later point.
 
 This avoids..
@@ -21,49 +21,40 @@ This avoids..
 - stress on the openSenseMap-server.
 
 This vignette shows how to use this built in `opensensmapr` feature, and
-how to do it yourself in case you want to save to other data formats.
+how to do it yourself, if you want to store to other data formats.
 
-```{r setup, results='hide'}
-# this vignette requires:
-library(opensensmapr)
-library(jsonlite)
-library(readr)
-```
-
-## Using the opensensmapr Caching Feature
+## Using openSensMapr Caching Feature
 All data retrieval functions of `opensensmapr` have a built in caching feature,
 which serializes an API response to disk.
 Subsequent identical requests will then return the serialized data instead of making
 another request.
+To do so, each request is given a unique ID based on its parameters.
 
 To use this feature, just add a path to a directory to the `cache` parameter:
 ```{r cache}
-b = osem_boxes(grouptag = 'ifgi', cache = tempdir())
+b = osem_boxes(cache = tempdir())
+list.files(tempdir(), pattern = 'osemcache\\..*\\.rds')
 
 # the next identical request will hit the cache only!
-b = osem_boxes(grouptag = 'ifgi', cache = tempdir())
+b = osem_boxes(cache = tempdir())
 
 # requests without the cache parameter will still be performed normally
-b = osem_boxes(grouptag = 'ifgi')
+b = osem_boxes()
 ```
 
-Looking at the cache directory we can see one file for each request, which is identified through a hash of the request URL:
-```{r cachelisting}
-list.files(tempdir(), pattern = 'osemcache\\..*\\.rds')
-```
-
-You can maintain multiple caches simultaneously which allows to only store data related to a script in the same directory:
+You can maintain multiple caches simultaneously which allows to store only
+serialized data related to a script in its directory:
 ```{r cache_custom}
 cacheDir = getwd() # current working directory
-b = osem_boxes(grouptag = 'ifgi', cache = cacheDir)
+b = osem_boxes(cache = cacheDir)
 
 # the next identical request will hit the cache only!
-b = osem_boxes(grouptag = 'ifgi', cache = cacheDir)
+b = osem_boxes(cache = cacheDir)
 ```
 
 To get fresh results again, just call `osem_clear_cache()` for the respective cache:
-```{r clearcache, results='hide'}
+```{r clearcache}
 osem_clear_cache() # clears default cache
 osem_clear_cache(getwd()) # clears a custom cache
 ```
 
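Both sides of the caching hunk above only show that a repeated call works; one way to see the cache actually being hit is to time the two identical requests. A minimal sketch under the assumptions of the hunk itself (only `osem_boxes()` with its `grouptag` and `cache` parameters is taken from the package; the variable names and the timing comparison are illustrative):

```r
# sketch: the first call goes to the API, the identical second call is read from disk
library(opensensmapr)

cache_dir = tempdir()
system.time(b_first  <- osem_boxes(grouptag = 'ifgi', cache = cache_dir))  # network request
system.time(b_second <- osem_boxes(grouptag = 'ifgi', cache = cache_dir))  # cache hit, near-instant
identical(nrow(b_first), nrow(b_second))
```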
@@ -71,36 +62,108 @@ osem_clear_cache(getwd()) # clears a custom cache
 If you want to roll your own serialization method to support custom data formats,
 here's how:
 
-```{r data, results='hide', eval=FALSE}
+```{r setup, results='hide'}
+# this section requires:
+library(opensensmapr)
+library(jsonlite)
+library(readr)
+
 # first get our example data:
-measurements = osem_measurements('Windgeschwindigkeit')
+boxes = osem_boxes(grouptag = 'ifgi')
+measurements = osem_measurements(boxes, phenomenon = 'PM10')
 ```
 
 If you are paranoid and worry about `.rds` files not being decodable anymore
 in the (distant) future, you could serialize to a plain text format such as JSON.
 This of course comes at the cost of storage space and performance.
-```{r serialize_json, eval=FALSE}
+```{r serialize_json}
 # serializing senseBoxes to JSON, and loading from file again:
-write(jsonlite::serializeJSON(measurements), 'measurements.json')
-measurements_from_file = jsonlite::unserializeJSON(readr::read_file('measurements.json'))
-class(measurements_from_file)
+write(jsonlite::serializeJSON(measurements), 'boxes.json')
+boxes_from_file = jsonlite::unserializeJSON(readr::read_file('boxes.json'))
 ```
 
-This method also persists the R object metadata (classes, attributes).
+Both methods also persist the R object metadata (classes, attributes).
 If you were to use a serialization method that can't persist object metadata, you
 could re-apply it with the following functions:
 
-```{r serialize_attrs, eval=FALSE}
-# note the toJSON call instead of serializeJSON
-write(jsonlite::toJSON(measurements), 'measurements_bad.json')
-measurements_without_attrs = jsonlite::fromJSON('measurements_bad.json')
-class(measurements_without_attrs)
+```{r serialize_attrs}
+# note the toJSON call
+write(jsonlite::toJSON(measurements), 'boxes_bad.json')
+boxes_without_attrs = jsonlite::fromJSON('boxes_bad.json')
 
-measurements_with_attrs = osem_as_measurements(measurements_without_attrs)
-class(measurements_with_attrs)
+boxes_with_attrs = osem_as_sensebox(boxes_without_attrs)
+class(boxes_with_attrs)
 ```
-The same goes for boxes via `osem_as_sensebox()`.
+The same goes for measurements via `osem_as_measurements()`.
 
-```{r cleanup, include=FALSE, eval=FALSE}
-file.remove('measurements.json', 'measurements_bad.json')
+## Workflow for reproducible code
+For truly reproducible code you want it to work and return the same results --
+no matter if you run it the first time or a consecutive time, and without making
+changes to it.
+
+Therefore we need a wrapper around the save-to-file & load-from-file logic.
+The following examples show a way to do just that, and where inspired by
+[this reproducible analysis by Daniel Nuest](https://github.com/nuest/sensebox-binder).
+
+```{r osem_offline}
+# offline logic
+osem_offline = function (func, file, format='rds', ...) {
+  # deserialize if file exists, otherwise download and serialize
+  if (file.exists(file)) {
+    if (format == 'json')
+      jsonlite::unserializeJSON(readr::read_file(file))
+    else
+      readRDS(file)
+  } else {
+    data = func(...)
+    if (format == 'json')
+      write(jsonlite::serializeJSON(data), file = file)
+    else
+      saveRDS(data, file)
+    data
+  }
+}
+
+# wrappers for each download function
+osem_measurements_offline = function (file, ...) {
+  osem_offline(opensensmapr::osem_measurements, file, ...)
+}
+osem_boxes_offline = function (file, ...) {
+  osem_offline(opensensmapr::osem_boxes, file, ...)
+}
+osem_box_offline = function (file, ...) {
+  osem_offline(opensensmapr::osem_box, file, ...)
+}
+osem_counts_offline = function (file, ...) {
+  osem_offline(opensensmapr::osem_counts, file, ...)
+}
 ```
+
+Thats it! Now let's try it out:
+
+```{r test}
+# first run; will download and save to disk
+b1 = osem_boxes_offline('mobileboxes.rds', exposure='mobile')
+
+# consecutive runs; will read from disk
+b2 = osem_boxes_offline('mobileboxes.rds', exposure='mobile')
+class(b1) == class(b2)
+
+# we can even omit the arguments now (though thats not really the point here)
+b3 = osem_boxes_offline('mobileboxes.rds')
+nrow(b1) == nrow(b3)
+
+# verify that the custom sensebox methods are still working
+summary(b2)
+plot(b3)
+```
+
+To re-download the data, just clear the files that were created in the process:
+```{r cleanup, results='hide'}
+file.remove('mobileboxes.rds', 'boxes_bad.json', 'boxes.json', 'measurements.rds')
+```
+
+A possible extension to this scheme comes to mind: Omit the specification of a
+filename, and assign a unique ID to the request instead.
+For example, one could calculate the SHA-1 hash of the parameters, and use it
+as filename.
Binary file not shown.
Binary file not shown.
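The serialization hunk above closes with the suggestion to derive the file name from a SHA-1 hash of the request parameters instead of hard-coding it. One possible sketch of that idea, building on the `osem_offline()` wrapper shown in the hunk; the `digest` package and the `osem_boxes_offline_auto()` helper are assumptions for illustration and are not part of opensensmapr:

```r
# sketch: name the cache file after a SHA-1 hash of the request parameters,
# so identical requests map to the same file (requires the 'digest' package)
library(digest)

osem_boxes_offline_auto = function (..., dir = tempdir()) {
  key  = digest::digest(list(...), algo = 'sha1')        # hash of the parameter list
  file = file.path(dir, paste0('osem_boxes_', key, '.rds'))
  osem_offline(opensensmapr::osem_boxes, file, ...)       # wrapper defined in the hunk above
}

# identical parameters -> identical file name -> the second call reads from disk
b1 = osem_boxes_offline_auto(exposure = 'mobile')
b2 = osem_boxes_offline_auto(exposure = 'mobile')
```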