YVYYNI5L3BSWWYHEQTF5NTHW5TKJDSMWTC3LZVFLIM4URL2SGTIAC
defmodule VivaTest do
  # Integration tests: these hit the live Viva API end-to-end.
  # BUG FIX: the option was misspelled `asynce: true`, so it was silently
  # ignored and the tests did not actually run asynchronously.
  use ExUnit.Case, async: true
  doctest ExViva

  test "get_stations" do
    # The live service should always report a non-empty station list.
    {:ok, stations} = ExViva.get_stations()
    assert is_list(stations)
    refute Enum.empty?(stations)
  end

  test "get_station(86)" do
    # Station 86 ("Flinten 7") is used as a known-good fixture station.
    {:ok, %ExViva.StationSample{}} = ExViva.get_station(86)
  end
end
# Boot the ExUnit test runner (conventionally lives in test/test_helper.exs).
ExUnit.start()
defmodule Viva.StationIdentityTest do
  use ExUnit.Case, async: true

  alias ExViva.{StationIdentity, Station, StationSample, Sample}

  test "Viva.Station" do
    # BUG FIX (test strength): this previously only asserted truthiness;
    # pin the exact id, consistent with the other protocol tests below.
    assert 329 = StationIdentity.station_id(%Station{id: 329})
  end

  test "Viva.StationSample" do
    assert 11009 = StationIdentity.station_id(%StationSample{station_id: 11009})
  end

  test "Viva.Sample" do
    assert 19 = StationIdentity.station_id(%Sample{station_id: 19})
  end

  test "number" do
    # Plain integers implement the protocol and are returned unchanged.
    assert 572 = StationIdentity.station_id(572)
  end
end
defmodule Viva.SampleDecoderTest do
  use ExUnit.Case, async: true

  @fixture "test/full_sample.etf"

  test "simple_decode full sample" do
    # The fixture is a list of raw API responses serialized with
    # :erlang.term_to_binary (Erlang external term format).
    responses =
      @fixture
      |> File.read!()
      |> :erlang.binary_to_term()

    # Every raw sample in every recorded response must decode to a struct.
    for %{"GetSingleStationResult" => %{"Samples" => raw_samples}} <- responses,
        raw_sample <- raw_samples do
      assert %ExViva.Sample{} = ExViva.Decoders.Sample.simple_decode(raw_sample)
    end
  end
end
# Dependency lockfile contents (mix.lock): maps each package name to its
# exact resolved version, checksums, build tool, and transitive constraints.
# Machine-generated by `mix deps.get` — do not edit by hand.
%{
  "castore": {:hex, :castore, "0.1.1", "a8905530209152ddb74989fa2a5bd4fa3a2d3ff5d15ad12578caa7460d807c8b", [:mix], [], "hexpm"},
  "certifi": {:hex, :certifi, "2.5.1", "867ce347f7c7d78563450a18a6a28a8090331e77fa02380b4a21962a65d36ee5", [:rebar3], [{:parse_trans, "~> 3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm", "805abd97539caf89ec6d4732c91e62ba9da0cda51ac462380bbd28ee697a8c42"},
  "earmark": {:hex, :earmark, "1.3.2", "b840562ea3d67795ffbb5bd88940b1bed0ed9fa32834915125ea7d02e35888a5", [:mix], [], "hexpm", "e3be2bc3ae67781db529b80aa7e7c49904a988596e2dbff897425b48b3581161"},
  "ex_doc": {:hex, :ex_doc, "0.20.2", "1bd0dfb0304bade58beb77f20f21ee3558cc3c753743ae0ddbb0fd7ba2912331", [:mix], [{:earmark, "~> 1.3", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.10", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm", "8e24fc8ff9a50b9f557ff020d6c91a03cded7e59ac3e0eec8a27e771430c7d27"},
  "exsync": {:hex, :exsync, "0.2.3", "a1ac11b4bd3808706003dbe587902101fcc1387d9fc55e8b10972f13a563dd15", [:mix], [{:file_system, "~> 0.2", [hex: :file_system, repo: "hexpm", optional: false]}], "hexpm", "23b6b186a2caa1cf5c0c4dfea9bd181d21d80a4032594d2f7c27d7ca78caa51d"},
  "file_system": {:hex, :file_system, "0.2.6", "fd4dc3af89b9ab1dc8ccbcc214a0e60c41f34be251d9307920748a14bf41f1d3", [:mix], [], "hexpm", "0d50da6b04c58e101a3793b1600f9a03b86e3a8057b192ac1766013d35706fa6"},
  "google_geocoding_api": {:hex, :google_geocoding_api, "0.1.4", "c9b54bb6357a9b3b818f9938c1197644a34ce5fe50971c025baa185595c4187d", [:mix], [{:httpoison, "~> 1.5.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:poison, "~> 2.2 or ~> 3.0", [hex: :poison, repo: "hexpm", optional: false]}], "hexpm"},
  "hackney": {:hex, :hackney, "1.15.1", "9f8f471c844b8ce395f7b6d8398139e26ddca9ebc171a8b91342ee15a19963f4", [:rebar3], [{:certifi, "2.5.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.4", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "c2790c9f0f7205f4a362512192dee8179097394400e745e4d20bab7226a8eaad"},
  "httpoison": {:hex, :httpoison, "1.5.0", "71ae9f304bdf7f00e9cd1823f275c955bdfc68282bc5eb5c85c3a9ade865d68e", [:mix], [{:hackney, "~> 1.8", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"},
  "idna": {:hex, :idna, "6.0.0", "689c46cbcdf3524c44d5f3dde8001f364cd7608a99556d8fbd8239a5798d4c10", [:rebar3], [{:unicode_util_compat, "0.4.1", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "4bdd305eb64e18b0273864920695cb18d7a2021f31a11b9c5fbcd9a253f936e2"},
  "jason": {:hex, :jason, "1.1.2", "b03dedea67a99223a2eaf9f1264ce37154564de899fd3d8b9a21b1a6fd64afe7", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fdf843bca858203ae1de16da2ee206f53416bbda5dc8c9e78f43243de4bc3afe"},
  "json": {:hex, :json, "1.2.5", "3682c18c6b07480df2122d0daf5c05457b42c1990f197ce3de53884e8ba834c4", [:mix], [{:benchee, "~> 0.8", [hex: :benchee, repo: "hexpm", optional: true]}, {:benchee_html, "~> 0.1", [hex: :benchee_html, repo: "hexpm", optional: true]}, {:exjsx, "~> 4.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:jsone, "~> 1.4", [hex: :jsone, repo: "hexpm", optional: true]}, {:poison, "~> 3.0", [hex: :poison, repo: "hexpm", optional: true]}, {:tiny, "~> 1.0", [hex: :tiny, repo: "hexpm", optional: true]}], "hexpm"},
  "makeup": {:hex, :makeup, "0.8.0", "9cf32aea71c7fe0a4b2e9246c2c4978f9070257e5c9ce6d4a28ec450a839b55f", [:mix], [{:nimble_parsec, "~> 0.5.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "5fbc8e549aa9afeea2847c0769e3970537ed302f93a23ac612602e805d9d1e7f"},
  "makeup_elixir": {:hex, :makeup_elixir, "0.13.0", "be7a477997dcac2e48a9d695ec730b2d22418292675c75aa2d34ba0909dcdeda", [:mix], [{:makeup, "~> 0.8", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "adf0218695e22caeda2820eaba703fa46c91820d53813a2223413da3ef4ba515"},
  "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"},
  "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"},
  "mint": {:hex, :mint, "0.2.0", "eaf12cbe66c588f77516de93b7f525c6a688d84e93fa244e175cf26f32e1a80e", [:mix], [{:castore, "~> 0.1.0", [hex: :castore, repo: "hexpm", optional: true]}], "hexpm"},
  "mix_test_watch": {:hex, :mix_test_watch, "0.9.0", "c72132a6071261893518fa08e121e911c9358713f62794a90c95db59042af375", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}], "hexpm", "817dec4a7f6edf260258002f99ac8ffaf7a8f395b27bf2d13ec24018beecec8a"},
  "nimble_parsec": {:hex, :nimble_parsec, "0.5.0", "90e2eca3d0266e5c53f8fbe0079694740b9c91b6747f2b7e3c5d21966bba8300", [:mix], [], "hexpm", "5c040b8469c1ff1b10093d3186e2e10dbe483cd73d79ec017993fb3985b8a9b3"},
  "parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm", "17ef63abde837ad30680ea7f857dd9e7ced9476cdd7b0394432af4bfc241b960"},
  "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm"},
  "pure_geocoder": {:hex, :pure_geocoder, "0.1.0", "38a9483576485c33e470ce96798d5b628f0ee2b87010a507f061c8830dab7f40", [:mix], [{:httpoison, "~> 1.3.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:poison, "~> 3.1", [hex: :poison, repo: "hexpm", optional: false]}], "hexpm"},
  "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.4", "f0eafff810d2041e93f915ef59899c923f4568f4585904d010387ed74988e77b", [:make, :mix, :rebar3], [], "hexpm", "603561dc0fd62f4f2ea9b890f4e20e1a0d388746d6e20557cafb1b16950de88c"},
  "unicode_util_compat": {:hex, :unicode_util_compat, "0.4.1", "d869e4c68901dd9531385bb0c8c40444ebf624e60b6962d95952775cac5e90cd", [:rebar3], [], "hexpm", "1d1848c40487cdb0b30e8ed975e34e025860c02e419cb615d255849f3427439d"},
}
defmodule ExViva.MixProject do
  use Mix.Project

  # Mix project definition: package identity, Elixir requirement,
  # dependencies, and the Hex package metadata used for publishing.
  def project do
    [
      app: :ex_viva,
      version: "0.1.0",
      elixir: "~> 1.8",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      description: "Weather observations from the Swedish Maritime Administration",
      package: package()
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # Hex package metadata: which files get published, maintainers,
  # license and project links.
  def package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE*"],
      maintainers: ["Hannes Nevalainen"],
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/kwando/ex_viva"}
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:jason, ">= 0.0.0"},
      {:hackney, ">= 0.0.0"},
      {:exsync, ">= 0.0.0", only: :dev},
      {:ex_doc, ">= 0.0.0", only: :dev},
      {:mix_test_watch, "0.9.0", runtime: false, only: :dev}
    ]
  end
end
defmodule ExViva do
  @moduledoc """
  This module provides functions to fetch information from the Swedish Maritime Administration.

  The data from the API is not interpreted in any way, it is just normalized and parsed
  into appropriate Elixir datatypes.
  """

  @base_url "https://services.viva.sjofartsverket.se:8080"

  alias ExViva.{Decoders, HTTP}

  @doc """
  Returns a list of %ExViva.Station{} structs.
  """
  def get_stations(opts \\ []) do
    get_stations_request()
    |> request(opts)
  end

  @doc """
  Get observations for a particular station.

  `station_id` can be a number, a %ExViva.Station{} struct or anything that implements
  the ExViva.StationIdentity protocol.
  """
  def get_station(station_id, opts \\ []) do
    get_station_request(station_id, opts)
    |> request(opts)
  end

  # Builds the request that lists all stations; decoding handlers are
  # applied to the response by ExViva.HTTP.
  def get_stations_request() do
    HTTP.Request.get("/output/vivaoutputservice.svc/vivastation/")
    |> HTTP.Request.accept_json()
    |> HTTP.Request.add_response_handler(:normalize_headers)
    |> HTTP.Request.add_response_handler(Decoders.GetStationsResult)
  end

  # Builds the request for a single station. Pass `decode: false` in `opts`
  # to receive the raw JSON instead of an %ExViva.StationSample{}.
  def get_station_request(station_id, opts \\ []) do
    station_id = ExViva.StationIdentity.station_id(station_id)

    HTTP.Request.get("/output/vivaoutputservice.svc/vivastation/#{station_id}")
    |> HTTP.Request.accept_json()
    |> HTTP.Request.add_response_handler(:normalize_headers)
    |> use_decoder(Decoders.GetSingleStationResult, get_opt(opts, :decode, true))
  end

  @doc """
  Samples all stations concurrently.

  Returns `{:ok, samples, errors}` where `samples` is a flat list of all
  successfully fetched samples and `errors` the failed per-station results.
  """
  def sample_all(opts \\ []) do
    with {:ok, stations} <- get_stations() do
      {success, errors} =
        stations
        |> Task.async_stream(&get_station(&1, opts),
          max_concurrency: get_opt(opts, :concurrency, System.schedulers_online())
        )
        |> Stream.map(&elem(&1, 1))
        # BUG FIX: the original clause only matched {:ok, _result} and crashed
        # with a FunctionClauseError as soon as any station request failed;
        # classify everything else as an error instead.
        |> Enum.split_with(fn
          {:ok, _result} -> true
          _other -> false
        end)

      {:ok, success |> Enum.flat_map(fn {:ok, result} -> result.samples end), errors}
    end
  end

  # Tags the request with a User-Agent, executes it and optionally unwraps
  # the {status, headers, body} triple into {:ok, body}.
  defp request(request, opts) do
    request
    |> HTTP.Request.put_header("User-Agent", "ExViva 0.0.1")
    |> HTTP.request(base_url: @base_url)
    |> unwrap(get_opt(opts, :unwrap, true))
  end

  defp get_opt(opts, key, default), do: Keyword.get(opts, key, default)

  defp use_decoder(request, decoder, true) do
    request
    |> HTTP.Request.add_response_handler(decoder)
  end

  defp use_decoder(request, _, _), do: request

  # NOTE(review): only HTTP 200 is unwrapped here — any other status raises
  # a FunctionClauseError when `unwrap: true`. Confirm whether non-200
  # responses should instead return {:error, {status, body}}.
  defp unwrap({:ok, {200, _headers, body}}, true) do
    {:ok, body}
  end

  defp unwrap(result, false), do: result
end
defprotocol ExViva.StationIdentity do
  @doc "Returns the numeric station id for the given value."
  def station_id(arg)
end
# A plain integer is treated as already being a station id.
defimpl ExViva.StationIdentity, for: Integer do
  def station_id(value), do: value
end
defmodule ExViva.Station do
  @moduledoc """
  Datatype holding information about a Viva.Station.
  """

  defstruct [:id, :name, :latitude, :longitude]

  defimpl ExViva.StationIdentity, for: __MODULE__ do
    # A station identifies itself through its `:id` field.
    def station_id(%{id: station_id}), do: station_id
  end
end
defmodule ExViva.StationSample do
  @moduledoc """
  Datatype for holding information that is returned from the API.
  """

  defstruct station_id: nil,
            samples: [],
            requested_at: nil,
            error: nil

  defimpl ExViva.StationIdentity, for: __MODULE__ do
    # The sample set carries the id of the station it was fetched from.
    def station_id(%{station_id: id}), do: id
  end
end
defmodule ExViva.Sample do
  @moduledoc """
  Datatype holding a single decoded sample returned from the API.
  """

  defstruct [
    :calm,
    :heading,
    :message,
    :name,
    :quality,
    :trend,
    :station_id,
    :type,
    :unit,
    :updated_at,
    :value,
    :water_level_reference
  ]

  defimpl ExViva.StationIdentity, for: __MODULE__ do
    # Each sample carries the id of its originating station.
    def station_id(%{station_id: id}), do: id
  end
end
defmodule ExViva.HTTP.Request do
  # Value type describing an HTTP request together with the response
  # handlers that should be applied once a response arrives. Handlers are
  # stored most-recently-added first.
  defstruct method: :get,
            url: nil,
            headers: [],
            body: "",
            options: [],
            response_handlers: []

  # Builds a GET request for `url`.
  def get(url) do
    %__MODULE__{method: :get, url: url}
  end

  # Builds a POST request for `url` with an optional `body`.
  def post(url, body \\ "") do
    %__MODULE__{method: :post, url: url, body: body}
  end

  # Prepends a `{header, value}` pair to the request headers.
  def put_header(%__MODULE__{} = req, header, value) do
    %{req | headers: [{header, value} | req.headers]}
  end

  # Adds an HTTP Basic Authorization header built from the credentials.
  def basic_auth(request, username, password) do
    token = Base.encode64("#{username}:#{password}")
    put_header(request, "Authorization", "Basic #{token}")
  end

  # Asks for a JSON response and registers the JSON decoding handler.
  def accept_json(request) do
    request
    |> put_header("Accept", "application/json")
    |> add_response_handler(:decode_json)
  end

  # Prepends `handler` to the list of response handlers.
  def add_response_handler(%__MODULE__{} = req, handler) do
    %{req | response_handlers: [handler | req.response_handlers]}
  end

  # Reads the request's HTTP method.
  def method(%__MODULE__{method: method}) do
    method
  end

  # Returns a copy of the request with its HTTP method replaced.
  def method(%__MODULE__{} = req, method) do
    %{req | method: method}
  end
end
defmodule ExViva.HTTP do
  alias ExViva.HTTP

  # Executes a %HTTP.Request{} via :hackney and pipes the response through
  # the request's registered response handlers. `opts` may carry :base_url
  # used to resolve relative request urls.
  def request(request, opts \\ []) do
    request = preprocess_request(request, opts)

    {:ok, status, headers, client} =
      :hackney.request(request.method, request.url, request.headers, request.body, request.options)

    {:ok, body} = :hackney.body(client)
    {:ok, {status, headers, body} |> apply_transforms(request.response_handlers)}
  end

  # Handlers are stored most-recently-added first, so we recurse before
  # applying: that runs them in the order they were registered.
  defp apply_transforms(response, []), do: response

  defp apply_transforms(response, [transform | transforms]) do
    response
    |> apply_transforms(transforms)
    |> apply_transform(transform)
  end

  defp apply_transform({status, headers, body}, :normalize_headers) do
    {status, normalize_headers(headers), body}
  end

  defp apply_transform({status, headers, body}, :decode_json) do
    {:ok, json} = Jason.decode(body)
    {status, headers, json}
  end

  # Any other handler is a module expected to implement handle_response/2.
  defp apply_transform({status, headers, body}, module) when is_atom(module) do
    module.handle_response({status, headers, body}, [])
  end

  # Lower-cases all header names so later lookups are case-insensitive.
  defp normalize_headers(headers) do
    for {header, value} <- headers do
      {String.downcase(header), value}
    end
  end

  defp preprocess_request(request, opts) do
    %{request | url: process_url(request.url, Keyword.get(opts, :base_url))}
    |> process_body()
  end

  # A {:json, data} body is encoded and tagged with the JSON content type.
  defp process_body(%{body: {:json, data}} = request) do
    request
    |> HTTP.Request.put_header("Content-Type", "application/json")
    |> Map.put(:body, Jason.encode!(data))
  end

  defp process_body(%{body: ""} = request) do
    request
  end

  # BUG FIX: requests built with a non-empty binary body
  # (HTTP.Request.post/2) previously crashed here with a
  # FunctionClauseError; pass them through unchanged.
  defp process_body(request), do: request

  defp process_url("/" <> _, nil) do
    raise ArgumentError, "relative urls is not supported"
  end

  defp process_url("/" <> path, base_url) do
    "#{base_url}/#{path}"
  end

  defp process_url(url, _) do
    url
  end
end
defmodule ExViva.Decoders.Sample do
  # Fetches `key` from the `sample` variable in the caller's scope,
  # raising KeyError when the key is missing.
  defmacrop v!(key) do
    quote do
      Map.fetch!(var!(sample), unquote(key))
    end
  end

  @doc """
  Decodes a raw sample map (string keys, as returned by the Viva API)
  into an `%ExViva.Sample{}` struct.
  """
  def simple_decode(sample) do
    unit = v!("Unit")
    type = parse_type(v!("Type"), unit)

    %ExViva.Sample{
      heading: v!("Heading"),
      unit: v!("Unit"),
      trend: v!("Trend"),
      water_level_reference: v!("WaterLevelReference"),
      calm: v!("Calm"),
      type: type,
      value: parse_value(v!("Value"), unit, type)
    }
    # Redundant leading with_name/2 removed: with_common/2 applies it too.
    |> with_common(sample)
  end

  defp with_station_id(result, %{"StationID" => station_id}) do
    %{result | station_id: station_id}
  end

  defp with_message(result, %{"Msg" => message}) do
    %{result | message: message}
  end

  # Normalize the API's "Ok" quality marker to lowercase "ok".
  defp with_quality(result, %{"Quality" => "Ok"}) do
    %{result | quality: "ok"}
  end

  defp with_quality(result, %{"Quality" => quality}) do
    %{result | quality: quality}
  end

  defp with_timestamp(result, %{"Updated" => updated}) do
    %{result | updated_at: parse_timestamp(updated)}
  end

  defp with_name(result, %{"Name" => name}) do
    %{result | name: name}
  end

  # Copies the fields shared by every sample type onto the struct.
  defp with_common(result, sample) do
    result
    |> with_name(sample)
    |> with_station_id(sample)
    |> with_message(sample)
    |> with_quality(sample)
    |> with_timestamp(sample)
  end

  # Parses values that may carry a direction prefix (e.g. "NW 4.5")
  # or be a bare number.
  defp directional_float(value) do
    case String.split(value, " ", parts: 2) do
      [_dir, value] ->
        String.to_float(value)

      [value] ->
        single_float(value)
    end
  end

  # Timestamps arrive as "YYYY-MM-DD HH:MM:SS"; convert to ISO 8601 first.
  defp parse_timestamp(timestamp) do
    String.replace(timestamp, " ", "T")
    |> NaiveDateTime.from_iso8601!()
  end

  # Parses a float, falling back to integer syntax (e.g. "4" -> 4.0).
  defp single_float(value) do
    try do
      String.to_float(value)
    rescue
      ArgumentError ->
        String.to_integer(value) * 1.0
    end
  end

  defp parse_value(value, unit, type) do
    parse_value(value, unit)
  rescue
    # BUG FIX: the original used `catch`, which only intercepts thrown
    # values, so the ArgumentError raised by a failed parse was never
    # logged. Rescue the exception, log it and re-raise with the
    # original stacktrace.
    error ->
      require Logger
      Logger.error("failed to decode: #{inspect(type)}")
      reraise error, __STACKTRACE__
  end

  # BUG FIX: "-" means "no reading available" and must be matched before
  # any unit-specific clause — previously "-" with units like "mm/h" fell
  # into the float parsers and crashed.
  defp parse_value("-", _unit), do: nil
  defp parse_value(value, "m³/s"), do: single_float(value)
  defp parse_value(value, "mm/h"), do: single_float(value)
  defp parse_value(value, "#/cm2/h"), do: single_float(value)
  defp parse_value(value, unit) when unit in ["m/s", "knop", "s"], do: directional_float(value)
  defp parse_value(value, "cm"), do: single_float(value)

  defp parse_value(value, unit) when unit in ["‰", "%", "kg/m³", "°C", "mbar"],
    do: single_float(value)

  # NOTE(review): a ">"-prefixed reading is tagged :less_than, which looks
  # inverted (">20" reads as "more than 20") — confirm the intended
  # semantics with consumers before renaming the tag.
  defp parse_value(">" <> value, "m") do
    {:less_than, String.to_integer(value)}
  end

  defp parse_value(value, "m"), do: directional_float(value)

  # Maps the API's (type, unit) pair to a descriptive atom.
  defp parse_type("wind", _unit), do: :wind
  defp parse_type("level", _unit), do: :water_level
  defp parse_type("watertemp", _unit), do: :water_temperature
  defp parse_type("stream", _unit), do: :stream
  defp parse_type("water", "‰"), do: :salinity
  defp parse_type("water", "kg/m³"), do: :water_density
  defp parse_type("water", "m³/s"), do: :water_flow
  defp parse_type("pressure", "mbar"), do: :air_pressure
  defp parse_type("air", "%"), do: :humidity
  defp parse_type("airtemp", "°C"), do: :temperature
  defp parse_type("sight", "m"), do: :sight
  defp parse_type("wave", "m"), do: :wave_height
  defp parse_type("wave", "s"), do: :wave_period
  defp parse_type("rain", "mm/h"), do: :rain
  defp parse_type("rain", "#/cm2/h"), do: :hail_intensity
end
defmodule ExViva.Decoders.GetStationsResult do
  # Response handler: turns the raw GetStationsResult payload into a list
  # of %ExViva.Station{} structs. Only matches successful responses, i.e.
  # "Felmeddelande" (Swedish for "error message") must be nil.
  def handle_response({status, headers, %{"GetStationsResult" => %{"Felmeddelande" => nil, "Stations" => stations}}}, _opts) do
    mapped = for raw <- stations, do: map_station(raw)
    {status, headers, mapped}
  end

  # Picks the fields we care about out of one raw station map.
  defp map_station(raw) do
    %ExViva.Station{
      id: raw["ID"],
      name: raw["Name"],
      latitude: raw["Lat"],
      longitude: raw["Lon"]
    }
  end
end
defmodule ExViva.Decoders.GetSingleStationResult do
  # Response handler: decodes the JSON body into an %ExViva.StationSample{}.
  def handle_response({status, headers, body}, _opts) do
    {:ok, station_sample} = decode(body)
    {status, headers, station_sample}
  end

  # Builds the StationSample struct from the raw GetSingleStationResult
  # payload, stamping it with the time the decode happened.
  def decode(%{"GetSingleStationResult" => %{"Felmeddelande" => error, "ID" => station_id, "Samples" => samples}}) do
    {:ok,
     %ExViva.StationSample{
       station_id: station_id,
       samples: map_samples(samples),
       requested_at: DateTime.utc_now(),
       error: error
     }}
  end

  defp map_samples(raw_samples) do
    Enum.map(raw_samples, &ExViva.Decoders.Sample.simple_decode/1)
  end
end
version: '2'
services:
arangodb:
image: arangodb:latest
environment:
- ARANGO_ROOT_PASSWORD=${ARANGO_ROOT_PASSWORD}
ports:
- 8529:8529
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
# NOTE(review): Mix.Config is deprecated since Elixir 1.9 in favor of
# `import Config` — consider migrating when bumping the Elixir requirement.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# third-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :viva, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:viva, :key)
#
# You can also configure a third-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
# ExViva
An Elixir API to weather observations from the Swedish Maritime Administration.
Listing all stations:
```elixir
iex(1)> ExViva.get_stations
{:ok,
[
%ExViva.Station{
id: 169,
latitude: 57.789774,
longitude: 12.010189,
name: "Agnesberg (GBG Stad)"
},
%ExViva.Station{id: 21, latitude: 65.5697, longitude: 22.1725, name: "Ale"},
%ExViva.Station{
id: 15,
latitude: 58.484266,
longitude: 16.960683,
name: "Arkö"
},
%ExViva.Station{id: 17, latitude: 65.5727, longitude: 22.1703, name: "Atle"},
%ExViva.Station{
id: 25,
latitude: 55.74588,
longitude: 12.9168,
name: "Barsebäck"
},
%ExViva.Station{
id: 141,
latitude: 59.32078,
longitude: 18.15478,
name: "Blockhusudden (SH)"
},
%ExViva.Station{
id: 27,
latitude: 59.0998,
longitude: 17.67833,
name: "Brandalsund"
},
%ExViva.Station{
id: 30,
latitude: 58.34005,
longitude: 12.351683,
name: "Brinkebergskulle Nedre"
},
%ExViva.Station{
id: 31,
latitude: 58.34102,
longitude: 12.35092,
name: "Brinkebergskulle Övre"
},
%ExViva.Station{
id: 32,
latitude: 58.336,
longitude: 11.4046,
name: "Brofjorden"
},
%ExViva.Station{
id: 24,
latitude: 58.34285,
longitude: 11.40125,
name: "Brofjorden Tavla 1"
},
%ExViva.Station{
id: 135,
latitude: 58.2508,
longitude: 11.2239,
name: "Brofjordens Angöring"
},
%ExViva.Station{
id: 33,
latitude: 60.7315,
longitude: 17.325833,
name: "Bönan"
},
%ExViva.Station{
id: 5,
latitude: 58.382367,
longitude: 12.310783,
name: "Dalbobron"
},
%ExViva.Station{
id: 34,
latitude: 59.184833,
longitude: 17.642833,
name: "E4 Bron Södertälje"
},
%ExViva.Station{
id: 35,
latitude: 60.728167,
longitude: 17.557833,
name: "Eggegrund"
},
%ExViva.Station{
id: 168,
latitude: 57.696567,
longitude: 11.908833,
name: "Eriksberg (GBG Stad)"
},
%ExViva.Station{
id: 119,
latitude: 56.88462,
longitude: 12.468391,
name: "Falkenberg Vind"
},
%ExViva.Station{
id: 36,
latitude: 56.892,
longitude: 12.4895,
name: "Falkenberg Vst"
},
%ExViva.Station{
id: 91,
latitude: 55.56098,
longitude: 12.809541,
name: "Flinten 16"
},
%ExViva.Station{
id: 86,
latitude: 55.589379,
longitude: 12.844475,
name: "Flinten 7"
},
%ExViva.Station{
id: 111,
latitude: 55.518195,
longitude: 12.744768,
name: "Flinten SW"
},
%ExViva.Station{
id: 12,
latitude: 60.408611,
longitude: 18.210833,
name: "Forsmark"
},
%ExViva.Station{id: 22, latitude: 65.5718, longitude: 22.171, name: "Frej"},
%ExViva.Station{
id: 140,
latitude: 59.3463,
longitude: 18.12708,
name: "Frihamnen (SH)"
},
%ExViva.Station{
id: 39,
latitude: 59.663757,
longitude: 18.931997,
name: "Furusund"
},
%ExViva.Station{
id: 158,
latitude: 64.915833,
longitude: 21.230555,
name: "Furuögrund (SMHI)"
},
%ExViva.Station{
id: 153,
latitude: 62.349932,
longitude: 17.578059,
name: "Gubben"
},
%ExViva.Station{
id: 40,
latitude: 64.6633,
longitude: 21.3167,
name: "Gåsören"
},
%ExViva.Station{id: 41, latitude: 58.0983, longitude: 12.1506, name: "Göta"},
%ExViva.Station{
id: 42,
latitude: 57.71441,
longitude: 11.9675,
name: "Götaälvbron"
},
%ExViva.Station{
id: 43,
latitude: 56.649783,
longitude: 12.8425,
name: "Halmstad"
},
%ExViva.Station{
id: 113,
latitude: 57.896333,
longitude: 11.467167,
name: "Hamneskär"
},
%ExViva.Station{id: 44, latitude: 60.17, longitude: 18.482, name: "Hargs Hamn"},
%ExViva.Station{
id: 45,
latitude: 56.041167,
longitude: 12.6845,
name: "Helsingborg"
},
%ExViva.Station{
id: 129,
latitude: 56.018933,
longitude: 12.697067,
name: "Helsingborg Sydhamnen"
},
%ExViva.Station{
id: 130,
latitude: 56.025517,
longitude: 12.691433,
name: "Helsingborg Västhamnen"
},
%ExViva.Station{
id: 167,
latitude: 56.019992,
longitude: 12.687072,
name: "Helsingborg Öresund"
},
%ExViva.Station{
id: 56,
latitude: 59.533067,
longitude: 17.008166,
name: "Hjulstabron"
},
%ExViva.Station{
id: 137,
latitude: 56.920945,
longitude: 18.150568,
name: "Hoburg (SMHI)"
},
%ExViva.Station{
id: 134,
latitude: 63.695429,
longitude: 20.347527,
name: "Holmsund"
},
%ExViva.Station{id: 174, latitude: 63.3185, longitude: 19.1562, name: "Husum"},
%ExViva.Station{
id: 4,
latitude: 58.603779,
longitude: 17.315855,
name: "Hävringe"
},
%ExViva.Station{
id: 126,
latitude: 57.8555,
longitude: 12.0091,
name: "Jordfallsbron"
},
%ExViva.Station{id: 58, latitude: 58.63418, longitude: 16.324758, ...},
%ExViva.Station{id: 59, latitude: 65.788833, ...},
%ExViva.Station{id: 161, ...},
%ExViva.Station{...},
...
]}
```
Getting observations from a particular station:
```elixir
iex(1)> ExViva.get_station(86)
{:ok,
%ExViva.StationSample{
requested_at: #DateTime<2019-04-27 14:37:55.199841Z>,
samples: [
%ExViva.Sample{
calm: 0,
heading: 0,
message: "",
name: "Vattenstånd",
quality: "ok",
station_id: 86,
trend: "",
type: :water_level,
unit: "cm",
updated_at: ~N[2019-04-27 16:35:00],
value: -18.0,
water_level_reference: nil
},
%ExViva.Sample{
calm: 0,
heading: 0,
message: "",
name: "Vattentemp",
quality: "ok",
station_id: 86,
trend: "",
type: :water_temperature,
unit: "°C",
updated_at: ~N[2019-04-27 16:35:00],
value: 8.2,
water_level_reference: nil
},
%ExViva.Sample{
calm: 0,
heading: 0,
message: "",
name: "Vattentemp Botten",
quality: "ok",
station_id: 86,
trend: "",
type: :water_temperature,
unit: "°C",
updated_at: ~N[2019-04-27 16:35:00],
value: 6.7,
water_level_reference: nil
},
%ExViva.Sample{
calm: 0,
heading: 204,
message: "",
name: "Ström Yta",
quality: "ok",
station_id: 86,
trend: "",
type: :stream,
unit: "knop",
updated_at: ~N[2019-04-27 16:35:00],
value: 1.1,
water_level_reference: nil
},
%ExViva.Sample{
calm: 0,
heading: 218,
message: "",
name: "Ström 4m",
quality: "ok",
station_id: 86,
trend: "",
type: :stream,
unit: "knop",
updated_at: ~N[2019-04-27 16:35:00],
value: 0.9,
water_level_reference: nil
},
%ExViva.Sample{
calm: 0,
heading: 208,
message: "",
name: "Ström 2m",
quality: "ok",
station_id: 86,
trend: "",
type: :stream,
unit: "knop",
updated_at: ~N[2019-04-27 16:35:00],
value: 1.7,
water_level_reference: nil
},
%ExViva.Sample{
calm: 0,
heading: 0,
message: "",
name: "Salthalt",
quality: "ok",
station_id: 86,
trend: "",
type: :salinity,
unit: "‰",
updated_at: ~N[2019-04-27 16:35:00],
value: 8.4,
water_level_reference: nil
},
%ExViva.Sample{
calm: 0,
heading: 0,
message: "",
name: "Salthalt Botten",
quality: "ok",
station_id: 86,
trend: "",
type: :salinity,
unit: "‰",
updated_at: ~N[2019-04-27 16:35:00],
value: 24.8,
water_level_reference: nil
},
%ExViva.Sample{
calm: 0,
heading: 0,
message: "",
name: "Densitet Botten",
quality: "ok",
station_id: 86,
trend: "",
type: :water_density,
unit: "kg/m³",
updated_at: ~N[2019-04-27 16:35:00],
value: 1019.4,
water_level_reference: nil
}
],
station_id: 86
}}
```
Copyright 2019 Hannes Nevalainen
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# The directory Mix will write compiled artifacts to.
/_build/
# If you run "mix test --cover", coverage assets end up here.
/cover/
# The directory Mix downloads your dependencies sources to.
/deps/
# Where third-party dependencies like ExDoc output generated docs.
/doc/
# Ignore .fetch files in case you like to edit your project deps locally.
/.fetch
# If the VM crashes, it generates a dump, let's ignore it too.
erl_crash.dump
# Also ignore archive artifacts (built via "mix archive.build").
*.ez
# Ignore package tarball (built via "mix hex.build").
viva-*.tar
viva-*.tar.gz
# this is downloaded data
/data
.git
.env
# The directory Mix will write compiled artifacts to.
/_build/
# If you run "mix test --cover", coverage assets end up here.
/cover/
# The directory Mix downloads your dependencies sources to.
/deps/
# Where third-party dependencies like ExDoc output generated docs.
/doc/
# Ignore .fetch files in case you like to edit your project deps locally.
/.fetch
# If the VM crashes, it generates a dump, let's ignore it too.
erl_crash.dump
# Also ignore archive artifacts (built via "mix archive.build").
*.ez
# Ignore package tarball (built via "mix hex.build").
viva-*.tar
viva-*.tar.gz
# this is downloaded data
/data
# Used by "mix format"
# `inputs` lists the file globs that `mix format` will rewrite.
[
  inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
]