Compare commits
No commits in common. "main" and "0.1.4" have entirely different histories.
45 changed files with 1473 additions and 4394 deletions
@@ -1,4 +1,2 @@
.git
target
db/huellas-test
db/*.wal
6 .gitignore vendored
@@ -1,4 +1,4 @@
build
db
/target
/db
node_modules
target
build
@@ -1,2 +0,0 @@
ignored:
- DL3018
12 .sqlfluff
@@ -1,12 +0,0 @@
[sqlfluff]
dialect = sqlite
[sqlfluff:rules:capitalisation.keywords]
capitalisation_policy = upper
[sqlfluff:rules:capitalisation.identifiers]
capitalisation_policy = upper
[sqlfluff:rules:capitalisation.functions]
extended_capitalisation_policy = upper
[sqlfluff:rules:capitalisation.literals]
capitalisation_policy = upper
[sqlfluff:rules:capitalisation.types]
extended_capitalisation_policy = upper
@ -1,20 +0,0 @@
|
|||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "INSERT INTO places\n (name, address, open_hours, icon, description, longitude, latitude, url)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?)\n RETURNING id",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Int64"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 8
|
||||
},
|
||||
"nullable": [
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "92ac9ff4e52046e57f006846914912c790d4fa63428c2e5d432358aa55bd2bbc"
|
||||
}
|
||||
|
|
@ -1,12 +0,0 @@
|
|||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "UPDATE places\n SET (name, address, open_hours, icon, description, longitude, latitude, url)\n = (?, ?, ?, ?, ?, ?, ?, ?)\n WHERE id = ?",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 9
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "96057f55cf85aa23dd20bd1277f075176c3297d02d6056ce93ef991ce4f6ceed"
|
||||
}
|
||||
|
|
@ -1,12 +0,0 @@
|
|||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "UPDATE places SET active = FALSE WHERE id = ?",
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
},
|
||||
"nullable": []
|
||||
},
|
||||
"hash": "af66ec71413501f84c7f4cb0dd732c8ebfcd3da36a5f1177918c2277a8674c28"
|
||||
}
|
||||
|
|
@ -1,68 +0,0 @@
|
|||
{
|
||||
"db_name": "SQLite",
|
||||
"query": "SELECT id, name, address, open_hours, icon, description, url,\n longitude as \"longitude: f64\", latitude as \"latitude: f64\"\n FROM places\n WHERE active = TRUE",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "address",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "open_hours",
|
||||
"ordinal": 3,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "icon",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "url",
|
||||
"ordinal": 6,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "longitude: f64",
|
||||
"ordinal": 7,
|
||||
"type_info": "Float"
|
||||
},
|
||||
{
|
||||
"name": "latitude: f64",
|
||||
"ordinal": 8,
|
||||
"type_info": "Float"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
},
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false
|
||||
]
|
||||
},
|
||||
"hash": "dae51f97c52b1391d2fa1109a2dc550dc9e412d60c538db82a841964d631aa0f"
|
||||
}
|
||||
90 CHANGELOG.md
@@ -1,90 +0,0 @@
# Changelog

## [0.3.4] - 2025-06-15

### Features

- Show IG urls as @username ([#55](https://oolong.ludwig.dog/pitbuster/huellas/issues/55))

## [0.3.3] - 2025-06-14

### Bug Fixes

- Docker build ([#52](https://oolong.ludwig.dog/pitbuster/huellas/issues/52))
- Add address column ([#53](https://oolong.ludwig.dog/pitbuster/huellas/issues/53))

## [0.3.2] - 2025-06-14

### Bug Fixes

- Use correct regex to display instagram urls ([#50](https://oolong.ludwig.dog/pitbuster/huellas/issues/50))

### Documentation

- Improve README ([#43](https://oolong.ludwig.dog/pitbuster/huellas/issues/43))

### Features

- Initial TUI for administration ([#49](https://oolong.ludwig.dog/pitbuster/huellas/issues/49))

### Miscellaneous Tasks

- Bump dependencies ([#42](https://oolong.ludwig.dog/pitbuster/huellas/issues/42))
- Update dependencies ([#44](https://oolong.ludwig.dog/pitbuster/huellas/issues/44))
- Bump dependencies ([#45](https://oolong.ludwig.dog/pitbuster/huellas/issues/45))
- Update dependencies ([#46](https://oolong.ludwig.dog/pitbuster/huellas/issues/46))
- Updates ([#47](https://oolong.ludwig.dog/pitbuster/huellas/issues/47))
- Use native HTML5 dialog element and refactor backend into hexagonal architecture. ([#48](https://oolong.ludwig.dog/pitbuster/huellas/issues/48))

## [0.3.0] - 2024-03-07

### Documentation

- Improve README.md ([#39](https://oolong.ludwig.dog/pitbuster/huellas/issues/39))

### Features

- [**breaking**] Use msgpack instead of json ([#40](https://oolong.ludwig.dog/pitbuster/huellas/issues/40))

### Miscellaneous Tasks

- 0.3.0 ([#41](https://oolong.ludwig.dog/pitbuster/huellas/issues/41))

## [0.2.3] - 2024-01-19

### Miscellaneous Tasks

- Add git hooks ([#35](https://oolong.ludwig.dog/pitbuster/huellas/issues/35))
- Add sqlfluff config ([#36](https://oolong.ludwig.dog/pitbuster/huellas/issues/36))

## [0.2.2] - 2023-11-10

### Bug Fixes

- Docker build ([#24](https://oolong.ludwig.dog/pitbuster/huellas/issues/24))
- Fix GMaps links and do some dependencies maintainance ([#32](https://oolong.ludwig.dog/pitbuster/huellas/issues/32))

### Miscellaneous Tasks

- Add docker-compose.yml file for testing ([#25](https://oolong.ludwig.dog/pitbuster/huellas/issues/25))
- Update sqlx offline files ([#28](https://oolong.ludwig.dog/pitbuster/huellas/issues/28))
- Update rust version in docker to 1.71 ([#29](https://oolong.ludwig.dog/pitbuster/huellas/issues/29))
- Add CHANGELOG.md and git-cliff config ([#31](https://oolong.ludwig.dog/pitbuster/huellas/issues/31))

## [0.2.1] - 2023-05-23

### Miscellaneous Tasks

- Add tests ([#19](https://oolong.ludwig.dog/pitbuster/huellas/issues/19))
- Update sqlx to 0.7.0-alpha3 ([#21](https://oolong.ludwig.dog/pitbuster/huellas/issues/21))
- Axum -> 0.6.18, tracing-subscriber -> 0.3.17, tokio -> 1.28.1, serde ->1.0.163,serde_json -> 1.0.96 ([#22](https://oolong.ludwig.dog/pitbuster/huellas/issues/22))

## [0.1.1] - 2022-11-23

### Features

- Add and Edit places through Leaflet.contextmenu ([#4](https://oolong.ludwig.dog/pitbuster/huellas/issues/4))

## [0.1.0] - 2022-10-22
2904 Cargo.lock generated
File diff suppressed because it is too large
66 Cargo.toml
@@ -1,58 +1,20 @@
[package]
name = "huellas"
version = "0.3.4"
edition = "2024"
version = "0.1.4"
edition = "2021"
license = "AGPL-3.0"
links = "sqlite"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
anyhow = "1.0.98"
axum = { version = "0.8.4", default-features = false, features = [
    "tracing",
    "tokio",
    "http1",
    "http2",
] }
axum-msgpack = "0.5.0"
clap = { version = "4.5.40", features = ["derive"] }
# This must be the same version that ratatui depends on :(
crossterm = { version = "0.28.1", default-features = false, features = [
    "bracketed-paste",
    "event-stream",
    "serde",
] }
dotenvy = "0.15.7"
itertools = "0.14.0"
futures = { version = "0.3.31", default-features = false }
ratatui = { version = "0.29.0", default-features = false, features = [
    "crossterm",
] }
serde = { version = "1.0.219", features = ["derive"] }
sqlx = { version = "0.8.6", default-features = false, features = [
    "macros",
    "migrate",
    "runtime-tokio",
    "sqlite",
    "tls-rustls",
] }
thiserror = "2.0.12"
tokio = { version = "1.45.1", default-features = false, features = [
    "macros",
    "rt-multi-thread",
    "signal",
] }
tokio-util = "0.7.15"
tower-http = { version = "0.6.6", default-features = false, features = ["fs"] }
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", default-features = false, features = [
    "env-filter",
    "fmt",
    "tracing",
    "tracing-log",
] }
tui-textarea = { version = "0.7.0", default-features = false, features = [
    "ratatui",
    "crossterm",
] }
rocket = {version = "0.5.0-rc.2", features = ["json"]}

[dev-dependencies]
axum-test = { version = "17.3.0", features = ["msgpack"] }
[dependencies.rocket_db_pools]
version = "0.1.0-rc.2"
features = ["sqlx_sqlite"]

[dependencies.sqlx]
version = "0.5.13"
default-features = false
features = ["macros", "offline", "migrate", "sqlite"]
39 Dockerfile
@@ -1,8 +1,10 @@
##### Builder ####
FROM rust:1.87-alpine3.20 AS builder
FROM rust:1.64-alpine as builder

# Install dependencies
RUN apk add --no-cache sqlite npm musl-dev fd minify && npm install -g typescript
RUN apk add --no-cache sqlite npm musl-dev fd minify

# Install Typescript
RUN npm install -g typescript

WORKDIR /usr/src

@@ -22,10 +24,13 @@ RUN cargo build --release
COPY src /usr/src/huellas/src/
COPY migrations /usr/src/huellas/migrations/
COPY db /usr/src/huellas/db/
COPY .env /usr/src/huellas/
COPY .env sqlx-data.json Rocket.toml /usr/src/huellas/

## Touch main.rs to prevent cached release build and then build
RUN touch /usr/src/huellas/src/main.rs && cargo build --release
## Touch main.rs to prevent cached release build
RUN touch /usr/src/huellas/src/main.rs

# This is the actual application build.
RUN cargo build --release

# Now TS client
COPY ts-client /usr/src/huellas/ts-client/

@@ -34,26 +39,30 @@ COPY ts-client /usr/src/huellas/ts-client/
WORKDIR /usr/src/huellas/ts-client/

# Install dependencies
RUN npm ci
RUN npm install

# Transpile and delete the first line of javascript ts-client
RUN tsc && sed -i '1,2d' build/client.js
# Transpile
RUN tsc

# Delete the first line of jvascript ts-client
RUN sed -i '1d' build/client.js

# Minify static files
COPY static /usr/src/huellas/static/
RUN fd -e html . '/usr/src/huellas/static/' -x minify -r -o {} {} \
    && fd -e js . '/usr/src/huellas/ts-client/build/' -x minify -r -o {} {}
RUN fd -e html . '/usr/src/huellas/static/' -x minify -r -o {} {}
RUN fd -e js . '/usr/src/huellas/ts-client/build/' -x minify -r -o {} {}

################
##### Runtime
FROM alpine:3.20 AS runtime
FROM alpine:3.16 AS Runtime

RUN apk add --no-cache sqlite

# Copy application binary from builder image
COPY --from=builder /usr/src/huellas/target/release/huellas /usr/local/bin
# Copy .env
COPY .env /usr/local/bin
# Copy Rocket.toml
COPY Rocket.toml /usr/local/bin

# Copy static files
COPY --from=builder /usr/src/huellas/static /usr/local/bin/static/
# Copy javascript client

@@ -61,4 +70,4 @@ COPY --from=builder /usr/src/huellas/ts-client/build/client.js /usr/local/bin/st

# Run the application
WORKDIR /usr/local/bin
CMD ["/usr/local/bin/huellas", "server"]
CMD ["/usr/local/bin/huellas"]
32 README.md
@@ -1,32 +1,2 @@
# huellas
This service is backed by an Axum server and uses a raw Typescript front-end using
Leaflet.js.
## Development
To run the application locally, just do
```shell
cargo run
```
To compile the front-end code, go to the `ts-client` folder, install the dependencies
with
```shell
npm install
```
and then run
```
make
```
### Install git hooks
Run the following from the project root:
```shell
hooks/install.sh
```
### Migrations
We use the `sqlx` CLI to manage migrations. To create a new one run
```shell
cargo sqlx migrate add
```
## Cross-architecture building
Images are built for arm64 on a juicier machine using
```shell
docker buildx build --platform=linux/arm64 . -t oolong.ludwig.dog/pitbuster/huellas:X.Y.Z
```
4 Rocket.toml Normal file
@@ -0,0 +1,4 @@
[default.databases.db]
url = "sqlite://db/huellas.db"
[release]
address = "0.0.0.0"
2 build.rs
@@ -2,4 +2,4 @@
fn main() {
    // trigger recompilation when a new migration is added
    println!("cargo:rerun-if-changed=migrations");
}
}
79 cliff.toml
@ -1,79 +0,0 @@
|
|||
[changelog]
|
||||
# changelog header
|
||||
header = """
|
||||
# Changelog\n
|
||||
"""
|
||||
# template for the changelog body
|
||||
# https://tera.netlify.app/docs
|
||||
body = """
|
||||
{% if version %}\
|
||||
## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
|
||||
{% else %}\
|
||||
## [unreleased]
|
||||
{% endif %}\
|
||||
{% for group, commits in commits | group_by(attribute="group") %}
|
||||
### {{ group | upper_first }}
|
||||
{% for commit in commits %}
|
||||
- {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message | upper_first }}\
|
||||
{% endfor %}
|
||||
{% endfor %}\n
|
||||
"""
|
||||
# remove the leading and trailing whitespace from the template
|
||||
trim = true
|
||||
# changelog footer
|
||||
footer = """
|
||||
"""
|
||||
# postprocessors
|
||||
postprocessors = [
|
||||
{ pattern = '<REPO>', replace = "https://oolong.ludwig.dog/pitbuster/huellas" },
|
||||
]
|
||||
[git]
|
||||
# parse the commits based on https://www.conventionalcommits.org
|
||||
conventional_commits = true
|
||||
# filter out the commits that are not conventional
|
||||
filter_unconventional = true
|
||||
# process each line of a commit as an individual commit
|
||||
split_commits = false
|
||||
# regex for preprocessing the commit messages
|
||||
commit_preprocessors = [
|
||||
{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](<REPO>/issues/${2}))" }, # replace issue numbers
|
||||
]
|
||||
# regex for parsing and grouping commits
|
||||
commit_parsers = [
|
||||
{ message = "^feat", group = "Features" },
|
||||
{ message = "^fix", group = "Bug Fixes" },
|
||||
{ message = "^doc", group = "Documentation" },
|
||||
{ message = "^perf", group = "Performance" },
|
||||
{ message = "^refactor", group = "Refactor" },
|
||||
{ message = "^style", group = "Styling" },
|
||||
{ message = "^test", group = "Testing" },
|
||||
{ message = "^chore\\(changelog\\):", skip = true },
|
||||
{ message = "^chore\\(release\\):", skip = true },
|
||||
{ message = "^release:", skip = true },
|
||||
{ message = "^chore\\(deps\\)", skip = true },
|
||||
{ message = "^chore\\(pr\\)", skip = true },
|
||||
{ message = "^chore\\(pull\\)", skip = true },
|
||||
{ message = "^chore|ci", group = "Miscellaneous Tasks" },
|
||||
{ body = ".*security", group = "Security" },
|
||||
{ message = "^revert", group = "Revert" },
|
||||
]
|
||||
# extract external references
|
||||
link_parsers = [
|
||||
{ pattern = "#(\\d+)", href = "https://oolong.ludwig.dog/pitbuster/huellas/issues/$1" },
|
||||
]
|
||||
# protect breaking changes from being skipped due to matching a skipping commit_parser
|
||||
protect_breaking_commits = false
|
||||
# filter out the commits that are not matched by commit parsers
|
||||
filter_commits = false
|
||||
# glob pattern for matching git tags
|
||||
tag_pattern = "[0-9]*"
|
||||
# regex for skipping tags
|
||||
skip_tags = "v0.1.0-beta.1"
|
||||
# regex for ignoring tags
|
||||
ignore_tags = ""
|
||||
# sort the tags topologically
|
||||
topo_order = false
|
||||
# sort the commits inside sections by oldest/newest order
|
||||
sort_commits = "oldest"
|
||||
# limit the number of commits included in the changelog.
|
||||
# limit_commits = 42
|
||||
|
|
@@ -1,9 +0,0 @@
version: '3'
services:
  huellas:
    restart: no
    image: oolong.ludwig.dog/pitbuster/huellas:0.2.1
    volumes:
      - ./db:/usr/local/bin/db
    ports:
      - "8059:3000"
@@ -1,11 +0,0 @@
#!/bin/bash
set -eu

if GIT_ROOT="$(git rev-parse --show-toplevel)"; then
    ln -s $GIT_ROOT/hooks/pre-commit.sh $GIT_ROOT/.git/hooks/pre-commit
    ln -s $GIT_ROOT/hooks/pre-push.sh $GIT_ROOT/.git/hooks/pre-push
else
    echo "Failed to get git root, aborting"
    exit 1
fi
@@ -1,11 +0,0 @@
#!/bin/bash
set -eu

if ! cargo fmt -- --check
then
    echo "There are some code style issues."
    echo "Run cargo fmt first."
    exit 1
fi

exit 0
@@ -1,14 +0,0 @@
#!/bin/bash
set -eu

if ! cargo clippy --all-targets -- -D warnings; then
    echo "There are some clippy issues."
    exit 1
fi

if ! cargo nextest run; then
    echo "There are some test issues."
    exit 1
fi

exit 0
@@ -1 +0,0 @@
ALTER TABLE places ADD url VARCHAR DEFAULT null;
101 sqlx-data.json Normal file
@ -0,0 +1,101 @@
|
|||
{
|
||||
"db": "SQLite",
|
||||
"3fae7e613d23f9713643829d36bab2851a9c406aa32a1f8afe1bab34d53f13e7": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"nullable": [],
|
||||
"parameters": {
|
||||
"Right": 7
|
||||
}
|
||||
},
|
||||
"query": "UPDATE places SET (name, address, open_hours, icon, description, longitude, latitude) = (?, ?, ?, ?, ?, ?, ?)"
|
||||
},
|
||||
"af66ec71413501f84c7f4cb0dd732c8ebfcd3da36a5f1177918c2277a8674c28": {
|
||||
"describe": {
|
||||
"columns": [],
|
||||
"nullable": [],
|
||||
"parameters": {
|
||||
"Right": 1
|
||||
}
|
||||
},
|
||||
"query": "UPDATE places SET active = FALSE WHERE id = ?"
|
||||
},
|
||||
"e10f7e8f125a3f60338f6c35b195517d4304304599c75e4f26f071e2a09609dc": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Int64"
|
||||
}
|
||||
],
|
||||
"nullable": [
|
||||
false
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 7
|
||||
}
|
||||
},
|
||||
"query": "INSERT INTO places (name, address, open_hours, icon, description, longitude, latitude)VALUES (?, ?, ?, ?, ?, ?, ?)RETURNING id"
|
||||
},
|
||||
"fdc2eb1d98b93f2b61c756687f1a30edf2e4a74622e23b6b72a9509a9303385d": {
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"name": "id",
|
||||
"ordinal": 0,
|
||||
"type_info": "Int64"
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"ordinal": 1,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "address",
|
||||
"ordinal": 2,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "open_hours",
|
||||
"ordinal": 3,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "icon",
|
||||
"ordinal": 4,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "description",
|
||||
"ordinal": 5,
|
||||
"type_info": "Text"
|
||||
},
|
||||
{
|
||||
"name": "longitude: f64",
|
||||
"ordinal": 6,
|
||||
"type_info": "Float"
|
||||
},
|
||||
{
|
||||
"name": "latitude: f64",
|
||||
"ordinal": 7,
|
||||
"type_info": "Float"
|
||||
}
|
||||
],
|
||||
"nullable": [
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
],
|
||||
"parameters": {
|
||||
"Right": 0
|
||||
}
|
||||
},
|
||||
"query": "SELECT id, name, address, open_hours, icon, description,longitude as \"longitude: f64\", latitude as \"latitude: f64\" FROM places WHERE active = TRUE"
|
||||
}
|
||||
}
|
||||
20 src/cli.rs
@@ -1,20 +0,0 @@
//! Cli Parameters

use clap::{Parser, Subcommand};

/// Server for saving places in a map
#[derive(Parser)]
#[command(version, about, long_about = None)]
pub struct CliArgs {
    /// Application mode
    #[command(subcommand)]
    pub mode: Mode,
}

#[derive(Subcommand)]
pub enum Mode {
    /// Spins up the server
    Server,
    /// Fires up a TUI
    Tui,
}
26 src/db.rs
@ -1,26 +0,0 @@
|
|||
//! Database handling.
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use sqlx::SqlitePool;
|
||||
|
||||
/// Creates a Database Pool
|
||||
///
|
||||
/// # Errors
|
||||
/// This function may return an error if the `DATABASE_URL` environment is not defined or if the
|
||||
/// database that URL points to is not reachable for some reason.
|
||||
pub async fn pool() -> Result<SqlitePool> {
|
||||
let db_url = std::env::var("DATABASE_URL").context("DATABASE_URL not defined")?;
|
||||
let pool = SqlitePool::connect(&db_url)
|
||||
.await
|
||||
.context("Couldn't connect to database")?;
|
||||
Ok(pool)
|
||||
}
|
||||
|
||||
/// Run migrations on the database `pool` is connected to.
|
||||
pub async fn run_migrations(pool: &SqlitePool) -> Result<()> {
|
||||
sqlx::migrate!()
|
||||
.run(pool)
|
||||
.await
|
||||
.context("Couldn't run migrations")?;
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -1,20 +0,0 @@
|
|||
//! Service logging.
|
||||
|
||||
use anyhow::Result;
|
||||
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
|
||||
|
||||
/// Setups logging.
|
||||
///
|
||||
/// # Errors
|
||||
/// This function can return an error if called repeatedly or if logging/tracing was already setup
|
||||
/// by another means.
|
||||
pub fn setup() -> Result<()> {
|
||||
tracing_subscriber::registry()
|
||||
.with(
|
||||
tracing_subscriber::EnvFilter::try_from_default_env()
|
||||
.unwrap_or_else(|_| "huellas=debug".into()),
|
||||
)
|
||||
.with(tracing_subscriber::fmt::layer())
|
||||
.try_init()?;
|
||||
Ok(())
|
||||
}
|
||||
54 src/main.rs
@@ -1,43 +1,13 @@
use anyhow::Result;
use clap::Parser;

mod cli;
mod db;
mod logging;
mod places;
mod server;
mod tui;

#[tokio::main]
async fn main() -> Result<()> {
    dotenvy::dotenv().unwrap_or_default();
    logging::setup()?;

    let args = cli::CliArgs::parse();

    match args.mode {
        cli::Mode::Server => server_mode().await?,
        cli::Mode::Tui => tui_mode().await?,
    }

    Ok(())
}

async fn server_mode() -> Result<()> {
    let pool = db::pool().await?;
    db::run_migrations(&pool).await?;

    let places_repository = places::db_repository::DbPlacesRepository::new(pool);
    let places_routes = places::routes::places_routes(places_repository);

    server::serve(places_routes).await?;
    Ok(())
}

async fn tui_mode() -> Result<()> {
    let pool = db::pool().await?;
    let places_repository = places::db_repository::DbPlacesRepository::new(pool);

    tui::tui(places_repository).await?;
    Ok(())
#[macro_use]
extern crate rocket;
use rocket::fs::FileServer;

mod place;
mod routes;

#[launch]
fn rocket() -> _ {
    rocket::build()
        .mount("/", FileServer::from("static"))
        .attach(routes::stage())
}
14 src/place.rs Normal file
@@ -0,0 +1,14 @@
use rocket::serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(crate = "rocket::serde")]
pub struct Place {
    pub id: Option<i64>,
    pub name: String,
    pub address: String,
    pub open_hours: String,
    pub icon: String,
    pub description: String,
    pub longitude: f64,
    pub latitude: f64,
}
@ -1,320 +0,0 @@
|
|||
//! `PlacesRepository` that is backed by a DB.
|
||||
|
||||
use futures::TryStreamExt;
|
||||
use sqlx::SqlitePool;
|
||||
|
||||
use crate::places::models::PlaceInsert;
|
||||
|
||||
use super::models::Place;
|
||||
use super::repository::{PlacesError, PlacesRepository};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DbPlacesRepository {
|
||||
db_pool: SqlitePool,
|
||||
}
|
||||
|
||||
impl DbPlacesRepository {
|
||||
pub fn new(db_pool: SqlitePool) -> Self {
|
||||
Self { db_pool }
|
||||
}
|
||||
}
|
||||
|
||||
impl PlacesRepository for DbPlacesRepository {
|
||||
async fn get_places(&self) -> Result<Vec<super::models::Place>, PlacesError> {
|
||||
sqlx::query!(
|
||||
r#"SELECT id, name, address, open_hours, icon, description, url,
|
||||
longitude as "longitude: f64", latitude as "latitude: f64"
|
||||
FROM places
|
||||
WHERE active = TRUE"#
|
||||
)
|
||||
.fetch(&self.db_pool)
|
||||
.map_ok(|p| Place {
|
||||
id: p.id,
|
||||
name: p.name,
|
||||
address: p.address,
|
||||
open_hours: p.open_hours,
|
||||
icon: p.icon,
|
||||
description: p.description,
|
||||
latitude: p.latitude,
|
||||
longitude: p.longitude,
|
||||
url: p.url,
|
||||
})
|
||||
.try_collect::<Vec<_>>()
|
||||
.await
|
||||
.map_err(|err| PlacesError::FailToGet(err.to_string()))
|
||||
}
|
||||
|
||||
async fn get_places_paginated(
|
||||
&self,
|
||||
offset: u32,
|
||||
limit: u8,
|
||||
) -> Result<Vec<super::models::Place>, PlacesError> {
|
||||
sqlx::query!(
|
||||
r#"SELECT id, name, address, open_hours, icon, description, url,
|
||||
longitude as "longitude: f64", latitude as "latitude: f64"
|
||||
FROM places
|
||||
WHERE active = TRUE
|
||||
ORDER BY id
|
||||
LIMIT ?
|
||||
OFFSET ?"#,
|
||||
limit,
|
||||
offset,
|
||||
)
|
||||
.fetch(&self.db_pool)
|
||||
.map_ok(|p| Place {
|
||||
id: p.id,
|
||||
name: p.name,
|
||||
address: p.address,
|
||||
open_hours: p.open_hours,
|
||||
icon: p.icon,
|
||||
description: p.description,
|
||||
latitude: p.latitude,
|
||||
longitude: p.longitude,
|
||||
url: p.url,
|
||||
})
|
||||
.try_collect::<Vec<_>>()
|
||||
.await
|
||||
.map_err(|err| PlacesError::FailToGet(err.to_string()))
|
||||
}
|
||||
|
||||
async fn insert_place(&self, place: PlaceInsert) -> Result<Place, PlacesError> {
|
||||
let id = sqlx::query_scalar!(
|
||||
r#"INSERT INTO places
|
||||
(name, address, open_hours, icon, description, longitude, latitude, url)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
RETURNING id"#,
|
||||
place.name,
|
||||
place.address,
|
||||
place.open_hours,
|
||||
place.icon,
|
||||
place.description,
|
||||
place.longitude,
|
||||
place.latitude,
|
||||
place.url
|
||||
)
|
||||
.fetch_one(&self.db_pool)
|
||||
.await
|
||||
.map_err(|err| PlacesError::FailToUpsert(err.to_string()))?;
|
||||
|
||||
Ok((place, id).into())
|
||||
}
|
||||
|
||||
async fn update_place(&self, place: Place) -> Result<Place, PlacesError> {
|
||||
let result = sqlx::query!(
|
||||
r#"UPDATE places
|
||||
SET (name, address, open_hours, icon, description, longitude, latitude, url)
|
||||
= (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
WHERE id = ?"#,
|
||||
place.name,
|
||||
place.address,
|
||||
place.open_hours,
|
||||
place.icon,
|
||||
place.description,
|
||||
place.longitude,
|
||||
place.latitude,
|
||||
place.url,
|
||||
place.id,
|
||||
)
|
||||
.execute(&self.db_pool)
|
||||
.await
|
||||
.map_err(|err| PlacesError::FailToUpsert(err.to_string()))?;
|
||||
|
||||
if result.rows_affected() == 1 {
|
||||
Ok(place)
|
||||
} else {
|
||||
Err(PlacesError::NotFound(place.id))
|
||||
}
|
||||
}
|
||||
|
||||
async fn delete_place(&self, id: i64) -> Result<(), PlacesError> {
|
||||
let result = ::sqlx::query!("UPDATE places SET active = FALSE WHERE id = ?", id)
|
||||
.execute(&self.db_pool)
|
||||
.await
|
||||
.map_err(|err| PlacesError::FailToDelete(err.to_string()))?;
|
||||
|
||||
if result.rows_affected() == 1 {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(PlacesError::NotFound(id))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mod tests {
|
||||
#![cfg(test)]
|
||||
use super::DbPlacesRepository;
|
||||
use crate::places::models::PlaceInsert;
|
||||
use crate::places::repository::{PlacesError, PlacesRepository};
|
||||
|
||||
use anyhow::Result;
|
||||
use futures::future::try_join_all;
|
||||
use sqlx::sqlite::SqlitePool;
|
||||
|
||||
#[sqlx::test]
|
||||
async fn test_add_place(pool: SqlitePool) -> Result<()> {
|
||||
let repository = DbPlacesRepository::new(pool);
|
||||
let place = PlaceInsert {
|
||||
name: "Sherlock Holmes".to_owned(),
|
||||
address: "221 B Baker Street, London".to_owned(),
|
||||
description: "Museum and Gift Shop".to_owned(),
|
||||
icon: "museum".to_owned(),
|
||||
latitude: 51.5237669,
|
||||
longitude: -0.1627829,
|
||||
open_hours: "Tu-Su 09:30-18:00".to_owned(),
|
||||
url: Some("https://www.sherlock-holmes.co.uk/".to_owned()),
|
||||
};
|
||||
// Insert the place
|
||||
let res_place = repository.insert_place(place.clone()).await?;
|
||||
let (res_place, _) = res_place.into();
|
||||
// And now they should be equal
|
||||
assert_eq!(place, res_place);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[sqlx::test]
|
||||
async fn test_get_places(pool: SqlitePool) -> Result<()> {
|
||||
let repository = DbPlacesRepository::new(pool);
|
||||
let places = vec![
|
||||
PlaceInsert {
|
||||
name: "Sherlock Holmes".to_owned(),
|
||||
address: "221 B Baker Street, London".to_owned(),
|
||||
description: "Museum and Gift Shop".to_owned(),
|
||||
icon: "museum".to_owned(),
|
||||
latitude: 51.5237669,
|
||||
longitude: -0.1627829,
|
||||
open_hours: "Tu-Su 09:30-18:00".to_owned(),
|
||||
url: Some("https://www.sherlock-holmes.co.uk/".to_owned()),
|
||||
},
|
||||
PlaceInsert {
|
||||
name: "Museo Nacional de Historia Natural".to_owned(),
|
||||
address: "Parque Quinta Normal S/N, Santiago".to_owned(),
|
||||
description: "Museo".to_owned(),
|
||||
icon: "museum".to_owned(),
|
||||
latitude: -70.681838888889,
|
||||
longitude: -33.4421694444449,
|
||||
open_hours: "Tu-Su 10:00-18:00".to_owned(),
|
||||
url: Some("https://www.mnhn.gob.cl/".to_owned()),
|
||||
},
|
||||
];
|
||||
// insert the places
|
||||
for place in &places {
|
||||
let _res_place = repository.insert_place(place.clone()).await?;
|
||||
}
|
||||
// and fetch them
|
||||
let mut res_places = repository.get_places().await?;
|
||||
// and they should be equal
|
||||
res_places.sort_by(|a, b| a.id.cmp(&b.id));
|
||||
let res_places = res_places
|
||||
.into_iter()
|
||||
.map(|p| {
|
||||
let (p, _id): (PlaceInsert, i64) = p.into();
|
||||
p
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(places, res_places);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[sqlx::test]
|
||||
async fn test_delete(pool: SqlitePool) -> Result<()> {
|
||||
let repository = DbPlacesRepository::new(pool);
|
||||
let places = vec![
|
||||
PlaceInsert {
|
||||
name: "Sherlock Holmes".to_owned(),
|
||||
address: "221 B Baker Street, London".to_owned(),
|
||||
description: "Museum and Gift Shop".to_owned(),
|
||||
icon: "museum".to_owned(),
|
||||
latitude: 51.5237669,
|
||||
longitude: -0.1627829,
|
||||
open_hours: "Tu-Su 09:30-18:00".to_owned(),
|
||||
url: Some("https://www.sherlock-holmes.co.uk/".to_owned()),
|
||||
},
|
||||
PlaceInsert {
|
||||
name: "Museo Nacional de Historia Natural".to_owned(),
|
||||
address: "Parque Quinta Normal S/N, Santiago".to_owned(),
|
||||
description: "Museo".to_owned(),
|
||||
icon: "museum".to_owned(),
|
||||
latitude: -70.681838888889,
|
||||
longitude: -33.4421694444449,
|
||||
open_hours: "Tu-Su 10:00-18:00".to_owned(),
|
||||
url: Some("https://www.mnhn.gob.cl/".to_owned()),
|
||||
},
|
||||
];
|
||||
// insert the places
|
||||
let ids = try_join_all(places.iter().map(|place| async {
|
||||
let res_place = repository.insert_place(place.clone()).await?;
|
||||
Ok::<_, PlacesError>(res_place.id)
|
||||
}))
|
||||
.await?;
|
||||
// delete the first one
|
||||
repository.delete_place(ids[0]).await?;
|
||||
|
||||
// fetch the remaining places
|
||||
let res_places = repository.get_places().await?;
|
||||
let res_places = res_places
|
||||
.into_iter()
|
||||
.map(|p| {
|
||||
let (p, _id) = p.into();
|
||||
p
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
// we should only get the second place
|
||||
assert_eq!(&places[1..], res_places.as_slice());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[sqlx::test]
|
||||
async fn test_delete_not_existing(pool: SqlitePool) -> Result<()> {
|
||||
let repository = DbPlacesRepository::new(pool);
|
||||
// Try to delete a non-existing place
|
||||
let res = repository.delete_place(33).await;
|
||||
assert!(res.is_err_and(|err| err == PlacesError::NotFound(33)));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[sqlx::test]
|
||||
async fn test_update(pool: SqlitePool) -> Result<()> {
|
||||
let repository = DbPlacesRepository::new(pool);
|
||||
let places = vec![
|
||||
PlaceInsert {
|
||||
name: "Sherlock Holmes".to_owned(),
|
||||
address: "221 B Baker Street, London".to_owned(),
|
||||
description: "Museum and Gift Shop".to_owned(),
|
||||
icon: "museum".to_owned(),
|
||||
latitude: 51.5237669,
|
||||
longitude: -0.1627829,
|
||||
open_hours: "Tu-Su 09:30-18:00".to_owned(),
|
||||
url: Some("https://www.sherlock-holmes.co.uk/".to_owned()),
|
||||
},
|
||||
PlaceInsert {
|
||||
name: "Museo Nacional de Historia Natural".to_owned(),
|
||||
address: "Parque Quinta Normal S/N, Santiago".to_owned(),
|
||||
description: "Museo".to_owned(),
|
||||
icon: "museum".to_owned(),
|
||||
latitude: -70.681838888889,
|
||||
longitude: -33.4421694444449,
|
||||
open_hours: "Tu-Su 10:00-18:00".to_owned(),
|
||||
url: Some("https://www.mnhn.gob.cl/".to_owned()),
|
||||
},
|
||||
];
|
||||
// insert original place
|
||||
let res = repository.insert_place(places[0].clone()).await?;
|
||||
// Add the returned ID to the new place so we can do the update
|
||||
let place = (places[1].clone(), res.id).into();
|
||||
// update the place
|
||||
let _res = repository.update_place(place).await?;
|
||||
|
||||
// fetch the places
|
||||
let res_places = repository.get_places().await?;
|
||||
let res_places = res_places
|
||||
.into_iter()
|
||||
.map(|p| {
|
||||
let (p, _id) = p.into();
|
||||
p
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
// we should get the updated place
|
||||
assert_eq!(&places[1..], res_places.as_slice());
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
@@ -1,4 +0,0 @@
pub mod db_repository;
pub mod models;
pub mod repository;
pub mod routes;
@ -1,119 +0,0 @@
|
|||
/// Models
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Place can be any place of interest we want to mark in a map
|
||||
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||
pub struct Place {
|
||||
pub id: i64,
|
||||
/// Name
|
||||
pub name: String,
|
||||
/// Address
|
||||
pub address: String,
|
||||
/// Opening Hours
|
||||
pub open_hours: String,
|
||||
/// Icon name
|
||||
pub icon: String,
|
||||
/// Description
|
||||
pub description: String,
|
||||
/// Longitude of the place
|
||||
pub longitude: f64,
|
||||
/// latitude of the place
|
||||
pub latitude: f64,
|
||||
/// URL for the place website
|
||||
pub url: Option<String>,
|
||||
}
|
||||
|
||||
/// Insert Place payload
|
||||
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||
pub struct PlaceInsert {
|
||||
/// Name
|
||||
pub name: String,
|
||||
/// Address
|
||||
pub address: String,
|
||||
/// Opening Hours
|
||||
pub open_hours: String,
|
||||
/// Icon name
|
||||
pub icon: String,
|
||||
/// Description
|
||||
pub description: String,
|
||||
/// Longitude of the place
|
||||
pub longitude: f64,
|
||||
/// latitude of the place
|
||||
pub latitude: f64,
|
||||
/// URL for the place website
|
||||
pub url: Option<String>,
|
||||
}
|
||||
|
||||
/// UpsertPlace payload
|
||||
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
|
||||
pub struct PlaceUpsert {
|
||||
pub id: Option<i64>,
|
||||
/// Name
|
||||
pub name: String,
|
||||
/// Address
|
||||
pub address: String,
|
||||
/// Opening Hours
|
||||
pub open_hours: String,
|
||||
/// Icon name
|
||||
pub icon: String,
|
||||
/// Description
|
||||
pub description: String,
|
||||
/// Longitude of the place
|
||||
pub longitude: f64,
|
||||
/// latitude of the place
|
||||
pub latitude: f64,
|
||||
/// URL for the place website
|
||||
pub url: Option<String>,
|
||||
}
|
||||
|
||||
impl From<(PlaceInsert, i64)> for Place {
|
||||
fn from((place, id): (PlaceInsert, i64)) -> Self {
|
||||
Self {
|
||||
id,
|
||||
name: place.name,
|
||||
address: place.address,
|
||||
open_hours: place.open_hours,
|
||||
icon: place.icon,
|
||||
description: place.description,
|
||||
longitude: place.longitude,
|
||||
latitude: place.latitude,
|
||||
url: place.url,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PlaceUpsert> for (PlaceInsert, Option<i64>) {
|
||||
fn from(place: PlaceUpsert) -> Self {
|
||||
(
|
||||
PlaceInsert {
|
||||
name: place.name,
|
||||
address: place.address,
|
||||
open_hours: place.open_hours,
|
||||
icon: place.icon,
|
||||
description: place.description,
|
||||
longitude: place.longitude,
|
||||
latitude: place.latitude,
|
||||
url: place.url,
|
||||
},
|
||||
place.id,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Place> for (PlaceInsert, i64) {
|
||||
fn from(place: Place) -> Self {
|
||||
(
|
||||
PlaceInsert {
|
||||
name: place.name,
|
||||
address: place.address,
|
||||
open_hours: place.open_hours,
|
||||
icon: place.icon,
|
||||
description: place.description,
|
||||
longitude: place.longitude,
|
||||
latitude: place.latitude,
|
||||
url: place.url,
|
||||
},
|
||||
place.id,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
@ -1,129 +0,0 @@
|
|||
//! Places Repository
|
||||
#[cfg(test)]
|
||||
use std::sync::Arc;
|
||||
|
||||
use thiserror::Error;
|
||||
#[cfg(test)]
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
use super::models::{Place, PlaceInsert};
|
||||
|
||||
/// Trait to handle Places.
|
||||
pub trait PlacesRepository: Clone + Send + Sync + 'static {
|
||||
/// Get all of the Places
|
||||
fn get_places(&self) -> impl Future<Output = Result<Vec<Place>, PlacesError>> + Send;
|
||||
|
||||
/// Get all of the Places
|
||||
fn get_places_paginated(
|
||||
&self,
|
||||
offset: u32,
|
||||
limit: u8,
|
||||
) -> impl Future<Output = Result<Vec<Place>, PlacesError>> + Send;
|
||||
|
||||
/// Inserts a Place.
|
||||
fn insert_place(
|
||||
&self,
|
||||
place: PlaceInsert,
|
||||
) -> impl Future<Output = Result<Place, PlacesError>> + Send;
|
||||
|
||||
/// Updates a Place.
|
||||
fn update_place(&self, place: Place)
|
||||
-> impl Future<Output = Result<Place, PlacesError>> + Send;
|
||||
|
||||
/// Deletes the place for the given `id`.
|
||||
fn delete_place(&self, id: i64) -> impl Future<Output = Result<(), PlacesError>> + Send;
|
||||
}
|
||||
|
||||
#[derive(Debug, Error, PartialEq)]
|
||||
pub enum PlacesError {
|
||||
#[error("Couldn't retrieve places: {0}")]
|
||||
FailToGet(String),
|
||||
#[error("Couldn't upsert place: {0}")]
|
||||
FailToUpsert(String),
|
||||
#[error("Couldn't delete place: {0}")]
|
||||
FailToDelete(String),
|
||||
#[error("Place with id {0} not found")]
|
||||
NotFound(i64),
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[derive(Clone)]
|
||||
pub struct MockPlacesRepository {
|
||||
get_places_count: Arc<RwLock<usize>>,
|
||||
get_places_paginated_count: Arc<RwLock<usize>>,
|
||||
insert_place_count: Arc<RwLock<usize>>,
|
||||
update_place_count: Arc<RwLock<usize>>,
|
||||
delete_place_count: Arc<RwLock<usize>>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
impl MockPlacesRepository {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
get_places_count: Arc::new(RwLock::new(0)),
|
||||
get_places_paginated_count: Arc::new(RwLock::new(0)),
|
||||
insert_place_count: Arc::new(RwLock::new(0)),
|
||||
update_place_count: Arc::new(RwLock::new(0)),
|
||||
delete_place_count: Arc::new(RwLock::new(0)),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_places_count(&self) -> usize {
|
||||
*self.get_places_count.read().await
|
||||
}
|
||||
|
||||
#[expect(dead_code)]
|
||||
pub async fn get_places_paginated_count(&self) -> usize {
|
||||
*self.get_places_paginated_count.read().await
|
||||
}
|
||||
|
||||
pub async fn insert_place_count(&self) -> usize {
|
||||
*self.insert_place_count.read().await
|
||||
}
|
||||
|
||||
pub async fn update_place_count(&self) -> usize {
|
||||
*self.update_place_count.read().await
|
||||
}
|
||||
|
||||
pub async fn delete_place_count(&self) -> usize {
|
||||
*self.delete_place_count.read().await
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
impl PlacesRepository for MockPlacesRepository {
|
||||
async fn get_places(&self) -> Result<Vec<Place>, PlacesError> {
|
||||
let mut get_places_count = self.get_places_count.write().await;
|
||||
*get_places_count += 1;
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
async fn get_places_paginated(
|
||||
&self,
|
||||
_offset: u32,
|
||||
_limit: u8,
|
||||
) -> Result<Vec<Place>, PlacesError> {
|
||||
let mut get_places_paginated_count = self.get_places_paginated_count.write().await;
|
||||
*get_places_paginated_count += 1;
|
||||
Ok(Vec::new())
|
||||
}
|
||||
|
||||
async fn insert_place(&self, place: super::models::PlaceInsert) -> Result<Place, PlacesError> {
|
||||
let mut insert_place_count = self.insert_place_count.write().await;
|
||||
*insert_place_count += 1;
|
||||
let place: Place = (place, 0).into();
|
||||
Ok(place)
|
||||
}
|
||||
|
||||
async fn update_place(&self, place: Place) -> Result<Place, PlacesError> {
|
||||
let mut update_place_count = self.update_place_count.write().await;
|
||||
*update_place_count += 1;
|
||||
Ok(place)
|
||||
}
|
||||
|
||||
async fn delete_place(&self, _id: i64) -> Result<(), PlacesError> {
|
||||
let mut delete_place_count = self.delete_place_count.write().await;
|
||||
*delete_place_count += 1;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
@ -1,145 +0,0 @@
|
|||
use axum::Router;
|
||||
use axum::extract::{Path, State};
|
||||
use axum::http::StatusCode;
|
||||
use axum::routing::{delete, get, put};
|
||||
use axum_msgpack::MsgPack;
|
||||
|
||||
use super::models::{Place, PlaceUpsert};
|
||||
use super::repository::{PlacesError, PlacesRepository};
|
||||
|
||||
type Result<T, E = (StatusCode, String)> = std::result::Result<T, E>;
|
||||
|
||||
fn internal_error(err: PlacesError) -> (StatusCode, String) {
|
||||
match err {
|
||||
PlacesError::FailToGet(_) | PlacesError::FailToUpsert(_) | PlacesError::FailToDelete(_) => {
|
||||
(StatusCode::INTERNAL_SERVER_ERROR, err.to_string())
|
||||
}
|
||||
PlacesError::NotFound(_) => (StatusCode::NOT_FOUND, err.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_places<PR: PlacesRepository>(
|
||||
State(repository): State<PR>,
|
||||
) -> Result<MsgPack<Vec<Place>>> {
|
||||
let places = repository.get_places().await.map_err(internal_error)?;
|
||||
Ok(MsgPack(places))
|
||||
}
|
||||
|
||||
async fn upsert_place<PR: PlacesRepository>(
|
||||
State(repository): State<PR>,
|
||||
MsgPack(place): MsgPack<PlaceUpsert>,
|
||||
) -> Result<MsgPack<Place>> {
|
||||
let place = match place.into() {
|
||||
(place, Some(id)) => repository.update_place((place, id).into()).await,
|
||||
(place, None) => repository.insert_place(place).await,
|
||||
}
|
||||
.map_err(internal_error)?;
|
||||
Ok(MsgPack(place))
|
||||
}
|
||||
|
||||
async fn delete_place<PR: PlacesRepository>(
|
||||
State(repository): State<PR>,
|
||||
Path(id): Path<i64>,
|
||||
) -> Result<()> {
|
||||
repository.delete_place(id).await.map_err(internal_error)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn places_routes<PR: PlacesRepository>(repository: PR) -> Router {
|
||||
Router::new()
|
||||
.route("/", get(get_places::<PR>))
|
||||
.route("/", put(upsert_place::<PR>))
|
||||
.route("/{id}", delete(delete_place::<PR>))
|
||||
.with_state(repository)
|
||||
}
|
||||
|
||||
mod tests {
|
||||
#![cfg(test)]
|
||||
use super::places_routes;
|
||||
use crate::places::models::{Place, PlaceUpsert};
|
||||
use crate::places::repository::MockPlacesRepository;
|
||||
|
||||
use anyhow::Result;
|
||||
use axum::Router;
|
||||
use axum::http::StatusCode;
|
||||
use axum_test::TestServer;
|
||||
|
||||
fn setup_server() -> Result<(TestServer, MockPlacesRepository)> {
|
||||
let places_repository = MockPlacesRepository::new();
|
||||
let router = Router::new().nest("/places", places_routes(places_repository.clone()));
|
||||
Ok((TestServer::new(router)?, places_repository))
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_add_place() -> Result<()> {
|
||||
let (server, mock_repository) = setup_server()?;
|
||||
let place = PlaceUpsert {
|
||||
id: None,
|
||||
name: "Sherlock Holmes".to_owned(),
|
||||
address: "221 B Baker Street, London".to_owned(),
|
||||
description: "Museum and Gift Shop".to_owned(),
|
||||
icon: "museum".to_owned(),
|
||||
latitude: 51.5237669,
|
||||
longitude: -0.1627829,
|
||||
open_hours: "Tu-Su 09:30-18:00".to_owned(),
|
||||
url: Some("https://www.sherlock-holmes.co.uk/".to_owned()),
|
||||
};
|
||||
// Insert the place
|
||||
let res = server.put("/places").msgpack(&place).await;
|
||||
// We should get a success on the request
|
||||
assert_eq!(res.status_code(), StatusCode::OK);
|
||||
let _res_place: Place = res.msgpack();
|
||||
// The correct function should be called
|
||||
assert_eq!(mock_repository.insert_place_count().await, 1);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_get_places() -> Result<()> {
|
||||
let (server, mock_repository) = setup_server()?;
|
||||
// Get the places
|
||||
let res = server.get("/places").await;
|
||||
// We should get a success on the request
|
||||
assert_eq!(res.status_code(), StatusCode::OK);
|
||||
let _res_places: Vec<Place> = res.msgpack();
|
||||
// and the correct function should be called
|
||||
assert_eq!(mock_repository.get_places_count().await, 1);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_delete() -> Result<()> {
|
||||
let (server, mock_repository) = setup_server()?;
|
||||
// Call delete
|
||||
let res = server.delete("/places/0").await;
|
||||
// We should get a success on the request
|
||||
assert_eq!(res.status_code(), StatusCode::OK);
|
||||
// The correct function should be called
|
||||
assert_eq!(mock_repository.delete_place_count().await, 1);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_update() -> Result<()> {
|
||||
let (server, mock_repository) = setup_server()?;
|
||||
let places = PlaceUpsert {
|
||||
id: Some(1),
|
||||
name: "Sherlock Holmes".to_owned(),
|
||||
address: "221 B Baker Street, London".to_owned(),
|
||||
description: "Museum and Gift Shop".to_owned(),
|
||||
icon: "museum".to_owned(),
|
||||
latitude: 51.5237669,
|
||||
longitude: -0.1627829,
|
||||
open_hours: "Tu-Su 09:30-18:00".to_owned(),
|
||||
url: Some("https://www.sherlock-holmes.co.uk/".to_owned()),
|
||||
};
|
||||
// upsert the place
|
||||
let res = server.put("/places").msgpack(&places).await;
|
||||
// We should get a success on the request
|
||||
assert_eq!(res.status_code(), StatusCode::OK);
|
||||
let _res_place: Place = res.msgpack();
|
||||
// The correct function should be called
|
||||
assert_eq!(mock_repository.update_place_count().await, 1);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
131 src/routes.rs Normal file
@ -0,0 +1,131 @@
|
|||
use rocket::fairing::{self, AdHoc};
|
||||
use rocket::response::status::{Accepted, Created, NotFound};
|
||||
use rocket::serde::json::Json;
|
||||
use rocket::{Build, Rocket};
|
||||
use rocket_db_pools::{Connection, Database};
|
||||
|
||||
use rocket::futures::stream::TryStreamExt;
|
||||
|
||||
use crate::place::Place;
|
||||
type Result<T, E = rocket::response::Debug<sqlx::Error>> = std::result::Result<T, E>;
|
||||
|
||||
#[derive(Database)]
|
||||
#[database("db")]
|
||||
struct Db(rocket_db_pools::sqlx::SqlitePool);
|
||||
|
||||
#[get("/places")]
|
||||
async fn get_places(mut db: Connection<Db>) -> Result<Json<Vec<Place>>> {
|
||||
let places = rocket_db_pools::sqlx::query!(
|
||||
"SELECT id, name, address, open_hours, icon, description," +
|
||||
r#"longitude as "longitude: f64", latitude as "latitude: f64" FROM places WHERE active = TRUE"#
|
||||
)
|
||||
.fetch(&mut *db)
|
||||
.map_ok(|p| Place {
|
||||
id: Some(p.id),
|
||||
name: p.name,
|
||||
address: p.address,
|
||||
open_hours: p.open_hours,
|
||||
icon: p.icon,
|
||||
description: p.description,
|
||||
latitude: p.latitude,
|
||||
longitude: p.longitude,
|
||||
})
|
||||
.try_collect::<Vec<_>>()
|
||||
.await?;
|
||||
|
||||
Ok(Json(places))
|
||||
}
|
||||
|
||||
#[derive(Debug, Responder)]
|
||||
enum UpsertResponse {
|
||||
Created(Created<Json<Place>>),
|
||||
Accepted(Accepted<Json<Place>>),
|
||||
NotFound(NotFound<Json<Place>>),
|
||||
}
|
||||
|
||||
#[put("/places", format = "json", data = "<place>")]
|
||||
async fn upsert_place(db: Connection<Db>, place: Json<Place>) -> Result<UpsertResponse> {
|
||||
if place.id.is_some() {
|
||||
update_place(db, place).await
|
||||
} else {
|
||||
insert_place(db, place).await
|
||||
}
|
||||
}
|
||||
|
||||
struct Id {
|
||||
id: i64,
|
||||
}
|
||||
|
||||
async fn insert_place(mut db: Connection<Db>, mut place: Json<Place>) -> Result<UpsertResponse> {
|
||||
let i = ::sqlx::query_as!(
|
||||
Id,
|
||||
"INSERT INTO places (name, address, open_hours, icon, description, longitude, latitude)\
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?)\
|
||||
RETURNING id",
|
||||
place.name,
|
||||
place.address,
|
||||
place.open_hours,
|
||||
place.icon,
|
||||
place.description,
|
||||
place.longitude,
|
||||
place.latitude
|
||||
)
|
||||
.fetch_one(&mut *db)
|
||||
.await?;
|
||||
|
||||
place.id = Some(i.id);
|
||||
Ok(UpsertResponse::Created(Created::new("/places").body(place)))
|
||||
}
|
||||
|
||||
async fn update_place(mut db: Connection<Db>, place: Json<Place>) -> Result<UpsertResponse> {
|
||||
let result = ::sqlx::query!(
|
||||
"UPDATE places SET (name, address, open_hours, icon, description, longitude, latitude) = (?, ?, ?, ?, ?, ?, ?) WHERE id = ?",
|
||||
place.name,
|
||||
place.address,
|
||||
place.open_hours,
|
||||
place.icon,
|
||||
place.description,
|
||||
place.longitude,
|
||||
place.latitude,
|
||||
place.id,
|
||||
)
|
||||
.execute(&mut *db)
|
||||
.await?;
|
||||
|
||||
if result.rows_affected() == 1 {
|
||||
Ok(UpsertResponse::Accepted(Accepted(Some(place))))
|
||||
} else {
|
||||
Ok(UpsertResponse::NotFound(NotFound(place)))
|
||||
}
|
||||
}
|
||||
|
||||
#[delete("/places/<id>")]
|
||||
async fn delete_place(mut db: Connection<Db>, id: i64) -> Result<Option<()>> {
|
||||
let result = ::sqlx::query!("UPDATE places SET active = FALSE WHERE id = ?", id)
|
||||
.execute(&mut *db)
|
||||
.await?;
|
||||
|
||||
Ok((result.rows_affected() == 1).then(|| ()))
|
||||
}
|
||||
|
||||
async fn run_migrations(rocket: Rocket<Build>) -> fairing::Result {
|
||||
match Db::fetch(&rocket) {
|
||||
Some(db) => match ::sqlx::migrate!("./migrations").run(&**db).await {
|
||||
Ok(_) => Ok(rocket),
|
||||
Err(e) => {
|
||||
error!("Failed to initialize SQLx database: {}", e);
|
||||
Err(rocket)
|
||||
}
|
||||
},
|
||||
None => Err(rocket),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stage() -> AdHoc {
|
||||
AdHoc::on_ignite("SQLx Stage", |rocket| async {
|
||||
rocket
|
||||
.attach(Db::init())
|
||||
.attach(AdHoc::try_on_ignite("SQLx Migrations", run_migrations))
|
||||
.mount("/", routes![upsert_place, get_places, delete_place])
|
||||
})
|
||||
}
|
||||
|
|
@ -1,37 +0,0 @@
|
|||
//! HTTP Server
|
||||
|
||||
use anyhow::Result;
|
||||
use axum::Router;
|
||||
use axum::serve::ListenerExt;
|
||||
use std::net::SocketAddr;
|
||||
use tower_http::services::ServeDir;
|
||||
|
||||
pub async fn serve(place_routes: Router) -> Result<()> {
|
||||
let port = std::env::var("PORT").unwrap_or_default();
|
||||
let port = str::parse(&port).unwrap_or(3000);
|
||||
let address = SocketAddr::from(([0, 0, 0, 0], port));
|
||||
|
||||
let routes = Router::new()
|
||||
.nest("/places", place_routes)
|
||||
.fallback_service(ServeDir::new("static"));
|
||||
|
||||
tracing::debug!("listening on {}", address);
|
||||
let listener = tokio::net::TcpListener::bind(address)
|
||||
.await?
|
||||
.tap_io(|tcp_stream| {
|
||||
if let Err(err) = tcp_stream.set_nodelay(true) {
|
||||
tracing::trace!("failed to set TCP_NODELAY on incoming connection: {err:#}");
|
||||
}
|
||||
});
|
||||
axum::serve(listener, routes.into_make_service())
|
||||
.with_graceful_shutdown(shutdown_signal())
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn shutdown_signal() {
|
||||
tokio::signal::ctrl_c()
|
||||
.await
|
||||
.expect("failed to listen for ctrl-c");
|
||||
tracing::debug!("Received shutdown signal");
|
||||
}
|
||||
|
|
@ -1,41 +0,0 @@
|
|||
//! Keyboard handling
|
||||
|
||||
use crossterm::event::{KeyCode, KeyEvent, KeyModifiers};
|
||||
|
||||
use super::state::{Mode, State};
|
||||
|
||||
/// Event handling
|
||||
pub async fn handle_key(state: &mut State, key_event: KeyEvent) {
|
||||
if state.confirmation.is_some() {
|
||||
match key_event.code {
|
||||
KeyCode::Char('y') => state.proceed_confirmation().await,
|
||||
KeyCode::Char('n') | KeyCode::Esc => state.cancel_confirmation(),
|
||||
_ => {}
|
||||
};
|
||||
return;
|
||||
}
|
||||
match state.mode {
|
||||
Mode::List => match key_event.code {
|
||||
KeyCode::Char('d') => state.confirm_deletion(),
|
||||
KeyCode::Char('e') => {
|
||||
state.set_edit_mode();
|
||||
}
|
||||
KeyCode::Home => state.selected_place.select_first(),
|
||||
KeyCode::End => state.selected_place.select_last(),
|
||||
KeyCode::PageUp => state.prev_page(),
|
||||
KeyCode::PageDown => state.next_page(),
|
||||
KeyCode::Up | KeyCode::Char('k') => state.selected_place.select_previous(),
|
||||
KeyCode::Down | KeyCode::Char('j') => state.selected_place.select_next(),
|
||||
KeyCode::Esc | KeyCode::Char('q') => state.quit = true,
|
||||
_ => {}
|
||||
},
|
||||
Mode::Edit => match (key_event.modifiers, key_event.code) {
|
||||
(_, KeyCode::Esc) => state.set_list_mode(),
|
||||
(_, KeyCode::Tab) => state.edit_next(),
|
||||
|
||||
(_, KeyCode::BackTab) => state.edit_prev(),
|
||||
(KeyModifiers::CONTROL, KeyCode::Char('s')) => state.start_save(),
|
||||
_ => state.edit_input(key_event),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
|
@@ -1,46 +0,0 @@
//! TUI

pub mod keys;
pub mod state;
pub mod terminal;
pub mod ui;

use anyhow::Result;

use state::State;
use terminal::Event;
use ui::UI;

use crate::places::db_repository::DbPlacesRepository;

/// Fires up the UI
pub async fn tui(places_repository: DbPlacesRepository) -> Result<()> {
    let (_, terminal_height) = crossterm::terminal::size()?;
    let mut state = State::new(places_repository, terminal_height);
    let mut ui = UI::new(&state);
    let mut tui = terminal::Tui::new()?;

    let result = loop {
        match tui.next().await? {
            Event::Key(key_event) => keys::handle_key(&mut state, key_event).await,
            Event::Render => {
                let messages = state.ui_messages.drain(0..).collect::<Vec<_>>();
                ui.handle_messages(&mut state, messages);
                tui.draw(|frame| ui.draw(&mut state, frame))?;
            }
            Event::Tick => {
                state.fetch_places().await;
            }
            Event::Resize(_, h) => state.height = h,
            Event::Quit => state.quit = true,
            _ => {}
        }
        if state.quit {
            break Ok(());
        }
    };

    tui.stop()?;

    result
}
src/tui/state.rs
@@ -1,184 +0,0 @@
//! TUI state

use ratatui::crossterm::event::KeyEvent;
use ratatui::widgets::TableState;

use crate::places::db_repository::DbPlacesRepository;
use crate::places::models::Place;
use crate::places::repository::PlacesRepository;

use super::ui::Message;

pub struct State {
    pub height: u16,
    pub page: u32,
    pub mode: Mode,
    places_repository: DbPlacesRepository,
    pub places: Vec<Place>,
    places_status: DataStatus,
    pub confirmation: Option<ConfirmationStatus>,
    pub selected_place: TableState,
    pub ui_messages: Vec<Message>,
    pub quit: bool,
}

#[derive(Copy, Clone)]
pub enum Mode {
    List,
    Edit,
}

enum DataStatus {
    Fresh,
    Old,
}

pub enum ConfirmationStatus {
    Deletion(i64),
    Save(Place),
}

impl State {
    pub fn new(places_repository: DbPlacesRepository, height: u16) -> Self {
        Self {
            height,
            page: 0,
            mode: Mode::List,
            places_repository,
            places_status: DataStatus::Old,
            places: vec![],
            selected_place: TableState::default(),
            confirmation: None,
            ui_messages: Vec::new(),
            quit: false,
        }
    }

    pub fn set_list_mode(&mut self) {
        self.mode = Mode::List;
        self.push_mode_change();
    }

    pub fn set_edit_mode(&mut self) {
        self.mode = Mode::Edit;
        self.push_mode_change();

        let Some(selection) = self.selected_place.selected() else {
            return;
        };
        let Some(place) = self.places.get(selection) else {
            return;
        };
        self.ui_messages.push(Message::EditPlace(place.clone()))
    }

    fn push_mode_change(&mut self) {
        self.ui_messages.push(Message::UpdateAppMode(self.mode));
    }

    pub fn next_page(&mut self) {
        self.page += 1;
        self.places_status = DataStatus::Old;
        self.push_page_change();
    }

    pub fn prev_page(&mut self) {
        self.page = self.page.saturating_sub(1);
        self.places_status = DataStatus::Old;
        self.push_page_change();
    }

    fn push_page_change(&mut self) {
        self.ui_messages.push(Message::UpdatePage(self.page));
    }

    pub fn edit_next(&mut self) {
        self.ui_messages.push(Message::EditNext);
    }

    pub fn edit_prev(&mut self) {
        self.ui_messages.push(Message::EditPrev);
    }

    pub fn edit_input(&mut self, key_event: KeyEvent) {
        self.ui_messages.push(Message::Input(key_event));
    }

    pub async fn fetch_places(&mut self) {
        if let DataStatus::Fresh = self.places_status {
            return;
        }
        let limit = (self.height as u8).saturating_sub(3);
        let offset = (limit as u32) * self.page;
        match self
            .places_repository
            .get_places_paginated(offset, limit)
            .await
        {
            Ok(places) => {
                self.places = places;
                if !self.places.is_empty() {
                    self.selected_place.select(Some(0));
                }
            }
            Err(err) => {
                tracing::error!("{err}");
            }
        }
        self.places_status = DataStatus::Fresh;
        self.ui_messages
            .push(Message::UpdatePlaces(self.places.clone()))
    }

    pub fn confirm_deletion(&mut self) {
        if let Some(Some(id)) = self
            .selected_place
            .selected()
            .map(|index| self.places.get(index).map(|p| p.id))
        {
            self.confirmation = Some(ConfirmationStatus::Deletion(id))
        }
    }

    pub fn start_save(&mut self) {
        if let Some(Some(id)) = self
            .selected_place
            .selected()
            .map(|index| self.places.get(index).map(|p| p.id))
        {
            self.ui_messages.push(Message::SavePlace(id));
        }
    }

    pub fn confirm_save(&mut self, place: Place) {
        self.confirmation = Some(ConfirmationStatus::Save(place));
    }

    pub fn cancel_confirmation(&mut self) {
        self.confirmation = None;
    }

    pub async fn proceed_confirmation(&mut self) {
        let Some(confirmation) = &self.confirmation else {
            return;
        };

        match confirmation {
            ConfirmationStatus::Deletion(id) => {
                if let Err(err) = self.places_repository.delete_place(*id).await {
                    tracing::error!("{err}");
                }
            }
            ConfirmationStatus::Save(place) => {
                if let Err(err) = self.places_repository.update_place(place.clone()).await {
                    tracing::error!("{err}");
                }
                self.mode = Mode::List;
                self.push_mode_change();
            }
        }

        self.confirmation = None;
        self.places_status = DataStatus::Old;
    }
}
@ -1,189 +0,0 @@
|
|||
use std::ops::{Deref, DerefMut};
|
||||
|
||||
use anyhow::{Result, anyhow};
|
||||
use crossterm::event::{Event as CrosstermEvent, KeyEvent, KeyEventKind, MouseEvent};
|
||||
use crossterm::terminal::{EnterAlternateScreen, LeaveAlternateScreen};
|
||||
use futures::{FutureExt, StreamExt};
|
||||
use ratatui::Terminal;
|
||||
use ratatui::prelude::CrosstermBackend;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::sync::mpsc::{UnboundedReceiver, unbounded_channel};
|
||||
use tokio::task::JoinHandle;
|
||||
use tokio_util::sync::CancellationToken;
|
||||
|
||||
/// Terminal events.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub enum Event {
|
||||
Init,
|
||||
Quit,
|
||||
Error,
|
||||
Closed,
|
||||
/// Triggers background actions
|
||||
Tick,
|
||||
/// UI Render
|
||||
Render,
|
||||
FocusGained,
|
||||
FocusLost,
|
||||
Paste(String),
|
||||
/// Key Press
|
||||
Key(KeyEvent),
|
||||
Mouse(MouseEvent),
|
||||
/// Terminal Resize
|
||||
Resize(u16, u16),
|
||||
}
|
||||
|
||||
/// Terminal event handler.
|
||||
pub struct Tui {
|
||||
/// Terminal backend
|
||||
pub terminal: ratatui::Terminal<CrosstermBackend<std::io::Stderr>>,
|
||||
/// Event handler task.
|
||||
pub task: JoinHandle<Result<()>>,
|
||||
/// Cancelation token
|
||||
pub cancellation_token: CancellationToken,
|
||||
/// Event receiver channel.
|
||||
receiver: UnboundedReceiver<Event>,
|
||||
}
|
||||
|
||||
impl Tui {
|
||||
pub fn new() -> Result<Self> {
|
||||
let tick_rate = 4.0;
|
||||
let frame_rate = 20.0;
|
||||
let terminal = Terminal::new(CrosstermBackend::new(std::io::stderr()))?;
|
||||
let cancellation_token = CancellationToken::new();
|
||||
|
||||
// setup the event handling task
|
||||
let (sender, receiver) = unbounded_channel();
|
||||
let task = {
|
||||
let cancellation_token = cancellation_token.clone();
|
||||
let sender = sender.clone();
|
||||
let tick_delay = std::time::Duration::from_secs_f64(1.0 / tick_rate);
|
||||
let render_delay = std::time::Duration::from_secs_f64(1.0 / frame_rate);
|
||||
tokio::spawn(async move {
|
||||
let mut reader = crossterm::event::EventStream::new();
|
||||
let mut tick_interval = tokio::time::interval(tick_delay);
|
||||
let mut render_interval = tokio::time::interval(render_delay);
|
||||
sender.send(Event::Init)?;
|
||||
loop {
|
||||
let tick_delay = tick_interval.tick();
|
||||
let render_delay = render_interval.tick();
|
||||
let crossterm_event = reader.next().fuse();
|
||||
tokio::select! {
|
||||
_ = cancellation_token.cancelled() => {
|
||||
break;
|
||||
}
|
||||
maybe_event = crossterm_event => {
|
||||
match maybe_event {
|
||||
Some(Ok(ev)) => {
|
||||
match ev {
|
||||
CrosstermEvent::Key(key) => {
|
||||
if key.kind == KeyEventKind::Press {
|
||||
sender.send(Event::Key(key))?
|
||||
}
|
||||
},
|
||||
CrosstermEvent::Mouse(mouse) => {
|
||||
sender.send(Event::Mouse(mouse))?
|
||||
},
|
||||
CrosstermEvent::Resize(x, y)=> {
|
||||
sender.send(Event::Resize(x,y))?
|
||||
},
|
||||
CrosstermEvent::FocusLost => sender.send(Event::FocusLost)?,
|
||||
CrosstermEvent::FocusGained => sender.send(Event::FocusGained)?,
|
||||
CrosstermEvent::Paste(s) => sender.send(Event::Paste(s))?,
|
||||
}
|
||||
},
|
||||
Some(Err(_)) => sender.send(Event::Error)?,
|
||||
None => {}
|
||||
}
|
||||
},
|
||||
_ = tick_delay => {
|
||||
sender.send(Event::Tick)?;
|
||||
},
|
||||
_ = render_delay => {
|
||||
sender.send(Event::Render)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
};
|
||||
|
||||
// Setup the terminal
|
||||
crossterm::terminal::enable_raw_mode()?;
|
||||
crossterm::execute!(
|
||||
std::io::stderr(),
|
||||
EnterAlternateScreen,
|
||||
crossterm::cursor::Hide
|
||||
)?;
|
||||
|
||||
Ok(Self {
|
||||
terminal,
|
||||
cancellation_token,
|
||||
task,
|
||||
receiver,
|
||||
})
|
||||
}
|
||||
|
||||
fn cancel(&self) {
|
||||
self.cancellation_token.cancel();
|
||||
}
|
||||
|
||||
pub fn stop(&self) -> Result<()> {
|
||||
self.cancel();
|
||||
let mut counter = 0;
|
||||
while !self.task.is_finished() {
|
||||
std::thread::sleep(std::time::Duration::from_millis(10));
|
||||
counter += 1;
|
||||
if counter > 5 {
|
||||
self.task.abort();
|
||||
}
|
||||
if counter > 10 {
|
||||
return Err(anyhow!(
|
||||
"Failed to abort task in 100 milliseconds for unknown reason"
|
||||
));
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn exit(&mut self) -> Result<()> {
|
||||
self.stop()?;
|
||||
if crossterm::terminal::is_raw_mode_enabled()? {
|
||||
self.flush()?;
|
||||
crossterm::execute!(
|
||||
std::io::stderr(),
|
||||
LeaveAlternateScreen,
|
||||
crossterm::cursor::Show
|
||||
)?;
|
||||
crossterm::terminal::disable_raw_mode()?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Receive the next event from the handler task.
|
||||
pub async fn next(&mut self) -> Result<Event> {
|
||||
self.receiver
|
||||
.recv()
|
||||
.await
|
||||
.ok_or_else(|| anyhow!("Events channel was closed"))
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Tui {
|
||||
type Target = ratatui::Terminal<CrosstermBackend<std::io::Stderr>>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.terminal
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for Tui {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.terminal
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for Tui {
|
||||
fn drop(&mut self) {
|
||||
self.exit().unwrap();
|
||||
}
|
||||
}
|
||||
src/tui/ui.rs
@@ -1,487 +0,0 @@
|
|||
//! UI definition and drawing
|
||||
|
||||
use itertools::Itertools;
|
||||
use ratatui::Frame;
|
||||
use ratatui::crossterm::event::{KeyCode, KeyEvent};
|
||||
use ratatui::layout::{Constraint, Direction, Flex, Layout, Rect};
|
||||
use ratatui::style::{Color, Modifier, Style, Stylize};
|
||||
use ratatui::text::{Line, Span, Text, ToSpan};
|
||||
use ratatui::widgets::{Block, Borders, Clear, Padding, Paragraph, Row, Table};
|
||||
use tui_textarea::TextArea;
|
||||
|
||||
use crate::places::models::Place;
|
||||
|
||||
use super::state::{ConfirmationStatus, Mode, State};
|
||||
|
||||
pub enum Message {
|
||||
UpdateAppMode(Mode),
|
||||
UpdatePage(u32),
|
||||
UpdatePlaces(Vec<Place>),
|
||||
EditPlace(Place),
|
||||
EditNext,
|
||||
EditPrev,
|
||||
SavePlace(i64),
|
||||
Input(KeyEvent),
|
||||
}
|
||||
|
||||
pub struct UI {
|
||||
header: Header,
|
||||
main: Main,
|
||||
footer: Footer,
|
||||
}
|
||||
|
||||
impl UI {
|
||||
pub fn new(state: &State) -> Self {
|
||||
Self {
|
||||
header: Header::new(state),
|
||||
main: Main::new(state),
|
||||
footer: Footer::new(state),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn handle_messages<M: IntoIterator<Item = Message>>(
|
||||
&mut self,
|
||||
state: &mut State,
|
||||
messages: M,
|
||||
) {
|
||||
for m in messages {
|
||||
match m {
|
||||
Message::UpdateAppMode(mode) => {
|
||||
self.header.update_app_mode(mode);
|
||||
self.footer.update_keybindings(mode);
|
||||
}
|
||||
Message::UpdatePage(page) => self.header.update_page(page),
|
||||
Message::UpdatePlaces(places) => self.main.update_places_table(places),
|
||||
Message::EditPlace(place) => self.main.set_edit_textareas(place),
|
||||
Message::EditNext => self.main.next_textarea(),
|
||||
Message::EditPrev => self.main.prev_textarea(),
|
||||
Message::SavePlace(id) => self.main.save_place(state, id),
|
||||
Message::Input(key_event) => self.main.pass_input(key_event),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// UI drawing
|
||||
pub fn draw(&mut self, state: &mut State, f: &mut Frame<'_>) {
|
||||
let main_split = Layout::default()
|
||||
.direction(Direction::Vertical)
|
||||
.constraints([
|
||||
Constraint::Length(1),
|
||||
Constraint::Fill(1),
|
||||
Constraint::Length(1),
|
||||
])
|
||||
.split(f.area());
|
||||
|
||||
self.header.draw(f, main_split[0]);
|
||||
self.main.draw(state, f, main_split[1]);
|
||||
self.footer.draw(f, main_split[2]);
|
||||
}
|
||||
}
|
||||
|
||||
struct Header {
|
||||
app_name: Span<'static>,
|
||||
app_mode: Span<'static>,
|
||||
page: Line<'static>,
|
||||
}
|
||||
|
||||
impl Header {
|
||||
fn new(state: &State) -> Self {
|
||||
let app_name = Span::styled(
|
||||
" huellas ",
|
||||
Style::new().bg(Color::Gray).fg(Color::Black).bold(),
|
||||
);
|
||||
let app_mode = Self::get_app_mode(state.mode);
|
||||
let page = Self::get_page(state.page);
|
||||
Self {
|
||||
app_name,
|
||||
app_mode,
|
||||
page,
|
||||
}
|
||||
}
|
||||
|
||||
fn update_app_mode(&mut self, new_mode: Mode) {
|
||||
self.app_mode = Self::get_app_mode(new_mode)
|
||||
}
|
||||
|
||||
fn get_app_mode(mode: Mode) -> Span<'static> {
|
||||
match mode {
|
||||
Mode::List => " LIST ".to_span().black().on_light_green().bold(),
|
||||
Mode::Edit => " EDIT ".to_span().black().on_light_blue().bold(),
|
||||
}
|
||||
}
|
||||
|
||||
fn update_page(&mut self, new_page: u32) {
|
||||
self.page = Self::get_page(new_page)
|
||||
}
|
||||
|
||||
fn get_page(page: u32) -> Line<'static> {
|
||||
Line::from_iter([
|
||||
"Page: ".to_span(),
|
||||
Span::raw(page.to_string()),
|
||||
" ".to_span(),
|
||||
])
|
||||
.right_aligned()
|
||||
}
|
||||
|
||||
fn draw(&self, f: &mut Frame<'_>, area: Rect) {
|
||||
let split = Layout::default()
|
||||
.direction(Direction::Horizontal)
|
||||
.constraints([
|
||||
Constraint::Length(9),
|
||||
Constraint::Fill(1),
|
||||
Constraint::Length(6),
|
||||
])
|
||||
.split(area);
|
||||
f.render_widget(&self.app_name, split[0]);
|
||||
f.render_widget(&self.page, split[1]);
|
||||
f.render_widget(&self.app_mode, split[2]);
|
||||
}
|
||||
}
|
||||
|
||||
struct Main {
|
||||
places_table: Table<'static>,
|
||||
edit_textareas: Vec<TextArea<'static>>,
|
||||
selected_textarea: usize,
|
||||
}
|
||||
|
||||
impl Main {
|
||||
fn new(state: &State) -> Self {
|
||||
let places_table = Self::get_places_table(state.places.clone());
|
||||
let edit_textareas = Vec::new();
|
||||
let selected_textarea = 0;
|
||||
Self {
|
||||
places_table,
|
||||
edit_textareas,
|
||||
selected_textarea,
|
||||
}
|
||||
}
|
||||
|
||||
fn update_places_table(&mut self, new_places: Vec<Place>) {
|
||||
self.places_table = Self::get_places_table(new_places);
|
||||
}
|
||||
|
||||
fn get_places_table(places: Vec<Place>) -> Table<'static> {
|
||||
let places = places.into_iter().map(|p| {
|
||||
Row::new([
|
||||
p.id.to_string(),
|
||||
p.name,
|
||||
p.latitude.to_string(),
|
||||
p.longitude.to_string(),
|
||||
p.icon,
|
||||
p.address,
|
||||
p.open_hours,
|
||||
p.description,
|
||||
url(p.url),
|
||||
])
|
||||
});
|
||||
|
||||
let widths = [
|
||||
Constraint::Length(3),
|
||||
Constraint::Fill(1),
|
||||
Constraint::Length(7),
|
||||
Constraint::Length(7),
|
||||
Constraint::Length(8),
|
||||
Constraint::Fill(1),
|
||||
Constraint::Fill(1),
|
||||
Constraint::Fill(1),
|
||||
Constraint::Fill(1),
|
||||
];
|
||||
|
||||
Table::new(places, widths)
|
||||
.header(
|
||||
Row::new([
|
||||
"Id",
|
||||
"Name",
|
||||
"Lat",
|
||||
"Long",
|
||||
"Icon",
|
||||
"Address",
|
||||
"Open Hours",
|
||||
"Description",
|
||||
"URL",
|
||||
])
|
||||
.style(Style::new().white().on_dark_gray().bold()),
|
||||
)
|
||||
.row_highlight_style(Style::new().reversed())
|
||||
}
|
||||
|
||||
fn set_edit_textareas(&mut self, place: Place) {
|
||||
let mut name = TextArea::new(vec![place.name]);
|
||||
name.set_block(Block::default().title("Name"));
|
||||
let mut latitude = TextArea::new(vec![place.latitude.to_string()]);
|
||||
latitude.set_block(Block::default().title("Latitude"));
|
||||
let mut longitude = TextArea::new(vec![place.longitude.to_string()]);
|
||||
longitude.set_block(Block::default().title("Longitude"));
|
||||
let mut icon = TextArea::new(vec![place.icon]);
|
||||
icon.set_block(Block::default().title("Icon"));
|
||||
let mut address = TextArea::new(vec![place.address]);
|
||||
address.set_block(Block::default().title("Address"));
|
||||
let mut url = TextArea::new(vec![place.url.unwrap_or_default()]);
|
||||
url.set_block(Block::default().title("URL"));
|
||||
let mut open_hours = TextArea::new(vec![place.open_hours]);
|
||||
open_hours.set_block(Block::default().title("Open Hours"));
|
||||
let mut description = TextArea::new(vec![place.description]);
|
||||
description.set_block(Block::default().title("Description"));
|
||||
|
||||
self.edit_textareas = vec![
|
||||
name,
|
||||
latitude,
|
||||
longitude,
|
||||
icon,
|
||||
address,
|
||||
url,
|
||||
open_hours,
|
||||
description,
|
||||
];
|
||||
|
||||
for textarea in &mut self.edit_textareas {
|
||||
inactive_textarea(textarea);
|
||||
}
|
||||
|
||||
active_textarea(&mut self.edit_textareas[0]);
|
||||
self.selected_textarea = 0;
|
||||
}
|
||||
|
||||
fn next_textarea(&mut self) {
|
||||
let n = self.edit_textareas.len();
|
||||
if n != 0 {
|
||||
if let Some(prev_textarea) = self.edit_textareas.get_mut(self.selected_textarea) {
|
||||
inactive_textarea(prev_textarea);
|
||||
}
|
||||
self.selected_textarea = (self.selected_textarea + 1) % n;
|
||||
if let Some(next_textarea) = self.edit_textareas.get_mut(self.selected_textarea) {
|
||||
active_textarea(next_textarea);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn prev_textarea(&mut self) {
|
||||
let n = self.edit_textareas.len();
|
||||
if n != 0 {
|
||||
if let Some(prev_textarea) = self.edit_textareas.get_mut(self.selected_textarea) {
|
||||
inactive_textarea(prev_textarea);
|
||||
}
|
||||
self.selected_textarea = (self.selected_textarea + n - 1) % n;
|
||||
if let Some(next_textarea) = self.edit_textareas.get_mut(self.selected_textarea) {
|
||||
active_textarea(next_textarea);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn save_place(&self, state: &mut State, id: i64) {
|
||||
let name = self.edit_textareas[0].lines().concat();
|
||||
let Ok(latitude) = self.edit_textareas[1].lines().concat().parse::<f64>() else {
|
||||
return;
|
||||
};
|
||||
let Ok(longitude) = self.edit_textareas[2].lines().concat().parse::<f64>() else {
|
||||
return;
|
||||
};
|
||||
let icon = self.edit_textareas[3].lines().concat();
|
||||
let address = self.edit_textareas[4].lines().concat();
|
||||
let url = self.edit_textareas[5].lines().concat();
|
||||
let url = if url.is_empty() { None } else { Some(url) };
|
||||
let open_hours = self.edit_textareas[6].lines().concat();
|
||||
let description = self.edit_textareas[7].lines().concat();
|
||||
let place = Place {
|
||||
id,
|
||||
name,
|
||||
address,
|
||||
open_hours,
|
||||
icon,
|
||||
description,
|
||||
longitude,
|
||||
latitude,
|
||||
url,
|
||||
};
|
||||
state.confirm_save(place);
|
||||
}
|
||||
|
||||
fn pass_input(&mut self, key_event: KeyEvent) {
|
||||
let Some(active_textarea) = self.edit_textareas.get_mut(self.selected_textarea) else {
|
||||
return;
|
||||
};
|
||||
match key_event.code {
|
||||
KeyCode::Enter => {
|
||||
// Only allow line breaking on open hours and description fields
|
||||
if self.selected_textarea == 6 || self.selected_textarea == 7 {
|
||||
active_textarea.input(key_event);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
active_textarea.input(key_event);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn draw(&self, state: &mut State, f: &mut Frame<'_>, area: Rect) {
|
||||
match state.mode {
|
||||
Mode::List => self.list_draw(state, f, area),
|
||||
Mode::Edit => self.edit_draw(state, f, area),
|
||||
}
|
||||
confirmation_dialog_draw(state, f, area);
|
||||
}
|
||||
|
||||
fn list_draw(&self, state: &mut State, f: &mut Frame<'_>, area: Rect) {
|
||||
f.render_stateful_widget(&self.places_table, area, &mut state.selected_place);
|
||||
}
|
||||
|
||||
fn edit_draw(&self, _state: &mut State, f: &mut Frame<'_>, area: Rect) {
|
||||
let areas: [_; 8] = Layout::default()
|
||||
.direction(Direction::Vertical)
|
||||
.constraints([
|
||||
Constraint::Length(3),
|
||||
Constraint::Length(3),
|
||||
Constraint::Length(3),
|
||||
Constraint::Length(3),
|
||||
Constraint::Length(3),
|
||||
Constraint::Length(3),
|
||||
Constraint::Min(5),
|
||||
Constraint::Min(5),
|
||||
])
|
||||
.areas(area);
|
||||
for (textarea, area) in self.edit_textareas.iter().zip(areas.into_iter()) {
|
||||
f.render_widget(textarea, area);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn confirmation_dialog_draw(state: &mut State, f: &mut Frame<'_>, area: Rect) {
|
||||
let Some(confirmation) = &state.confirmation else {
|
||||
return;
|
||||
};
|
||||
|
||||
let dialog_area = center(area, Constraint::Percentage(80), Constraint::Percentage(60));
|
||||
let (action, id) = match confirmation {
|
||||
ConfirmationStatus::Deletion(id) => ("delete", id),
|
||||
ConfirmationStatus::Save(place) => ("save", &place.id),
|
||||
};
|
||||
let confirmation_dialog = Paragraph::new(Text::from_iter([
|
||||
Line::from_iter([
|
||||
"Do you want to ".to_span(),
|
||||
action.to_span(),
|
||||
" place with id: ".to_span(),
|
||||
id.to_span(),
|
||||
]),
|
||||
Line::from(""),
|
||||
Line::from("Y/N".to_span().bold()),
|
||||
]))
|
||||
.centered()
|
||||
.block(Block::bordered().padding(Padding::uniform(1)));
|
||||
|
||||
f.render_widget(Clear, dialog_area);
|
||||
f.render_widget(confirmation_dialog, dialog_area);
|
||||
}
|
||||
|
||||
struct Footer {
|
||||
keybindings: Paragraph<'static>,
|
||||
}
|
||||
|
||||
impl Footer {
|
||||
fn new(state: &State) -> Self {
|
||||
let keybindings = Self::get_keybindings(state.mode);
|
||||
Self { keybindings }
|
||||
}
|
||||
|
||||
fn update_keybindings(&mut self, new_mode: Mode) {
|
||||
self.keybindings = Self::get_keybindings(new_mode)
|
||||
}
|
||||
|
||||
#[expect(unstable_name_collisions)]
|
||||
fn get_keybindings(mode: Mode) -> Paragraph<'static> {
|
||||
let separator = Span::styled(" ", Style::new().black().on_black());
|
||||
match mode {
|
||||
Mode::List => {
|
||||
let keybindings = [
|
||||
("j/k", "Next/Previous"),
|
||||
("Home/End", "First/Last"),
|
||||
("PgUp", "Prev Page"),
|
||||
("PgDown", "Next Page"),
|
||||
("e", "Edit"),
|
||||
("d", "Delete"),
|
||||
]
|
||||
.map(|(key, action)| keybinding(key, action).to_vec())
|
||||
.into_iter()
|
||||
.intersperse(vec![separator])
|
||||
.flatten();
|
||||
Paragraph::new(Line::from_iter(keybindings))
|
||||
}
|
||||
Mode::Edit => {
|
||||
let keybindings = [
|
||||
("Esc", "Close w/o saving"),
|
||||
("Tab/S-Tab", "Next/prev field"),
|
||||
("C-s", "Save"),
|
||||
]
|
||||
.map(|(key, action)| keybinding(key, action).to_vec())
|
||||
.into_iter()
|
||||
.intersperse(vec![separator])
|
||||
.flatten();
|
||||
Paragraph::new(Line::from_iter(keybindings))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn draw(&self, f: &mut Frame<'_>, area: Rect) {
|
||||
f.render_widget(&self.keybindings, area);
|
||||
}
|
||||
}
|
||||
|
||||
fn url(url: Option<String>) -> String {
|
||||
match url {
|
||||
Some(url) => {
|
||||
if url.starts_with("https://instagram.com/") {
|
||||
format!(
|
||||
"@{}",
|
||||
url.trim_start_matches("https://instagram.com/")
|
||||
.trim_end_matches("/")
|
||||
)
|
||||
} else {
|
||||
url
|
||||
}
|
||||
}
|
||||
None => String::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn keybinding(key: &'static str, action: &'static str) -> [Span<'static>; 5] {
|
||||
let black_bold = Style::new().black().on_gray().bold();
|
||||
let red_bold = Style::new().red().on_gray().bold();
|
||||
let black = Style::new().black().on_gray();
|
||||
[
|
||||
Span::styled(" <", black_bold),
|
||||
Span::styled(key, red_bold),
|
||||
Span::styled("> ", black_bold),
|
||||
Span::styled(action, black),
|
||||
Span::styled(" ", black),
|
||||
]
|
||||
}
|
||||
|
||||
fn center(area: Rect, horizontal: Constraint, vertical: Constraint) -> Rect {
|
||||
let [area] = Layout::horizontal([horizontal])
|
||||
.flex(Flex::Center)
|
||||
.areas(area);
|
||||
let [area] = Layout::vertical([vertical]).flex(Flex::Center).areas(area);
|
||||
area
|
||||
}
|
||||
|
||||
fn inactive_textarea(textarea: &mut TextArea<'_>) {
|
||||
textarea.set_cursor_line_style(Style::default());
|
||||
textarea.set_cursor_style(Style::default());
|
||||
if let Some(block) = textarea.block().map(|block| {
|
||||
block
|
||||
.clone()
|
||||
.borders(Borders::ALL)
|
||||
.style(Style::default().fg(Color::Gray))
|
||||
}) {
|
||||
textarea.set_block(block);
|
||||
};
|
||||
}
|
||||
|
||||
fn active_textarea(textarea: &mut TextArea<'_>) {
|
||||
textarea.set_cursor_style(Style::default().add_modifier(Modifier::REVERSED));
|
||||
if let Some(block) = textarea
|
||||
.block()
|
||||
.map(|block| block.clone().style(Style::default().bold()))
|
||||
{
|
||||
textarea.set_block(block);
|
||||
};
|
||||
}
|
||||
|
|
@ -17,19 +17,18 @@
|
|||
<script src="https://cdnjs.cloudflare.com/ajax/libs/leaflet-contextmenu/1.4.0/leaflet.contextmenu.min.js"
|
||||
integrity="sha512-8sfQf8cr0KjCeN32YPfjvLU2cMvyY1lhCXTMfpTZ16CvwIzeVQtwtKlxeSqFs/TpXjKhp1Dcv77LQmn1VFaOZg=="
|
||||
crossorigin="anonymous" referrerpolicy="no-referrer"></script>
|
||||
<script crossorigin src="https://unpkg.com/@msgpack/msgpack@3.1.2/dist.umd/msgpack.min.js"
|
||||
integrity="sha512-B9xeVWeBMLLUlFALrj2/h3IY/N7MJSkzBrwIltslJSlfWdPsQsQinFJ3X9PuAsz695c5qy5U0194ZqZTg8H3yg=="
|
||||
crossorigin="anonymous" referrerpolicy="no-referrer"></script>
|
||||
|
||||
<style type="text/css" media="screen">
|
||||
body {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
html, body, #map {
|
||||
height: 100vh;
|
||||
width: 100vw;
|
||||
}
|
||||
|
||||
.leaflet-popup-content h3 {
|
||||
margin-top: 1em;
|
||||
margin-bottom: 0.5em;
|
||||
|
|
@ -39,8 +38,20 @@
|
|||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
dialog {
|
||||
#modal {
|
||||
display: none;
|
||||
position: fixed;
|
||||
z-index: 400;
|
||||
left: 0;
|
||||
top: 0;
|
||||
height: 100vh;
|
||||
width: 100vw;
|
||||
overflow: auto;
|
||||
background-color: rgb(0,0,0);
|
||||
background-color: rgba(0,0,0,0.4);
|
||||
font-family: "Helvetica Neue", Arial, Helvetica, sans-serif;
|
||||
}
|
||||
#modal-form {
|
||||
margin: 15vh auto;
|
||||
color: #333;
|
||||
background-color: white;
|
||||
|
|
@ -49,9 +60,6 @@
|
|||
width: 80vw;
|
||||
border-radius: 12px;
|
||||
}
|
||||
dialog::backdrop {
|
||||
background-color: rgba(0,0,0,0.4);
|
||||
}
|
||||
#close {
|
||||
color: #333;
|
||||
float: right;
|
||||
|
|
@ -83,64 +91,62 @@
|
|||
</head>
|
||||
<body>
|
||||
<div id="map"></div>
|
||||
<dialog id="dialog">
|
||||
<span id="close">×</span>
|
||||
<h1>Título</h1>
|
||||
<form>
|
||||
<p>
|
||||
<label for="id"> Id:</label>
|
||||
<input type="text" id="id" name="id" size="30" readonly>
|
||||
</p>
|
||||
<p>
|
||||
<label for="name"> Longitud:</label>
|
||||
<input type="text" id="long" name="long" size="30" readonly>
|
||||
</p>
|
||||
<p>
|
||||
<label for="name"> Latitud:</label>
|
||||
<input type="text" id="lat" name="lat" size="30" readonly>
|
||||
</p>
|
||||
<p>
|
||||
<label for="name"> Nombre:</label>
|
||||
<input type="text" id="name" name="name" size="30">
|
||||
<p>
|
||||
<label for="address"> Dirección:</label>
|
||||
<input type="text" id="address" name="address" size="30">
|
||||
</p>
|
||||
<p>
|
||||
<label for="open_hours"> Horario:</label>
|
||||
<textarea id="open_hours" name="open_hours"
|
||||
cols="30"></textarea>
|
||||
</p>
|
||||
<p>
|
||||
<label for="icon"> Ícono:</label>
|
||||
<select id="icon" name="icon">
|
||||
<option value="bar">Bar</option>
|
||||
<option value="coffee">Café</option>
|
||||
<option value="cinema">Cine</option>
|
||||
<option value="food">Comida</option>
|
||||
<option value="jazz">Jazz</option>
|
||||
<option value="library">Librería</option>
|
||||
<option value="marker" selected>Marcador</option>
|
||||
<option value="museum">Museo</option>
|
||||
<option value="dining">Restaurant</option>
|
||||
<option value="mask">Teatro</option>
|
||||
<option value="shop">Tienda</option>
|
||||
</select>
|
||||
</p>
|
||||
<p>
|
||||
<label for="url"> URL:</label>
|
||||
<input type="text" id="url" name="url" size="30">
|
||||
</p>
|
||||
<p>
|
||||
<label for="description"> Descripción:</label>
|
||||
<textarea id="description" name="description"
|
||||
cols="30" rows="5"></textarea>
|
||||
</p>
|
||||
<p>
|
||||
<button type="button" id="button">Enviar </button>
|
||||
</p>
|
||||
</form>
|
||||
</dialog>
|
||||
<div id="modal">
|
||||
<div id="modal-form">
|
||||
<span id="close">×</span>
|
||||
<h1>Título</h1>
|
||||
<form>
|
||||
<p>
|
||||
<label for="id"> Id:</label>
|
||||
<input type="text" id="id" name="id" size="30" readonly>
|
||||
</p>
|
||||
<p>
|
||||
<label for="name"> Longitud:</label>
|
||||
<input type="text" id="long" name="long" size="30" readonly>
|
||||
</p>
|
||||
<p>
|
||||
<label for="name"> Latitud:</label>
|
||||
<input type="text" id="lat" name="lat" size="30" readonly>
|
||||
</p>
|
||||
<p>
|
||||
<label for="name"> Nombre:</label>
|
||||
<input type="text" id="name" name="name" size="30">
|
||||
<p>
|
||||
<label for="address"> Dirección:</label>
|
||||
<input type="text" id="address" name="address" size="30">
|
||||
</p>
|
||||
<p>
|
||||
<label for="open_hours"> Horario:</label>
|
||||
<textarea id="open_hours" name="open_hours"
|
||||
cols="30"></textarea>
|
||||
</p>
|
||||
<p>
|
||||
<label for="icon"> Ícono:</label>
|
||||
<select id="icon" name="icon">
|
||||
<option value="bar">Bar</option>
|
||||
<option value="coffee">Café</option>
|
||||
<option value="cinema">Cine</option>
|
||||
<option value="food">Comida</option>
|
||||
<option value="jazz">Jazz</option>
|
||||
<option value="library">Librería</option>
|
||||
<option value="marker" selected>Marcador</option>
|
||||
<option value="museum">Museo</option>
|
||||
<option value="dining">Restaurant</option>
|
||||
<option value="mask">Teatro</option>
|
||||
<option value="shop">Tienda</option>
|
||||
</select>
|
||||
</p>
|
||||
<p>
|
||||
<label for="description"> Descripción:</label>
|
||||
<textarea id="description" name="description"
|
||||
cols="30" rows="5"></textarea>
|
||||
</p>
|
||||
<p>
|
||||
<button type="button" id="button">Enviar </button>
|
||||
</p>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
<script src="client.js" onload="setupMap()"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
|||
|
|
@@ -1,3 +1,3 @@
build/client.js: client.ts
all: client.ts
	tsc
	sed -i '1,2d' build/client.js
	sed -i '1d' build/client.js
@ -1,6 +1,5 @@
|
|||
import * as L from 'leaflet-contextmenu';
|
||||
import { Feature, FeatureCollection, Point } from 'geojson';
|
||||
import * as MessagePack from "@msgpack/msgpack";
|
||||
|
||||
interface PlaceModel {
|
||||
id: number | null;
|
||||
|
|
@ -11,12 +10,10 @@ interface PlaceModel {
|
|||
description: string;
|
||||
latitude: number;
|
||||
longitude: number;
|
||||
url: string | null;
|
||||
}
|
||||
|
||||
async function loadPlaces(): Promise<Array<PlaceModel>> {
|
||||
let bytes = await fetch('places').then(response => response.body);
|
||||
return (await MessagePack.decodeAsync(bytes)) as Array<PlaceModel>;
|
||||
return await fetch('places').then(response => response.json());
|
||||
}
|
||||
|
||||
function toFeature(place: PlaceModel): Feature {
|
||||
|
|
@ -27,8 +24,7 @@ function toFeature(place: PlaceModel): Feature {
|
|||
"address": place.address,
|
||||
"open_hours": place.open_hours,
|
||||
"icon": place.icon,
|
||||
"description": place.description,
|
||||
"url": place.url,
|
||||
"description": place.description
|
||||
},
|
||||
"geometry": {
|
||||
"type": "Point",
|
||||
|
|
@ -72,8 +68,6 @@ function getPlaceFromForm(): PlaceModel {
|
|||
(document.getElementById("lat") as HTMLInputElement).value);
|
||||
const longitude = parseFloat(
|
||||
(document.getElementById("long") as HTMLSelectElement).value);
|
||||
const url =
|
||||
(document.getElementById("url") as HTMLInputElement).value;
|
||||
|
||||
return {
|
||||
id: id,
|
||||
|
|
@ -84,7 +78,6 @@ function getPlaceFromForm(): PlaceModel {
|
|||
description: description,
|
||||
latitude: latitude,
|
||||
longitude: longitude,
|
||||
url: url,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
@ -95,7 +88,6 @@ function clearForm(): void {
|
|||
const longInput = (document.getElementById("long") as HTMLInputElement);
|
||||
const nameInput = (document.getElementById("name") as HTMLInputElement);
|
||||
const addressInput = (document.getElementById("address") as HTMLInputElement);
|
||||
const urlInput = (document.getElementById("url") as HTMLInputElement);
|
||||
const openHoursArea = (document.getElementById("open_hours") as HTMLTextAreaElement);
|
||||
const descriptionArea = (document.getElementById("description") as HTMLTextAreaElement);
|
||||
|
||||
|
|
@ -105,7 +97,6 @@ function clearForm(): void {
|
|||
longInput.value = "";
|
||||
nameInput.value = "";
|
||||
addressInput.value = "";
|
||||
urlInput.value = "";
|
||||
openHoursArea.value = "";
|
||||
descriptionArea.value = "";
|
||||
|
||||
|
|
@ -113,7 +104,7 @@ function clearForm(): void {
|
|||
document.getElementById("button").onclick = null;
|
||||
|
||||
/* Now you see it, now you don't*/
|
||||
(document.getElementById("dialog") as HTMLDialogElement).close();
|
||||
document.getElementById("modal").style.display = "none";
|
||||
}
|
||||
|
||||
async function createPlace(): Promise<void> {
|
||||
|
|
@ -122,12 +113,11 @@ async function createPlace(): Promise<void> {
|
|||
await fetch('places', {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
'Content-Type': 'application/msgpack',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: MessagePack.encode(newPlace),
|
||||
body: JSON.stringify(newPlace),
|
||||
})
|
||||
.then((response) => response.body)
|
||||
.then((bytes) => MessagePack.decodeAsync(bytes))
|
||||
.then((response) => response.json())
|
||||
.then((place: PlaceModel) => {
|
||||
places.set(
|
||||
toStr({ lat: place.latitude, lng: place.longitude }),
|
||||
|
|
@ -146,12 +136,11 @@ async function editPlace(): Promise<void> {
|
|||
await fetch('places', {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
'Content-Type': 'application/msgpack',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: MessagePack.encode(newPlace),
|
||||
body: JSON.stringify(newPlace),
|
||||
})
|
||||
.then((response) => response.body)
|
||||
.then((bytes) => MessagePack.decodeAsync(bytes))
|
||||
.then((response) => response.json())
|
||||
.then((place: PlaceModel) => {
|
||||
places.set(
|
||||
toStr({ lat: place.latitude, lng: place.longitude }),
|
||||
|
|
@ -179,27 +168,24 @@ async function getAddressReverse(lat: number, long: number): Promise<string> {
|
|||
return `${address.road} ${address.house_number}, ${address.city}`;
|
||||
}
|
||||
|
||||
function toLink(url: string): string {
|
||||
let content = url;
|
||||
const m = url.match(/https:\/\/instagram\.com\/((?:\w|\.)+)\/?/);
|
||||
if (m) {
|
||||
content = `@${m[1]}`;
|
||||
}
|
||||
return `<a href="${url}" target="_blank">${content}</a>`
|
||||
}
|
||||
|
||||
async function setupMap(): Promise<void> {
|
||||
/* Create/Edit form */
|
||||
const dialog = document.getElementById("dialog") as HTMLDialogElement;
|
||||
const modal = document.getElementById("modal");
|
||||
const closeButton = document.getElementById("close");
|
||||
|
||||
closeButton.onclick = function() {
|
||||
dialog.close();
|
||||
modal.style.display = "none";
|
||||
}
|
||||
|
||||
window.onclick = function(e: Event) {
|
||||
if (e.target == modal) {
|
||||
modal.style.display = "none";
|
||||
}
|
||||
}
|
||||
|
||||
async function openForm(op: Operation, lat: number, long: number): Promise<void> {
|
||||
/* Fill the form for us */
|
||||
const h1 = dialog.getElementsByTagName("h1")[0];
|
||||
const h1 = modal.getElementsByTagName("h1")[0];
|
||||
if (op == Operation.Create) {
|
||||
clearForm()
|
||||
h1.innerText = "Añadir lugar nuevo";
|
||||
|
|
@ -218,7 +204,6 @@ async function setupMap(): Promise<void> {
|
|||
/*Get the form elements*/
|
||||
const idInput = (document.getElementById("id") as HTMLInputElement);
|
||||
const nameInput = (document.getElementById("name") as HTMLInputElement);
|
||||
const urlInput = (document.getElementById("url") as HTMLInputElement);
|
||||
const openHoursArea = (document.getElementById("open_hours") as HTMLTextAreaElement);
|
||||
const iconSelect = (document.getElementById("icon") as HTMLSelectElement);
|
||||
const descriptionArea = (document.getElementById("description") as HTMLTextAreaElement);
|
||||
|
|
@ -227,7 +212,6 @@ async function setupMap(): Promise<void> {
|
|||
idInput.value = place.id.toString();
|
||||
nameInput.value = place.name;
|
||||
addressInput.value = place.address;
|
||||
urlInput.value = place.url;
|
||||
openHoursArea.value = place.open_hours;
|
||||
iconSelect.value = place.icon;
|
||||
descriptionArea.value = place.description;
|
||||
|
|
@ -249,7 +233,7 @@ async function setupMap(): Promise<void> {
|
|||
}
|
||||
|
||||
/* Make it appear */
|
||||
dialog.showModal();
|
||||
modal.style.display = "block";
|
||||
}
|
||||
|
||||
function openCreateForm(e: MapEvent) {
|
||||
|
|
@ -303,22 +287,19 @@ async function setupMap(): Promise<void> {
|
|||
|
||||
function onEachFeature(feature: Feature, layer: L.Layer) {
|
||||
if (feature.properties) {
|
||||
let popupStr = `<h3>${feature.properties.name}</h3>`;
|
||||
let popupStr = "<h3>" + feature.properties.name + "</h3>";
|
||||
popupStr += "<ul>"
|
||||
if (feature.properties.address)
|
||||
popupStr += `<li><b>Dirección:</b>${feature.properties.address}</li>`;
|
||||
popupStr += "<li><b>Dirección:</b> " + feature.properties.address + "</li>";
|
||||
if (feature.properties.open_hours)
|
||||
popupStr += `<li><b>Horario:</b>${feature.properties.open_hours}</li>`;
|
||||
popupStr += "<li><b>Horario:</b> " + feature.properties.open_hours + "</li>";
|
||||
if (feature.properties.description)
|
||||
popupStr += `<li>${feature.properties.description}</li>`;
|
||||
if (feature.properties.url)
|
||||
popupStr += `<li>${toLink(feature.properties.url)}</li>`;
|
||||
popupStr += "<li>" + feature.properties.description + "</li>";
|
||||
|
||||
const lnglat = (feature.geometry as Point).coordinates;
|
||||
const lng = lnglat[0];
|
||||
const lat = lnglat[1];
|
||||
popupStr += `<a href="https://www.google.com/maps/dir//` +
|
||||
`${lat},${lng}/@${lat},${lng},15z" target="_blank">GMaps</a>`
|
||||
popupStr += "<a href=\"https://www.google.com/maps/dir//" +
|
||||
lnglat[1] + "," + lnglat[0] + "/@" + lnglat[1] + "," + lnglat[0] +
|
||||
",15z\" target=\"_blank\">GMaps</a>"
|
||||
popupStr += "</ul>";
|
||||
|
||||
layer.bindPopup(popupStr);
|
||||
|
|
|
|||
ts-client/package-lock.json (generated)
@@ -1,96 +1,82 @@
|
|||
{
|
||||
"name": "ts-client",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"dependencies": {
|
||||
"@msgpack/msgpack": "^3.0.0-beta2",
|
||||
"geojson": "^0.5.0",
|
||||
"leaflet": "^1.9.4",
|
||||
"leaflet-contextmenu": "^1.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/leaflet": "^1.9.8"
|
||||
}
|
||||
},
|
||||
"node_modules/@msgpack/msgpack": {
|
||||
"version": "3.0.0-beta2",
|
||||
"resolved": "https://registry.npmjs.org/@msgpack/msgpack/-/msgpack-3.0.0-beta2.tgz",
|
||||
"integrity": "sha512-y+l1PNV0XDyY8sM3YtuMLK5vE3/hkfId+Do8pLo/OPxfxuFAUwcGz3oiiUuV46/aBpwTzZ+mRWVMtlSKbradhw==",
|
||||
"engines": {
|
||||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/geojson": {
|
||||
"version": "7946.0.10",
|
||||
"resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.10.tgz",
|
||||
"integrity": "sha512-Nmh0K3iWQJzniTuPRcJn5hxXkfB1T1pgB89SBig5PlJQU5yocazeu4jATJlaA0GYFKWMqDdvYemoSnF2pXgLVA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/leaflet": {
|
||||
"version": "1.9.8",
|
||||
"resolved": "https://registry.npmjs.org/@types/leaflet/-/leaflet-1.9.8.tgz",
|
||||
"integrity": "sha512-EXdsL4EhoUtGm2GC2ZYtXn+Fzc6pluVgagvo2VC1RHWToLGlTRwVYoDpqS/7QXa01rmDyBjJk3Catpf60VMkwg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/geojson": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/geojson": {
|
||||
"version": "0.5.0",
|
||||
"resolved": "https://registry.npmjs.org/geojson/-/geojson-0.5.0.tgz",
|
||||
"integrity": "sha512-/Bx5lEn+qRF4TfQ5aLu6NH+UKtvIv7Lhc487y/c8BdludrCTpiWf9wyI0RTyqg49MFefIAvFDuEi5Dfd/zgNxQ==",
|
||||
"engines": {
|
||||
"node": ">= 0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/leaflet": {
|
||||
"version": "1.9.4",
|
||||
"resolved": "https://registry.npmjs.org/leaflet/-/leaflet-1.9.4.tgz",
|
||||
"integrity": "sha512-nxS1ynzJOmOlHp+iL3FyWqK89GtNL8U8rvlMOsQdTTssxZwCXh8N2NB3GDQOL+YR3XnWyZAxwQixURb+FA74PA=="
|
||||
},
|
||||
"node_modules/leaflet-contextmenu": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/leaflet-contextmenu/-/leaflet-contextmenu-1.4.0.tgz",
|
||||
"integrity": "sha512-BXASCmJ5bLkuJGDCpWmvGqhZi5AzeOY0IbQalfkgBcMAMfAOFSvD4y0gIQxF/XzEyLkjXaRiUpibVj4+Cf3tUA=="
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"@msgpack/msgpack": {
|
||||
"version": "3.0.0-beta2",
|
||||
"resolved": "https://registry.npmjs.org/@msgpack/msgpack/-/msgpack-3.0.0-beta2.tgz",
|
||||
"integrity": "sha512-y+l1PNV0XDyY8sM3YtuMLK5vE3/hkfId+Do8pLo/OPxfxuFAUwcGz3oiiUuV46/aBpwTzZ+mRWVMtlSKbradhw=="
|
||||
},
|
||||
"@types/geojson": {
|
||||
"version": "7946.0.10",
|
||||
"resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.10.tgz",
|
||||
"integrity": "sha512-Nmh0K3iWQJzniTuPRcJn5hxXkfB1T1pgB89SBig5PlJQU5yocazeu4jATJlaA0GYFKWMqDdvYemoSnF2pXgLVA==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/leaflet": {
|
||||
"version": "1.9.8",
|
||||
"resolved": "https://registry.npmjs.org/@types/leaflet/-/leaflet-1.9.8.tgz",
|
||||
"integrity": "sha512-EXdsL4EhoUtGm2GC2ZYtXn+Fzc6pluVgagvo2VC1RHWToLGlTRwVYoDpqS/7QXa01rmDyBjJk3Catpf60VMkwg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/geojson": "*"
|
||||
}
|
||||
},
|
||||
"geojson": {
|
||||
"version": "0.5.0",
|
||||
"resolved": "https://registry.npmjs.org/geojson/-/geojson-0.5.0.tgz",
|
||||
"integrity": "sha512-/Bx5lEn+qRF4TfQ5aLu6NH+UKtvIv7Lhc487y/c8BdludrCTpiWf9wyI0RTyqg49MFefIAvFDuEi5Dfd/zgNxQ=="
|
||||
},
|
||||
"leaflet": {
|
||||
"version": "1.9.4",
|
||||
"resolved": "https://registry.npmjs.org/leaflet/-/leaflet-1.9.4.tgz",
|
||||
"integrity": "sha512-nxS1ynzJOmOlHp+iL3FyWqK89GtNL8U8rvlMOsQdTTssxZwCXh8N2NB3GDQOL+YR3XnWyZAxwQixURb+FA74PA=="
|
||||
},
|
||||
"leaflet-contextmenu": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/leaflet-contextmenu/-/leaflet-contextmenu-1.4.0.tgz",
|
||||
"integrity": "sha512-BXASCmJ5bLkuJGDCpWmvGqhZi5AzeOY0IbQalfkgBcMAMfAOFSvD4y0gIQxF/XzEyLkjXaRiUpibVj4+Cf3tUA=="
|
||||
}
|
||||
}
|
||||
"name": "ts-client",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"dependencies": {
|
||||
"geojson": "^0.5.0",
|
||||
"leaflet": "^1.8.0",
|
||||
"leaflet-contextmenu": "^1.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/leaflet": "^1.7.11"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/geojson": {
|
||||
"version": "7946.0.10",
|
||||
"resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.10.tgz",
|
||||
"integrity": "sha512-Nmh0K3iWQJzniTuPRcJn5hxXkfB1T1pgB89SBig5PlJQU5yocazeu4jATJlaA0GYFKWMqDdvYemoSnF2pXgLVA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/leaflet": {
|
||||
"version": "1.7.11",
|
||||
"resolved": "https://registry.npmjs.org/@types/leaflet/-/leaflet-1.7.11.tgz",
|
||||
"integrity": "sha512-VwAYom2pfIAf/pLj1VR5aLltd4tOtHyvfaJlNYCoejzP2nu52PrMi1ehsLRMUS+bgafmIIKBV1cMfKeS+uJ0Vg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/geojson": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/geojson": {
|
||||
"version": "0.5.0",
|
||||
"resolved": "https://registry.npmjs.org/geojson/-/geojson-0.5.0.tgz",
|
||||
"integrity": "sha512-/Bx5lEn+qRF4TfQ5aLu6NH+UKtvIv7Lhc487y/c8BdludrCTpiWf9wyI0RTyqg49MFefIAvFDuEi5Dfd/zgNxQ==",
|
||||
"engines": {
|
||||
"node": ">= 0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/leaflet": {
|
||||
"version": "1.8.0",
|
||||
"resolved": "https://registry.npmjs.org/leaflet/-/leaflet-1.8.0.tgz",
|
||||
"integrity": "sha512-gwhMjFCQiYs3x/Sf+d49f10ERXaEFCPr+nVTryhAW8DWbMGqJqt9G4XuIaHmFW08zYvhgdzqXGr8AlW8v8dQkA=="
|
||||
},
|
||||
"node_modules/leaflet-contextmenu": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/leaflet-contextmenu/-/leaflet-contextmenu-1.4.0.tgz",
|
||||
"integrity": "sha512-BXASCmJ5bLkuJGDCpWmvGqhZi5AzeOY0IbQalfkgBcMAMfAOFSvD4y0gIQxF/XzEyLkjXaRiUpibVj4+Cf3tUA=="
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/geojson": {
|
||||
"version": "7946.0.10",
|
||||
"resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.10.tgz",
|
||||
"integrity": "sha512-Nmh0K3iWQJzniTuPRcJn5hxXkfB1T1pgB89SBig5PlJQU5yocazeu4jATJlaA0GYFKWMqDdvYemoSnF2pXgLVA==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/leaflet": {
|
||||
"version": "1.7.11",
|
||||
"resolved": "https://registry.npmjs.org/@types/leaflet/-/leaflet-1.7.11.tgz",
|
||||
"integrity": "sha512-VwAYom2pfIAf/pLj1VR5aLltd4tOtHyvfaJlNYCoejzP2nu52PrMi1ehsLRMUS+bgafmIIKBV1cMfKeS+uJ0Vg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/geojson": "*"
|
||||
}
|
||||
},
|
||||
"geojson": {
|
||||
"version": "0.5.0",
|
||||
"resolved": "https://registry.npmjs.org/geojson/-/geojson-0.5.0.tgz",
|
||||
"integrity": "sha512-/Bx5lEn+qRF4TfQ5aLu6NH+UKtvIv7Lhc487y/c8BdludrCTpiWf9wyI0RTyqg49MFefIAvFDuEi5Dfd/zgNxQ=="
|
||||
},
|
||||
"leaflet": {
|
||||
"version": "1.8.0",
|
||||
"resolved": "https://registry.npmjs.org/leaflet/-/leaflet-1.8.0.tgz",
|
||||
"integrity": "sha512-gwhMjFCQiYs3x/Sf+d49f10ERXaEFCPr+nVTryhAW8DWbMGqJqt9G4XuIaHmFW08zYvhgdzqXGr8AlW8v8dQkA=="
|
||||
},
|
||||
"leaflet-contextmenu": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/leaflet-contextmenu/-/leaflet-contextmenu-1.4.0.tgz",
|
||||
"integrity": "sha512-BXASCmJ5bLkuJGDCpWmvGqhZi5AzeOY0IbQalfkgBcMAMfAOFSvD4y0gIQxF/XzEyLkjXaRiUpibVj4+Cf3tUA=="
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -1,11 +1,10 @@
{
  "devDependencies": {
    "@types/leaflet": "^1.9.8"
  },
  "dependencies": {
    "@msgpack/msgpack": "^3.0.0-beta2",
    "geojson": "^0.5.0",
    "leaflet": "^1.9.4",
    "leaflet-contextmenu": "^1.4.0"
  }
  "devDependencies": {
    "@types/leaflet": "^1.7.11"
  },
  "dependencies": {
    "geojson": "^0.5.0",
    "leaflet": "^1.8.0",
    "leaflet-contextmenu": "^1.4.0"
  }
}
@@ -11,7 +11,7 @@
  ],
  "types": [
    "leaflet",
    // "geojson",
    "geojson",
  ],
  "moduleResolution": "node"
},