balls
.direnv/bin/nix-direnv-reload (new executable file, 19 lines)
@@ -0,0 +1,19 @@
#!/usr/bin/env bash
set -e
if [[ ! -d "/home/mrfluffy/Documents/projects/rust/whereAmI" ]]; then
  echo "Cannot find source directory; Did you move it?"
  echo "(Looking for "/home/mrfluffy/Documents/projects/rust/whereAmI")"
  echo 'Cannot force reload with this script - use "direnv reload" manually and then try again'
  exit 1
fi

# rebuild the cache forcefully
_nix_direnv_force_reload=1 direnv exec "/home/mrfluffy/Documents/projects/rust/whereAmI" true

# Update the mtime for .envrc.
# This will cause direnv to reload again - but without re-building.
touch "/home/mrfluffy/Documents/projects/rust/whereAmI/.envrc"

# Also update the timestamp of whatever profile_rc we have.
# This makes sure that we know we are up to date.
touch -r "/home/mrfluffy/Documents/projects/rust/whereAmI/.envrc" "/home/mrfluffy/Documents/projects/rust/whereAmI/.direnv"/*.rc
.direnv/flake-inputs/0yj36irhwn225ywy1saz0gf5wr2ciz50-source (new symbolic link)
@@ -0,0 +1 @@
/nix/store/0yj36irhwn225ywy1saz0gf5wr2ciz50-source
.direnv/flake-inputs/g1rkrcba88bmgmjc2lrnwcala1w2yblq-source (new symbolic link)
@@ -0,0 +1 @@
/nix/store/g1rkrcba88bmgmjc2lrnwcala1w2yblq-source
.direnv/flake-inputs/p0h1gvdli8k29651567l38qx7sxmkm5w-source (new symbolic link)
@@ -0,0 +1 @@
/nix/store/p0h1gvdli8k29651567l38qx7sxmkm5w-source
.direnv/flake-inputs/s1ra3mlx2r37qxrm8w9438a3gwaws1mg-source (new symbolic link)
@@ -0,0 +1 @@
/nix/store/s1ra3mlx2r37qxrm8w9438a3gwaws1mg-source
.direnv/flake-profile-a5d5b61aa8a61b7d9d765e1daf971a9a578f1cfa (new symbolic link)
@@ -0,0 +1 @@
/nix/store/43vi36d27viiyg22q566927b7divdx8f-nix-shell-env
.direnv/flake-profile-a5d5b61aa8a61b7d9d765e1daf971a9a578f1cfa.rc (new file, 2154 lines)
File diff suppressed because it is too large.
.gitignore (new file, vendored, 1 line)
@@ -0,0 +1 @@
/target
Cargo.lock (new generated file, 1927 lines)
File diff suppressed because it is too large.
Cargo.toml (new file, 15 lines)
@@ -0,0 +1,15 @@
[package]
name = "whereAmI"
version = "0.1.0"
edition = "2024"

[dependencies]
axum = { version = "0.7", features = ["json"] }
tokio = { version = "1.0", features = ["full"] }
serde = { version = "1.0", features = ["derive"] }
reqwest = { version = "0.12", features = ["json"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["fmt", "env-filter"] }
tower-http = { version = "0.5", features = ["add-extension"] } # needed for ConnectInfo
chrono = "0.4.42"
dotenvy = "0.15"
flake.lock (new generated file, 63 lines)
@@ -0,0 +1,63 @@
{
  "nodes": {
    "fenix": {
      "inputs": {
        "nixpkgs": [
          "nixpkgs"
        ],
        "rust-analyzer-src": "rust-analyzer-src"
      },
      "locked": {
        "lastModified": 1767250179,
        "narHash": "sha256-PnQdWvPZqHp+7yaHWDFX3NYSKaOy0fjkwpR+rIQC7AY=",
        "rev": "a3eaf682db8800962943a77ab77c0aae966f9825",
        "revCount": 2511,
        "type": "tarball",
        "url": "https://api.flakehub.com/f/pinned/nix-community/fenix/0.1.2511%2Brev-a3eaf682db8800962943a77ab77c0aae966f9825/019b78a8-f9ad-7faf-9a11-350b6ae3fcd9/source.tar.gz"
      },
      "original": {
        "type": "tarball",
        "url": "https://flakehub.com/f/nix-community/fenix/0.1.%2A"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1767640445,
        "narHash": "sha256-UWYqmD7JFBEDBHWYcqE6s6c77pWdcU/i+bwD6XxMb8A=",
        "rev": "9f0c42f8bc7151b8e7e5840fb3bd454ad850d8c5",
        "revCount": 922875,
        "type": "tarball",
        "url": "https://api.flakehub.com/f/pinned/NixOS/nixpkgs/0.1.922875%2Brev-9f0c42f8bc7151b8e7e5840fb3bd454ad850d8c5/019b9446-bef1-749d-8068-2eb76ae32808/source.tar.gz"
      },
      "original": {
        "type": "tarball",
        "url": "https://flakehub.com/f/NixOS/nixpkgs/0.1.%2A"
      }
    },
    "root": {
      "inputs": {
        "fenix": "fenix",
        "nixpkgs": "nixpkgs"
      }
    },
    "rust-analyzer-src": {
      "flake": false,
      "locked": {
        "lastModified": 1767191410,
        "narHash": "sha256-cCZGjubgDWmstvFkS6eAw2qk2ihgWkycw55u2dtLd70=",
        "owner": "rust-lang",
        "repo": "rust-analyzer",
        "rev": "a9026e6d5068172bf5a0d52a260bb290961d1cb4",
        "type": "github"
      },
      "original": {
        "owner": "rust-lang",
        "ref": "nightly",
        "repo": "rust-analyzer",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
flake.nix (new file, 103 lines)
@@ -0,0 +1,103 @@
{
  description = "A Nix-flake-based Rust development environment with build and run support";

  inputs = {
    nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.1.*"; # unstable

    fenix = {
      url = "https://flakehub.com/f/nix-community/fenix/0.1.*";
      inputs.nixpkgs.follows = "nixpkgs";
    };
  };

  outputs = { self, nixpkgs, fenix }:
    let
      supportedSystems = [
        "x86_64-linux"
        "aarch64-linux"
        "x86_64-darwin"
        "aarch64-darwin"
      ];

      forEachSupportedSystem = f:
        nixpkgs.lib.genAttrs supportedSystems (system:
          f {
            pkgs = import nixpkgs {
              inherit system;
              overlays = [ self.overlays.default ];
            };
          }
        );
    in
    {
      overlays.default = final: prev: {
        rustToolchain =
          with fenix.packages.${prev.stdenv.hostPlatform.system};
          combine (with stable; [
            cargo
            rustc
            clippy
            rustfmt
            rust-src
          ]);
      };

      packages = forEachSupportedSystem ({ pkgs }: {
        default = pkgs.rustPlatform.buildRustPackage {
          pname = "whereAmI";
          version = "0.1.0";

          # Keep Cargo.lock even if gitignored
          src = pkgs.lib.cleanSourceWith {
            src = ./.;
            filter = path: type:
              let
                name = pkgs.lib.baseNameOf path;
              in
              name == "Cargo.lock"
              || pkgs.lib.cleanSourceFilter path type;
          };

          cargoLock = {
            lockFile = ./Cargo.lock;
          };

          nativeBuildInputs = [
            pkgs.pkg-config
          ];

          buildInputs = [
            pkgs.openssl
          ];

          meta = with pkgs.lib; {
            description = "A simple Rust program to show current location info";
            mainProgram = "whereAmI";
            license = licenses.mit;
            maintainers = [ ];
          };
        };
      });

      devShells = forEachSupportedSystem ({ pkgs }: {
        default = pkgs.mkShell {
          packages = with pkgs; [
            rustToolchain
            openssl
            pkg-config
            cargo-deny
            cargo-edit
            cargo-watch
            rust-analyzer
          ];

          env = {
            # Needed for rust-analyzer stdlib discovery
            RUST_SRC_PATH =
              "${pkgs.rustToolchain}/lib/rustlib/src/rust/library";
          };
        };
      });
    };
}
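For reference, this is roughly how the flake above would be used from a shell. It is a minimal sketch and not part of the commit: it assumes flakes are enabled in the local Nix configuration, that commands run from the repository root, that "secret123" is only a placeholder for a real API_KEYS value, and that the built binary lands at result/bin/whereAmI (following the Cargo package name).

# Enter the dev shell from devShells.default (toolchain, rust-analyzer, cargo helpers)
nix develop

# Inside the shell, run the server directly with cargo
API_KEYS="secret123" cargo run

# Or build packages.default and run the installed binary
nix build
API_KEYS="secret123" ./result/bin/whereAmI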
src/main.rs (new file, 257 lines)
@@ -0,0 +1,257 @@
use axum::{
    extract::{Request, State},
    http::{HeaderMap, StatusCode},
    middleware::Next,
    response::Response,
    routing::{get, put},
    Json, Router,
};
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
use std::net::SocketAddr;
use std::sync::{Arc, Mutex};
use tokio::net::TcpListener;
use tracing::{debug, error, info, warn, instrument, Level};

// ── Shared state ──────────────────────────────────────────────────────────────
type Countries = Arc<Mutex<HashMap<String, String>>>;

#[derive(Clone)]
struct AppState {
    countries: Countries,
    api_keys: Arc<HashSet<String>>,
}

// ── Request / Response bodies ─────────────────────────────────────────────────
#[derive(Deserialize)]
struct UpdateLocation {
    lat: f64,
    lon: f64,
    key: String,
}

#[derive(Deserialize)]
struct GetLocation {
    key: String,
}

#[derive(Serialize)]
struct CountryResponse {
    country: String,
}

// ── BigDataCloud response ────────────────────────────────────────────────────
#[derive(Deserialize, Debug)]
struct BDCResponse {
    country: Option<String>,
    #[serde(rename = "countryCode")]
    country_code: Option<String>,
}

// ── Middleware: API Key authentication with structured logging ───────────────
#[instrument(skip(state, req, next))]
async fn api_key_middleware(
    headers: HeaderMap,
    State(state): State<AppState>,
    req: Request,
    next: Next,
) -> Result<Response, StatusCode> {
    let client_ip: String = req
        .extensions()
        .get::<axum::extract::ConnectInfo<SocketAddr>>()
        .map(|ci| ci.0.ip().to_string())
        .unwrap_or_else(|| "unknown".into());

    let api_key = headers
        .get("X-API-Key")
        .and_then(|v| v.to_str().ok())
        .map(|s| s.to_string());

    match api_key {
        Some(ref key) if state.api_keys.contains(key) => {
            info!(
                client_ip = %client_ip,
                method = %req.method(),
                uri = %req.uri(),
                "AUTH OK"
            );
            Ok(next.run(req).await)
        }
        Some(_) => {
            warn!(client_ip = %client_ip, "INVALID API KEY");
            Err(StatusCode::UNAUTHORIZED)
        }
        None => {
            warn!(
                client_ip = %client_ip,
                method = %req.method(),
                uri = %req.uri(),
                "MISSING X-API-Key header"
            );
            Err(StatusCode::UNAUTHORIZED)
        }
    }
}

// ── Reverse geocode using BigDataCloud ───────────────────────────────────────
#[instrument(skip_all, fields(lat = %lat, lon = %lon))]
async fn get_country_from_coords(lat: f64, lon: f64) -> Result<String, StatusCode> {
    debug!("Querying BigDataCloud");
    let url = format!(
        "https://api.bigdatacloud.net/data/reverse-geocode-client?latitude={}&longitude={}&localityLanguage=en",
        lat, lon
    );

    let client = reqwest::Client::builder()
        .user_agent("DeviceLocationServer/1.0 (contact: your-email@example.com)") // ← CHANGE THIS!
        .build()
        .map_err(|e| {
            error!("Failed to build reqwest client: {}", e);
            StatusCode::BAD_GATEWAY
        })?;

    let response = client.get(&url).send().await;

    match response {
        Ok(res) if res.status().is_success() => {
            debug!("BigDataCloud responded with 200 OK");
            let data: BDCResponse = res.json().await.map_err(|e| {
                error!("Failed to parse JSON from BigDataCloud: {}", e);
                StatusCode::BAD_GATEWAY
            })?;

            let country = data
                .country
                .or_else(|| data.country_code.map(|cc| cc.to_uppercase()))
                .ok_or_else(|| {
                    warn!("No country information in BigDataCloud response");
                    StatusCode::NOT_FOUND
                })?;

            info!(country = %country, "Resolved country from coordinates");
            Ok(country)
        }
        Ok(res) => {
            warn!("BigDataCloud returned error status: {}", res.status());
            Err(StatusCode::BAD_GATEWAY)
        }
        Err(e) => {
            error!("Network error contacting BigDataCloud: {}", e);
            Err(StatusCode::BAD_GATEWAY)
        }
    }
}

// ── PUT /location ─────────────────────────────────────────────────────────────
#[instrument(skip(state, payload))]
async fn update_location(
    State(state): State<AppState>,
    Json(payload): Json<UpdateLocation>,
) -> Result<StatusCode, StatusCode> {
    info!(
        key = %payload.key,
        lat = payload.lat,
        lon = payload.lon,
        "Received location update"
    );

    match get_country_from_coords(payload.lat, payload.lon).await {
        Ok(country) => {
            let mut countries = state.countries.lock().unwrap();
            countries.insert(payload.key.clone(), country.clone());
            info!(
                key = %payload.key,
                country = %country,
                "Stored device country"
            );
            Ok(StatusCode::OK)
        }
        Err(status) => {
            warn!("Geocoding failed with status: {}", status);
            Err(status)
        }
    }
}

// ── GET /get/location ─────────────────────────────────────────────────────────
#[instrument(skip(state, payload))]
async fn get_location(
    State(state): State<AppState>,
    Json(payload): Json<GetLocation>,
) -> Result<Json<CountryResponse>, StatusCode> {
    info!(key = %payload.key, "Requesting stored location");

    let countries = state.countries.lock().unwrap();
    if let Some(country) = countries.get(&payload.key) {
        info!(key = %payload.key, country = %country, "Found stored country");
        Ok(Json(CountryResponse {
            country: country.clone(),
        }))
    } else {
        warn!(key = %payload.key, "Device key not found");
        Err(StatusCode::NOT_FOUND)
    }
}

// ── Main ──────────────────────────────────────────────────────────────────────
#[tokio::main]
async fn main() {
    // Load .env file if present (great for local development)
    dotenvy::dotenv().ok();

    // Initialize structured logging
    tracing_subscriber::fmt()
        .with_max_level(Level::INFO) // Set to DEBUG for more details
        .with_target(true)
        .with_thread_names(false)
        .pretty()
        .init();

    // Load API keys from environment variable
    let api_keys_str = std::env::var("API_KEYS")
        .expect("API_KEYS environment variable is required");

    let valid_api_keys: HashSet<String> = if api_keys_str.trim().is_empty() {
        HashSet::new()
    } else {
        api_keys_str
            .split(',')
            .map(|s| s.trim().to_string())
            .filter(|s| !s.is_empty())
            .collect()
    };

    if valid_api_keys.is_empty() {
        warn!("No API keys configured – all requests will be rejected!");
    } else {
        info!("Loaded {} API key(s) from API_KEYS", valid_api_keys.len());
    }

    let state = AppState {
        countries: Arc::new(Mutex::new(HashMap::new())),
        api_keys: Arc::new(valid_api_keys),
    };

    let app = Router::new()
        .route("/location", put(update_location))
        .route("/get/location", get(get_location))
        .layer(axum::middleware::from_fn_with_state(
            state.clone(),
            api_key_middleware,
        ))
        .with_state(state);

    let listener = TcpListener::bind("0.0.0.0:3000").await.unwrap();
    let addr = listener.local_addr().unwrap();

    info!("Country Location Server starting");
    info!("Listening on http://{}", addr);
    info!("Using BigDataCloud for reverse geocoding");
    warn!("Remember to update the User-Agent email in get_country_from_coords()!");

    axum::serve(
        listener,
        app.into_make_service_with_connect_info::<SocketAddr>(),
    )
    .await
    .unwrap();
}
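A quick way to smoke-test the two routes once the server is listening on port 3000. This is an illustrative sketch, not part of the commit: "secret123" stands in for a real entry in API_KEYS, "phone1" is a made-up device key, and the coordinates are arbitrary. Note that /get/location takes its parameters from a JSON body (matching the Json<GetLocation> extractor), so the GET request carries a body too.

# Report a device position; the server reverse-geocodes it and stores the country
curl -X PUT http://localhost:3000/location \
  -H 'X-API-Key: secret123' \
  -H 'Content-Type: application/json' \
  -d '{"key": "phone1", "lat": 52.52, "lon": 13.405}'

# Read back the stored country for the same device key
curl -X GET http://localhost:3000/get/location \
  -H 'X-API-Key: secret123' \
  -H 'Content-Type: application/json' \
  -d '{"key": "phone1"}'
# A successful lookup returns JSON of the form {"country":"..."}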