Update: Base done

This commit is contained in:
mrfluffy-dev
2025-05-09 01:21:31 +01:00
parent a9591a1d84
commit 9b208edfde
20 changed files with 430 additions and 190 deletions

View File

@@ -1,19 +1,19 @@
#!/usr/bin/env bash #!/usr/bin/env bash
set -e set -e
if [[ ! -d "/home/work/Documents/rust/bitBeam" ]]; then if [[ ! -d "/home/mrfluffy/bitBeam" ]]; then
echo "Cannot find source directory; Did you move it?" echo "Cannot find source directory; Did you move it?"
echo "(Looking for "/home/work/Documents/rust/bitBeam")" echo "(Looking for "/home/mrfluffy/bitBeam")"
echo 'Cannot force reload with this script - use "direnv reload" manually and then try again' echo 'Cannot force reload with this script - use "direnv reload" manually and then try again'
exit 1 exit 1
fi fi
# rebuild the cache forcefully # rebuild the cache forcefully
_nix_direnv_force_reload=1 direnv exec "/home/work/Documents/rust/bitBeam" true _nix_direnv_force_reload=1 direnv exec "/home/mrfluffy/bitBeam" true
# Update the mtime for .envrc. # Update the mtime for .envrc.
# This will cause direnv to reload again - but without re-building. # This will cause direnv to reload again - but without re-building.
touch "/home/work/Documents/rust/bitBeam/.envrc" touch "/home/mrfluffy/bitBeam/.envrc"
# Also update the timestamp of whatever profile_rc we have. # Also update the timestamp of whatever profile_rc we have.
# This makes sure that we know we are up to date. # This makes sure that we know we are up to date.
touch -r "/home/work/Documents/rust/bitBeam/.envrc" "/home/work/Documents/rust/bitBeam/.direnv"/*.rc touch -r "/home/mrfluffy/bitBeam/.envrc" "/home/mrfluffy/bitBeam/.direnv"/*.rc

View File

@@ -0,0 +1 @@
/nix/store/004vk3n8jlx9vw4nzy1f0d2dhsy7wz52-source

View File

@@ -1 +0,0 @@
/nix/store/52hxk3ygip5xv1jrjymnn4yh9rqikj91-source

View File

@@ -1 +1 @@
/nix/store/vdijc3indsq6j6xbridfqjib4pkg6vhs-nix-shell-env /nix/store/sc88ikf5zh532nisyr5v9h6f6q6fay54-nix-shell-env

View File

@@ -1 +1 @@
/nix/store/vdijc3indsq6j6xbridfqjib4pkg6vhs-nix-shell-env /nix/store/sc88ikf5zh532nisyr5v9h6f6q6fay54-nix-shell-env

View File

@@ -12,6 +12,10 @@ export AS
AS_FOR_BUILD='as' AS_FOR_BUILD='as'
export AS_FOR_BUILD export AS_FOR_BUILD
BASH='/nix/store/xg75pc4yyfd5n2fimhb98ps910q5lm5n-bash-5.2p37/bin/bash' BASH='/nix/store/xg75pc4yyfd5n2fimhb98ps910q5lm5n-bash-5.2p37/bin/bash'
BITBEAM_DATABASE_URL='sqlite://./bitbeam.sqlite'
export BITBEAM_DATABASE_URL
BITBEAM_DB_TYPE='sqlite'
export BITBEAM_DB_TYPE
CC='gcc' CC='gcc'
export CC export CC
CC_FOR_BUILD='gcc' CC_FOR_BUILD='gcc'
@@ -43,7 +47,7 @@ NIX_BINTOOLS_WRAPPER_TARGET_BUILD_x86_64_unknown_linux_gnu='1'
export NIX_BINTOOLS_WRAPPER_TARGET_BUILD_x86_64_unknown_linux_gnu export NIX_BINTOOLS_WRAPPER_TARGET_BUILD_x86_64_unknown_linux_gnu
NIX_BINTOOLS_WRAPPER_TARGET_HOST_x86_64_unknown_linux_gnu='1' NIX_BINTOOLS_WRAPPER_TARGET_HOST_x86_64_unknown_linux_gnu='1'
export NIX_BINTOOLS_WRAPPER_TARGET_HOST_x86_64_unknown_linux_gnu export NIX_BINTOOLS_WRAPPER_TARGET_HOST_x86_64_unknown_linux_gnu
NIX_BUILD_CORES='12' NIX_BUILD_CORES='16'
export NIX_BUILD_CORES export NIX_BUILD_CORES
NIX_CC='/nix/store/dc6bahp3f5af2rxz3pal9m3kp4vx4rpy-gcc-wrapper-14.2.1.20250322' NIX_CC='/nix/store/dc6bahp3f5af2rxz3pal9m3kp4vx4rpy-gcc-wrapper-14.2.1.20250322'
export NIX_CC export NIX_CC
@@ -53,7 +57,7 @@ NIX_CC_WRAPPER_TARGET_BUILD_x86_64_unknown_linux_gnu='1'
export NIX_CC_WRAPPER_TARGET_BUILD_x86_64_unknown_linux_gnu export NIX_CC_WRAPPER_TARGET_BUILD_x86_64_unknown_linux_gnu
NIX_CC_WRAPPER_TARGET_HOST_x86_64_unknown_linux_gnu='1' NIX_CC_WRAPPER_TARGET_HOST_x86_64_unknown_linux_gnu='1'
export NIX_CC_WRAPPER_TARGET_HOST_x86_64_unknown_linux_gnu export NIX_CC_WRAPPER_TARGET_HOST_x86_64_unknown_linux_gnu
NIX_CFLAGS_COMPILE=' -frandom-seed=vdijc3inds -isystem /nix/store/jv45xs1p8v9mcychfgkv6vxridcn532h-openssl-3.4.1-dev/include -isystem /nix/store/jv45xs1p8v9mcychfgkv6vxridcn532h-openssl-3.4.1-dev/include' NIX_CFLAGS_COMPILE=' -frandom-seed=sc88ikf5zh -isystem /nix/store/jv45xs1p8v9mcychfgkv6vxridcn532h-openssl-3.4.1-dev/include -isystem /nix/store/jv45xs1p8v9mcychfgkv6vxridcn532h-openssl-3.4.1-dev/include'
export NIX_CFLAGS_COMPILE export NIX_CFLAGS_COMPILE
NIX_CFLAGS_COMPILE_FOR_BUILD=' -isystem /nix/store/jv45xs1p8v9mcychfgkv6vxridcn532h-openssl-3.4.1-dev/include -isystem /nix/store/jv45xs1p8v9mcychfgkv6vxridcn532h-openssl-3.4.1-dev/include -isystem /nix/store/jv45xs1p8v9mcychfgkv6vxridcn532h-openssl-3.4.1-dev/include' NIX_CFLAGS_COMPILE_FOR_BUILD=' -isystem /nix/store/jv45xs1p8v9mcychfgkv6vxridcn532h-openssl-3.4.1-dev/include -isystem /nix/store/jv45xs1p8v9mcychfgkv6vxridcn532h-openssl-3.4.1-dev/include -isystem /nix/store/jv45xs1p8v9mcychfgkv6vxridcn532h-openssl-3.4.1-dev/include'
export NIX_CFLAGS_COMPILE_FOR_BUILD export NIX_CFLAGS_COMPILE_FOR_BUILD
@@ -61,7 +65,7 @@ NIX_ENFORCE_NO_NATIVE='1'
export NIX_ENFORCE_NO_NATIVE export NIX_ENFORCE_NO_NATIVE
NIX_HARDENING_ENABLE='bindnow format fortify fortify3 pic relro stackclashprotection stackprotector strictoverflow zerocallusedregs' NIX_HARDENING_ENABLE='bindnow format fortify fortify3 pic relro stackclashprotection stackprotector strictoverflow zerocallusedregs'
export NIX_HARDENING_ENABLE export NIX_HARDENING_ENABLE
NIX_LDFLAGS='-rpath /home/work/Documents/rust/bitBeam/outputs/out/lib -L/nix/store/jn59p00df0j13ad5jn0q0irfpq6azvby-rust-default-1.86.0/lib -L/nix/store/xy8x4g472i5n1bh24c5ixhbnk6qlm9vz-openssl-3.4.1/lib -L/nix/store/jn59p00df0j13ad5jn0q0irfpq6azvby-rust-default-1.86.0/lib -L/nix/store/xy8x4g472i5n1bh24c5ixhbnk6qlm9vz-openssl-3.4.1/lib' NIX_LDFLAGS='-rpath /home/mrfluffy/bitBeam/outputs/out/lib -L/nix/store/jn59p00df0j13ad5jn0q0irfpq6azvby-rust-default-1.86.0/lib -L/nix/store/xy8x4g472i5n1bh24c5ixhbnk6qlm9vz-openssl-3.4.1/lib -L/nix/store/jn59p00df0j13ad5jn0q0irfpq6azvby-rust-default-1.86.0/lib -L/nix/store/xy8x4g472i5n1bh24c5ixhbnk6qlm9vz-openssl-3.4.1/lib'
export NIX_LDFLAGS export NIX_LDFLAGS
NIX_LDFLAGS_FOR_BUILD=' -L/nix/store/jn59p00df0j13ad5jn0q0irfpq6azvby-rust-default-1.86.0/lib -L/nix/store/xy8x4g472i5n1bh24c5ixhbnk6qlm9vz-openssl-3.4.1/lib -L/nix/store/jn59p00df0j13ad5jn0q0irfpq6azvby-rust-default-1.86.0/lib -L/nix/store/xy8x4g472i5n1bh24c5ixhbnk6qlm9vz-openssl-3.4.1/lib -L/nix/store/jn59p00df0j13ad5jn0q0irfpq6azvby-rust-default-1.86.0/lib -L/nix/store/xy8x4g472i5n1bh24c5ixhbnk6qlm9vz-openssl-3.4.1/lib' NIX_LDFLAGS_FOR_BUILD=' -L/nix/store/jn59p00df0j13ad5jn0q0irfpq6azvby-rust-default-1.86.0/lib -L/nix/store/xy8x4g472i5n1bh24c5ixhbnk6qlm9vz-openssl-3.4.1/lib -L/nix/store/jn59p00df0j13ad5jn0q0irfpq6azvby-rust-default-1.86.0/lib -L/nix/store/xy8x4g472i5n1bh24c5ixhbnk6qlm9vz-openssl-3.4.1/lib -L/nix/store/jn59p00df0j13ad5jn0q0irfpq6azvby-rust-default-1.86.0/lib -L/nix/store/xy8x4g472i5n1bh24c5ixhbnk6qlm9vz-openssl-3.4.1/lib'
export NIX_LDFLAGS_FOR_BUILD export NIX_LDFLAGS_FOR_BUILD
@@ -173,7 +177,7 @@ declare -a envHostHostHooks=('ccWrapper_addCVars' 'bintoolsWrapper_addLDVars' 'p
declare -a envHostTargetHooks=('ccWrapper_addCVars' 'bintoolsWrapper_addLDVars' 'pkgConfigWrapper_addPkgConfigPath' ) declare -a envHostTargetHooks=('ccWrapper_addCVars' 'bintoolsWrapper_addLDVars' 'pkgConfigWrapper_addPkgConfigPath' )
declare -a envTargetTargetHooks=() declare -a envTargetTargetHooks=()
declare -a fixupOutputHooks=('if [ -z "${dontPatchELF-}" ]; then patchELF "$prefix"; fi' 'if [[ -z "${noAuditTmpdir-}" && -e "$prefix" ]]; then auditTmpdir "$prefix"; fi' 'if [ -z "${dontGzipMan-}" ]; then compressManPages "$prefix"; fi' '_moveLib64' '_moveSbin' '_moveSystemdUserUnits' 'patchShebangsAuto' '_pruneLibtoolFiles' '_doStrip' ) declare -a fixupOutputHooks=('if [ -z "${dontPatchELF-}" ]; then patchELF "$prefix"; fi' 'if [[ -z "${noAuditTmpdir-}" && -e "$prefix" ]]; then auditTmpdir "$prefix"; fi' 'if [ -z "${dontGzipMan-}" ]; then compressManPages "$prefix"; fi' '_moveLib64' '_moveSbin' '_moveSystemdUserUnits' 'patchShebangsAuto' '_pruneLibtoolFiles' '_doStrip' )
guess='12' guess='16'
initialPath='/nix/store/cg09nslw3w6afyynjw484b86d47ic1cb-coreutils-9.7 /nix/store/frspb25x6v43fwv6b0wna1fm5nsqcp0b-findutils-4.10.0 /nix/store/8cs5vjkbwf2vicgms4km5k1kgbznhwip-diffutils-3.12 /nix/store/1h8gf327cgid0jgjygrj31amp63mn7a7-gnused-4.9 /nix/store/2wni3gbcf6fqwlfb2h9sv7jvqlpf1ylq-gnugrep-3.11 /nix/store/f8x04xqd2cs274k0hgfzsrms6sby2fgx-gawk-5.3.2 /nix/store/5jmcn57x2j9mkdr3j947cbja2hpxmhfn-gnutar-1.35 /nix/store/04z4rhjadrnd0w3ib2sl42pa3xjgpf7p-gzip-1.14 /nix/store/sysih19x8xx8l473d3qnr760hy758lkq-bzip2-1.0.8-bin /nix/store/agn71jakv0a9669k3zx5g9aqm2sl9z77-gnumake-4.4.1 /nix/store/xg75pc4yyfd5n2fimhb98ps910q5lm5n-bash-5.2p37 /nix/store/qd20g193gch8bj4h0h44wr97mw0bhkmf-patch-2.7.6 /nix/store/fi495i6cz40rq1axig930jzyw7ln0zhm-xz-5.8.1-bin /nix/store/hb9v0qx9vk0420z5grlnv2y5wcf6dp6i-file-5.46' initialPath='/nix/store/cg09nslw3w6afyynjw484b86d47ic1cb-coreutils-9.7 /nix/store/frspb25x6v43fwv6b0wna1fm5nsqcp0b-findutils-4.10.0 /nix/store/8cs5vjkbwf2vicgms4km5k1kgbznhwip-diffutils-3.12 /nix/store/1h8gf327cgid0jgjygrj31amp63mn7a7-gnused-4.9 /nix/store/2wni3gbcf6fqwlfb2h9sv7jvqlpf1ylq-gnugrep-3.11 /nix/store/f8x04xqd2cs274k0hgfzsrms6sby2fgx-gawk-5.3.2 /nix/store/5jmcn57x2j9mkdr3j947cbja2hpxmhfn-gnutar-1.35 /nix/store/04z4rhjadrnd0w3ib2sl42pa3xjgpf7p-gzip-1.14 /nix/store/sysih19x8xx8l473d3qnr760hy758lkq-bzip2-1.0.8-bin /nix/store/agn71jakv0a9669k3zx5g9aqm2sl9z77-gnumake-4.4.1 /nix/store/xg75pc4yyfd5n2fimhb98ps910q5lm5n-bash-5.2p37 /nix/store/qd20g193gch8bj4h0h44wr97mw0bhkmf-patch-2.7.6 /nix/store/fi495i6cz40rq1axig930jzyw7ln0zhm-xz-5.8.1-bin /nix/store/hb9v0qx9vk0420z5grlnv2y5wcf6dp6i-file-5.46'
mesonFlags='' mesonFlags=''
export mesonFlags export mesonFlags
@@ -181,7 +185,7 @@ name='nix-shell-env'
export name export name
nativeBuildInputs='/nix/store/jn59p00df0j13ad5jn0q0irfpq6azvby-rust-default-1.86.0 /nix/store/jv45xs1p8v9mcychfgkv6vxridcn532h-openssl-3.4.1-dev /nix/store/nbph466agczbny52jzk143ydcp2x14q4-pkg-config-wrapper-0.29.2 /nix/store/ml99slpq5d1yv9827m99cbdkn430kg4c-cargo-deny-0.18.2 /nix/store/dsq2icwvw7qwas4jawzywqrc04hf14ab-cargo-edit-0.13.3 /nix/store/fqdx4vdaa09fxa8cm4dfww0jh1mkz4y9-cargo-watch-8.5.3 /nix/store/xgp1b532vvisxvc4527b13gaw46s4dik-rust-analyzer-2025-05-05' nativeBuildInputs='/nix/store/jn59p00df0j13ad5jn0q0irfpq6azvby-rust-default-1.86.0 /nix/store/jv45xs1p8v9mcychfgkv6vxridcn532h-openssl-3.4.1-dev /nix/store/nbph466agczbny52jzk143ydcp2x14q4-pkg-config-wrapper-0.29.2 /nix/store/ml99slpq5d1yv9827m99cbdkn430kg4c-cargo-deny-0.18.2 /nix/store/dsq2icwvw7qwas4jawzywqrc04hf14ab-cargo-edit-0.13.3 /nix/store/fqdx4vdaa09fxa8cm4dfww0jh1mkz4y9-cargo-watch-8.5.3 /nix/store/xgp1b532vvisxvc4527b13gaw46s4dik-rust-analyzer-2025-05-05'
export nativeBuildInputs export nativeBuildInputs
out='/home/work/Documents/rust/bitBeam/outputs/out' out='/home/mrfluffy/bitBeam/outputs/out'
export out export out
outputBin='out' outputBin='out'
outputDev='out' outputDev='out'
@@ -212,7 +216,7 @@ preConfigurePhases=' updateAutotoolsGnuConfigScriptsPhase'
declare -a preFixupHooks=('_moveToShare' '_multioutDocs' '_multioutDevs' ) declare -a preFixupHooks=('_moveToShare' '_multioutDocs' '_multioutDevs' )
preferLocalBuild='1' preferLocalBuild='1'
export preferLocalBuild export preferLocalBuild
prefix='/home/work/Documents/rust/bitBeam/outputs/out' prefix='/home/mrfluffy/bitBeam/outputs/out'
declare -a propagatedBuildDepFiles=('propagated-build-build-deps' 'propagated-native-build-inputs' 'propagated-build-target-deps' ) declare -a propagatedBuildDepFiles=('propagated-build-build-deps' 'propagated-native-build-inputs' 'propagated-build-target-deps' )
propagatedBuildInputs='' propagatedBuildInputs=''
export propagatedBuildInputs export propagatedBuildInputs

4
.gitignore vendored
View File

@@ -1 +1,5 @@
/target /target
/media_store
*.log
*.splite
*.sqlite*

17
Cargo.lock generated
View File

@@ -171,6 +171,8 @@ dependencies = [
"bytes", "bytes",
"chrono", "chrono",
"extract", "extract",
"fern",
"log",
"rand 0.9.1", "rand 0.9.1",
"serde", "serde",
"sqlx", "sqlx",
@@ -397,7 +399,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e"
dependencies = [ dependencies = [
"libc", "libc",
"windows-sys 0.52.0", "windows-sys 0.59.0",
] ]
[[package]] [[package]]
@@ -448,6 +450,15 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "fern"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4316185f709b23713e41e3195f90edef7fb00c3ed4adc79769cf09cc762a3b29"
dependencies = [
"log",
]
[[package]] [[package]]
name = "flume" name = "flume"
version = "0.11.1" version = "0.11.1"
@@ -1355,7 +1366,7 @@ dependencies = [
"errno", "errno",
"libc", "libc",
"linux-raw-sys", "linux-raw-sys",
"windows-sys 0.52.0", "windows-sys 0.59.0",
] ]
[[package]] [[package]]
@@ -1820,7 +1831,7 @@ dependencies = [
"getrandom 0.3.2", "getrandom 0.3.2",
"once_cell", "once_cell",
"rustix", "rustix",
"windows-sys 0.52.0", "windows-sys 0.59.0",
] ]
[[package]] [[package]]

View File

@@ -8,6 +8,8 @@ axum = "0.8"
bytes = "1.10" bytes = "1.10"
chrono = {version = "0.4", features = ["serde"]} chrono = {version = "0.4", features = ["serde"]}
extract = "0.1" extract = "0.1"
fern = "0.7.1"
log = {version = "0.4.27", features = ["std"]}
rand = "0.9" rand = "0.9"
serde = {version = "1.0", features = ["derive"]} serde = {version = "1.0", features = ["derive"]}
sqlx = { version = "0.8", features = [ sqlx = { version = "0.8", features = [

Binary file not shown.

View File

@@ -10,50 +10,77 @@
}; };
}; };
outputs = inputs @ { self, nixpkgs, rust-overlay, ... }: outputs =
inputs@{
self,
nixpkgs,
rust-overlay,
...
}:
let let
supportedSystems = [ "x86_64-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" ]; supportedSystems = [
forEachSupportedSystem = f: nixpkgs.lib.genAttrs supportedSystems (system: f { "x86_64-linux"
pkgs = import nixpkgs { "aarch64-linux"
inherit system; "x86_64-darwin"
overlays = [ rust-overlay.overlays.default ]; "aarch64-darwin"
}; ];
}); forEachSupportedSystem =
f:
nixpkgs.lib.genAttrs supportedSystems (
system:
f {
pkgs = import nixpkgs {
inherit system;
overlays = [ rust-overlay.overlays.default ];
};
}
);
in in
{ {
# Define the package (your Rust binary) # Define the package (your Rust binary)
packages = forEachSupportedSystem ({ pkgs }: { packages = forEachSupportedSystem (
default = pkgs.rustPlatform.buildRustPackage { { pkgs }:
name = "bitBeam"; {
src = ./.; default = pkgs.rustPlatform.buildRustPackage {
name = "bitBeam";
src = ./.;
# Specify dependencies (replace with your project's actual dependencies) # Specify dependencies (replace with your project's actual dependencies)
buildInputs = [ pkgs.openssl pkgs.pkg-config ]; buildInputs = [
pkgs.openssl
pkgs.pkg-config
];
# Generate this with `cargo generate-lockfile` if you don't have it # Generate this with `cargo generate-lockfile` if you don't have it
cargoLock = { cargoLock = {
lockFile = ./Cargo.lock; lockFile = ./Cargo.lock;
};
# Optional: Override the Rust version if needed
nativeBuildInputs = [ pkgs.rust-bin.stable.latest.default ];
}; };
}
# Optional: Override the Rust version if needed );
nativeBuildInputs = [ pkgs.rust-bin.stable.latest.default ];
};
});
# Development environment (existing setup) # Development environment (existing setup)
devShells = forEachSupportedSystem ({ pkgs }: { devShells = forEachSupportedSystem (
default = pkgs.mkShell { { pkgs }:
packages = with pkgs; [ {
rust-bin.stable.latest.default default = pkgs.mkShell {
openssl packages = with pkgs; [
pkg-config rust-bin.stable.latest.default
cargo-deny openssl
cargo-edit pkg-config
cargo-watch cargo-deny
rust-analyzer cargo-edit
]; cargo-watch
RUST_SRC_PATH = "${pkgs.rust-bin.stable.latest.default}/lib/rustlib/src/rust/library"; rust-analyzer
}; ];
}); RUST_SRC_PATH = "${pkgs.rust-bin.stable.latest.default}/lib/rustlib/src/rust/library";
BITBEAM_DATABASE_URL = "sqlite://./bitbeam.sqlite";
BITBEAM_DB_TYPE = "sqlite";
};
}
);
}; };
} }

163
src/api.rs Normal file
View File

@@ -0,0 +1,163 @@
use axum::{
body::Bytes,
extract::ConnectInfo,
http::{HeaderMap, StatusCode},
response::{IntoResponse, Response},
Extension, Json,
};
use chrono::Utc;
use rand::Rng;
use sqlx::AnyPool;
use std::path::Path;
use tokio::fs;
use uuid::Uuid;
use log::{info, warn, error};
use std::net::SocketAddr;
use crate::data;
/// Returns every file record in the `files` table as a JSON array.
///
/// Logs the requesting client's IP, runs a `SELECT *` over `files`
/// mapped onto [`data::File`], and answers with `200 OK` plus the JSON
/// list on success, or `500` with a short error string when the query
/// fails.
pub async fn all_files(Extension(pool): Extension<AnyPool>, ConnectInfo(addr): ConnectInfo<SocketAddr>) -> impl IntoResponse {
    // Record which client asked for the listing.
    let client_ip = addr.ip().to_string();
    info!("Received an all_files request from IP: {}", client_ip);

    // Fetch all rows up front; the match below only shapes the response.
    let query_result = sqlx::query_as::<_, data::File>(
        r#"
        SELECT *
        FROM files
        "#,
    )
    .fetch_all(&pool)
    .await;

    match query_result {
        Ok(files) => {
            info!("DB select all success");
            (StatusCode::OK, Json(files)).into_response()
        }
        Err(e) => {
            warn!("DB select all error: {}", e);
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                "Database select all error",
            )
                .into_response()
        }
    }
}
/// Handler to upload a file
/// This function handles the file upload process.
/// It receives the file data in the request body,
/// saves it to the server's file system,
/// and stores the file metadata in the database.
/// It also logs the IP address of the client making the request.
pub async fn upload(Extension(pool): Extension<AnyPool>,
ConnectInfo(addr): ConnectInfo<SocketAddr>,
Extension(config): Extension<data::Config> ,
headers: HeaderMap,
body: Bytes,
) -> Response {
//log the IP address of the client and the call
let ip = addr.ip().to_string();
info!("Received update from IP: {}", ip);
// gets the content type from the headers
let content_type = headers
.get("content-type")
.and_then(|hv| hv.to_str().ok())
.unwrap_or("application/octet-stream")
.to_string();
// gets the download limit from the headers
let download_limit = headers
.get("download_limit") // Option<&HeaderValue>
.and_then(|hv| hv.to_str().ok()) // Option<&str>
.and_then(|s| s.parse::<i32>().ok()) // Option<u32>
.unwrap_or(1); // u32
//generate a random UUID for the file ID
let id = {
// Fallback to random UUID if body is too small
let mut rng = rand::rng();
Uuid::from_u128(rng.random::<u128>()).to_string()
};
//create the directory if it doesn't exist
let dir = Path::new(&config.data_path);
if let Err(e) = fs::create_dir_all(dir).await {
warn!("could not make dir at {} error: {}", &config.data_path ,e);
return (
axum::http::StatusCode::INTERNAL_SERVER_ERROR,
"Directory creation error",
)
.into_response();
}
//create the file path
// the file path is the directory + the file ID + file type if file type is not application/x-executable
if content_type == "application/x-executable" {
info!("File type is application/x-executable");
} else {
info!("File type is {}", content_type);
}
let file_path = dir.join(
if content_type == "application/x-executable" {
format!("{}",id)
} else {
format!("{}.{}",
id,
content_type.split('/').last().unwrap_or("bin"))
},
);
if let Err(e) = fs::write(&file_path, &body).await {
warn!("write error {}: {}", id, e);
return (
axum::http::StatusCode::INTERNAL_SERVER_ERROR,
"File write error",
)
.into_response();
}
let file_size = body.len() as i64;
let upload_time = Utc::now().timestamp(); // i64
let download_count = 0;
if let Err(e) = sqlx::query(
r#"
INSERT INTO files
(id, content_type, upload_time, download_limit, download_count, file_size)
VALUES (?, ?, ?, ?, ?, ?)
"#,
)
.bind(&id)
.bind(&content_type)
.bind(&upload_time)
.bind(download_limit)
.bind(download_count)
.bind(file_size as i64)
.execute(&pool)
.await
{
error!("DB insert error {}: {}", id, e);
return (
axum::http::StatusCode::INTERNAL_SERVER_ERROR,
"Database insert error",
)
.into_response();
}
let uploaded_file = data::File {
id,
content_type,
upload_time,
download_limit,
download_count,
file_size,
};
Json(uploaded_file).into_response()
}

37
src/data.rs Normal file
View File

@@ -0,0 +1,37 @@
use serde::Serialize;
use sqlx::FromRow;
/// A row of the `files` table: metadata for one uploaded file.
///
/// Derives `sqlx::FromRow` so it can be produced directly from a
/// database row, and `serde::Serialize` so handlers can return it to
/// clients as JSON.
#[derive(FromRow, Serialize)]
pub struct File {
/// Primary key: a UUID string, also used as the on-disk file name
/// by the upload handler.
pub id: String,
/// MIME type taken from the upload's `content-type` header.
pub content_type: String,
/// Upload timestamp as Unix epoch seconds (written via
/// `Utc::now().timestamp()` at the insert site).
pub upload_time: i64,
/// Number of downloads allowed, taken from the `download_limit`
/// request header (defaults to 1).
pub download_limit: i32,
/// Downloads performed so far; initialized to 0 on upload.
pub download_count: i32,
/// Size of the stored payload in bytes.
pub file_size: i64,
}
/// Runtime configuration, assembled from `BITBEAM_*` environment
/// variables at startup.
///
/// Derives `Clone` so a copy can be handed to request handlers via an
/// axum `Extension` layer.
#[derive(Clone)]
pub struct Config {
/// Database backend selector (e.g. "sqlite" or "postgres").
pub db_type: String,
/// Connection URL handed to the sqlx connection pool.
pub database_url: String,
/// Directory where uploaded files are written.
pub data_path: String,
/// TCP port the HTTP server listens on.
pub port: String,
/// Address the TCP listener binds to.
pub listener_addr: String,
/// Minimum log level: "debug", "info", "warn" or "error"
/// (anything else falls back to info).
pub log_level: String,
/// Path of the log file the fern file dispatcher writes to.
pub log_location: String,
}

View File

@@ -1,49 +1,67 @@
use axum::{ use axum::{
body::Bytes,
extract::DefaultBodyLimit, extract::DefaultBodyLimit,
http::{HeaderMap, StatusCode},
response::{IntoResponse, Response},
routing::{get, post}, routing::{get, post},
Extension, Json, Router, response::IntoResponse,
Extension, Router,
}; };
use chrono::{DateTime, Utc}; use sqlx::{any::AnyPoolOptions, migrate::MigrateDatabase, AnyPool, Sqlite};
use rand::Rng; use log::{info, warn, error, debug};
use serde::Serialize;
use sqlx::{any::AnyPoolOptions, migrate::MigrateDatabase, AnyPool, Encode, FromRow, Sqlite};
use std::path::Path; use std::path::Path;
use tokio::fs; use tokio::fs;
use uuid::Uuid;
#[derive(FromRow, Serialize)] use std::net::SocketAddr;
struct File { mod api;
id: String, mod data;
content_type: String,
upload_time: i64,
download_limit: i32,
download_count: i32,
file_size: i64,
}
struct Config {
db_type: String,
database_url: String,
}
/// This is the main function of the application.
/// It sets up the database connection,
/// initializes the logging system,
/// and starts the web server.
/// It uses the Axum framework to handle HTTP requests.
/// It also uses SQLx for database interactions.
/// It uses the Fern library for logging.
/// It uses the Tokio runtime for asynchronous programming.
/// It uses the Chrono library for date and time handling.
/// It uses the UUID library for generating unique identifiers.
/// It uses the Bytes library for handling byte arrays.
/// It uses the Serde library for serialization and deserialization.
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
sqlx::any::install_default_drivers(); sqlx::any::install_default_drivers();
// Read and normalize DB type and connection URL // Load the configuration from environment variables
let config = Config { let config = data::Config {
db_type: std::env::var("BITBEEM_DB_TYPE").unwrap_or_else(|_| "postgres".to_string()), db_type: std::env::var("BITBEAM_DB_TYPE").unwrap_or_else(|_| "postgres".to_string()),
database_url: match std::env::var("BITBEEM_DB_TYPE").unwrap().as_str() { database_url: match std::env::var("BITBEAM_DB_TYPE").unwrap().as_str() {
"postgres" => std::env::var("BITBEEM_DATABASE_URL") "postgres" => std::env::var("BITBEAM_DATABASE_URL")
.expect("BITBEEM_DATABASE_URL must be set for Postgres"), .expect("BITBEAM_DATABASE_URL must be set for Postgres"),
"sqlite" => std::env::var("BITBEEM_DATABASE_URL") "sqlite" => std::env::var("BITBEAM_DATABASE_URL")
.expect("BITBEEM_DATABASE_URL must be set for SQLite"), .expect("BITBEAM_DATABASE_URL must be set for SQLite"),
other => panic!("Unsupported BITBEEM_DB_TYPE: {}", other), other => panic!("Unsupported BITBEAM_DB_TYPE: {}", other),
}, },
data_path: std::env::var("BITBEAM_DATA_PATH").unwrap_or_else(|_| "./media_store".to_string()),
port: std::env::var("BITBEAM_PORT").unwrap_or_else(|_| "3000".to_string()),
listener_addr: std::env::var("BITBEAM_ADDR").unwrap_or_else(|_| "127.0.0.1".to_string()),
log_level: std::env::var("BITBEAM_LOG_LEVEL").unwrap_or_else(|_| "info".to_string()),
log_location: std::env::var("BITBEAM_LOG_LOCATION").unwrap_or_else(|_| "./bitbeam.log".to_string()),
}; };
// Setting up the logging system
// The log level is set based on the environment variable BITBEAM_LOG_LEVEL
let level = match config.log_level.as_str() {
"debug" => log::LevelFilter::Debug,
"info" => log::LevelFilter::Info,
"warn" => log::LevelFilter::Warn,
"error" => log::LevelFilter::Error,
_ => log::LevelFilter::Info,
};
// Initialize the logging system
let log_path = &config.log_location;
let _logs = init_logging(log_path, level);
info!("done loading config");
// Create the data path if it doesn't exist
// only if the db type is sqlite
// otherwise, the data path is not used
if config.db_type == "sqlite" { if config.db_type == "sqlite" {
if !Sqlite::database_exists(&config.database_url) if !Sqlite::database_exists(&config.database_url)
.await .await
@@ -51,22 +69,27 @@ async fn main() {
{ {
println!("Creating database {}", config.database_url); println!("Creating database {}", config.database_url);
match Sqlite::create_database(&config.database_url).await { match Sqlite::create_database(&config.database_url).await {
Ok(_) => println!("Create db success"), Ok(_) => info!("Create db success"),
Err(error) => panic!("error: {}", error), Err(error) => {
error!("Error creating database: {}", error);
panic!("error: {}", error)
},
} }
} else { } else {
println!("Database already exists"); info!("Database already exists");
} }
} }
// Create a generic AnyPool // Create the database connection any pool
// The connection pool is created using the database URL from the configuration
let pool: AnyPool = AnyPoolOptions::new() let pool: AnyPool = AnyPoolOptions::new()
.max_connections(5) .max_connections(5)
.connect(&config.database_url) .connect(&config.database_url)
.await .await
.expect("could not connect to database"); .expect("could not connect to database");
// Migration SQL // Setting up the database schema
// The database schema is created if it doesn't exist
if let Err(_e) = sqlx::query( if let Err(_e) = sqlx::query(
r#" r#"
CREATE TABLE IF NOT EXISTS files ( CREATE TABLE IF NOT EXISTS files (
@@ -82,120 +105,89 @@ async fn main() {
.execute(&pool) .execute(&pool)
.await .await
{ {
eprintln!("DB created"); info!("DB created");
}; };
//create the directory if it doesn't exist
let dir = Path::new(&config.data_path);
if let Err(e) = fs::create_dir_all(dir).await {
warn!("could not make dir at {} error: {}", &config.data_path ,e);
}
//let file_path = dir.join(&id);
// Setting up the web server
// The web server is created using the Axum framework
// these are the routes
let app = Router::new() let app = Router::new()
.route("/", get(|| async { "Hello, World!" })) .route("/", get(|| async { "Hello, World!" }))
.route("/upload", post(upload)) .route("/upload", post(api::upload))
.route("/all_files", get(all_files)) .route("/all_files", get(api::all_files))
.layer(DefaultBodyLimit::max(100 * 1024 * 1024)) .layer(DefaultBodyLimit::max(100 * 1024 * 1024))
.layer(Extension(pool)); .layer(Extension(pool))
.layer(Extension(config.clone()))
.into_make_service_with_connect_info::<SocketAddr>();
// The web server is started using the Axum framework
// The server listens on the address and port specified in the configuration
axum::serve( axum::serve(
tokio::net::TcpListener::bind("0.0.0.0:3000").await.unwrap(), match tokio::net::TcpListener::bind(format!("{}:{}",&config.listener_addr,&config.port)).await {
Ok(listener) => listener,
Err(e) => {
error!("Error binding to address {}:{} : {}",&config.listener_addr,&config.port, e);
return;
}
},
app, app,
) )
.await .await
.unwrap(); .unwrap();
} }
/// Handler to return all files as JSON /// This function initializes the logging system.
async fn all_files(Extension(pool): Extension<AnyPool>) -> impl IntoResponse { /// It sets up a logger that writes to both stdout and a log file.
// Run the query and map each row into a File /// It uses the Fern library for logging.
match sqlx::query_as::<_, File>( /// It formats the log messages to include the date, time, log level, target, and message.
r#" /// It also sets the log level based on the provided level filter.
SELECT * /// It takes the log file path and log level as parameters.
FROM files fn init_logging(log_file_path: &str, level: log::LevelFilter) -> Result<(), Box<dyn std::error::Error>> {
"#, // Build a Dispatch for stdout
) let stdout_dispatch = fern::Dispatch::new()
.fetch_all(&pool) .format(|out, message, record| {
.await out.finish(format_args!(
{ "[{date}][{lvl}][{target}] {msg}",
Ok(files) => (StatusCode::OK, Json(files)).into_response(), date = chrono::Local::now().format("%Y-%m-%d %H:%M:%S"),
Err(e) => { lvl = record.level(),
eprintln!("DB select all error: {}", e); target = record.target(),
( msg = message,
StatusCode::INTERNAL_SERVER_ERROR, ))
"Database select all error", })
) .level(level)
.into_response() .chain(std::io::stdout());
}
} // Build a Dispatch for a rolling log file
let file_dispatch = fern::Dispatch::new()
.format(|out, message, record| {
out.finish(format_args!(
"[{date}][{lvl}][{target}] {msg}",
date = chrono::Local::now().format("%Y-%m-%d %H:%M:%S"),
lvl = record.level(),
target = record.target(),
msg = message,
))
})
.level(level)
.chain(fern::log_file(log_file_path)?);
// Combine the stdout and file dispatches
// and apply them
// This sets up the logger to write to both stdout and the log file
fern::Dispatch::new()
.chain(stdout_dispatch)
.chain(file_dispatch)
.apply()?;
Ok(())
} }
async fn upload(Extension(pool): Extension<AnyPool>, headers: HeaderMap, body: Bytes) -> Response {
let content_type = headers
.get("content-type")
.and_then(|hv| hv.to_str().ok())
.unwrap_or("application/octet-stream")
.to_string();
let id = {
// Fallback to random UUID if body is too small
let mut rng = rand::rng();
Uuid::from_u128(rng.random::<u128>()).to_string()
};
let dir = Path::new("./media_store");
if let Err(e) = fs::create_dir_all(dir).await {
eprintln!("mkdir error: {}", e);
return (
axum::http::StatusCode::INTERNAL_SERVER_ERROR,
"Directory creation error",
)
.into_response();
}
let file_path = dir.join(&id);
if let Err(e) = fs::write(&file_path, &body).await {
eprintln!("write error {}: {}", id, e);
return (
axum::http::StatusCode::INTERNAL_SERVER_ERROR,
"File write error",
)
.into_response();
}
let file_size = body.len() as i64;
let upload_time = Utc::now().timestamp(); // i64
let download_limit = headers
.get("download_limit") // Option<&HeaderValue>
.and_then(|hv| hv.to_str().ok()) // Option<&str>
.and_then(|s| s.parse::<i32>().ok()) // Option<u32>
.unwrap_or(2); // u32 let download_count = 0;
let download_count = 0;
if let Err(e) = sqlx::query(
r#"
INSERT INTO files
(id, content_type, upload_time, download_limit, download_count, file_size)
VALUES (?, ?, ?, ?, ?, ?)
"#,
)
.bind(&id)
.bind(&content_type)
.bind(&upload_time)
.bind(download_limit)
.bind(download_count)
.bind(file_size as i64)
.execute(&pool)
.await
{
eprintln!("DB insert error {}: {}", id, e);
return (
axum::http::StatusCode::INTERNAL_SERVER_ERROR,
"Database insert error",
)
.into_response();
}
let uploaded_file = File {
id,
content_type,
upload_time,
download_limit,
download_count,
file_size,
};
Json(uploaded_file).into_response()
}