Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
456 changes: 456 additions & 0 deletions Cargo.lock

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,3 +14,5 @@ dotenvy = "0.15.7"
sqlx = { version = "0.7.2", features = [ "runtime-tokio", "tls-rustls", "postgres", "uuid" ] }
chrono = "0.4.31"
uuid = { version = "1.5.0", features = ["v4"] }
plotters = "0.3.5"
lodepng = "3.9.1"
51 changes: 51 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
####################################################################################################
## Builder
####################################################################################################
FROM rust:latest AS builder

RUN update-ca-certificates

# `apt install` on a fresh layer fails without a prior `apt-get update` (empty
# package lists) and, without `-y`, hangs waiting for interactive confirmation.
# freetype/fontconfig headers are needed to build the `plotters` crate.
RUN apt-get update && apt-get install -y pkg-config libfreetype6-dev libfontconfig1-dev

# Create appuser
ENV USER=queery
ENV UID=10001

# Build against the committed sqlx query metadata instead of a live database.
ARG SQLX_OFFLINE=true

RUN adduser \
    --disabled-password \
    --gecos "" \
    --home "/nonexistent" \
    --shell "/sbin/nologin" \
    --no-create-home \
    --uid "${UID}" \
    "${USER}"


WORKDIR /queery

COPY ./ .

RUN cargo build --release

####################################################################################################
## Final image
####################################################################################################
FROM debian:bookworm-slim

# Runtime dependency of the font rendering used by `plotters`.
# Clean the apt lists afterwards to keep the image small.
RUN apt-get update && apt-get install -y libfontconfig1 && rm -rf /var/lib/apt/lists/*

# Import the unprivileged user created in the builder stage.
COPY --from=builder /etc/passwd /etc/passwd
COPY --from=builder /etc/group /etc/group

WORKDIR /queery

# Copy our build
COPY --from=builder /queery/target/release/queery /usr/bin/queery

# Use an unprivileged user.
USER queery:queery

CMD ["queery"]
6 changes: 3 additions & 3 deletions build.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
// generated by `sqlx migrate build-script`
fn main() {
    // trigger recompilation when a new migration is added
    println!("cargo:rerun-if-changed=migrations");
}
13 changes: 12 additions & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,13 +1,24 @@
version: '3.8'
services:
  queery:
    container_name: "queery"
    image: queery:latest
    env_file: .env
    command: ["queery"]
    build:
      dockerfile: Dockerfile
    depends_on:
      - db
    restart: unless-stopped
  db:
    container_name: "queery-db"
    image: postgres:16
    env_file:
      - .env
    ports:
      - '5432:5432'
    volumes:
      - db-data:/var/lib/postgresql/data
    # duplicate `restart` key removed; unless-stopped matches the queery service
    restart: unless-stopped
volumes:
  db-data:
70 changes: 69 additions & 1 deletion src/commands.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,13 @@
use poise::command;
use poise::serenity_prelude::Timestamp;
use poise::serenity_prelude::{AttachmentType, Channel, Timestamp};
use tracing::{error, info};

use crate::log::ChangeResolution;
use crate::plotting::create_log_graph;
use crate::time_period::TimePeriod;
use crate::{Context, Error};

/// Test connection speed.
#[command(slash_command)]
pub async fn ping(ctx: Context<'_>) -> Result<(), Error> {
let before_timestamp = Timestamp::now().timestamp_millis();
Expand All @@ -23,3 +28,66 @@ pub async fn ping(ctx: Context<'_>) -> Result<(), Error> {

Ok(())
}

/// Fetch the logs for the current channel.
///
/// Queries the database for message-count logs over the requested
/// `time_period`, coarsens their resolution for longer periods, renders them
/// into a PNG graph and replies with the image as an attachment.
///
/// # Errors
/// Propagates database errors from `fetch_logs_between` and rendering errors
/// from `create_log_graph`.
#[command(slash_command, required_permissions="MANAGE_CHANNELS")]
pub async fn logs(
    ctx: Context<'_>,
    #[description="How far back to search."]
    time_period: TimePeriod,
    #[description="The channel to fetch the logs for, defaults to the current channel."]
    channel: Option<Channel>,
) -> Result<(), Error> {
    // Fall back to the channel the command was invoked in.
    let channel_id = channel.map_or_else(|| ctx.channel_id(), |channel| channel.id());

    let now_timestamp = Timestamp::now();
    let search_start_timestamp = time_period.relative_timestamp_from(now_timestamp.timestamp());

    // NOTE(review): `channel_id.0` is a u64 snowflake cast to i64 for the
    // database column — assumes it fits; verify against the DB schema.
    let logs = ctx
        .data()
        .fetch_logs_between(channel_id.0 as i64, search_start_timestamp, now_timestamp.timestamp())
        .await?;

    // Combine logs together to make displaying on the graph easier.
    // The magic numbers are resolutions in seconds — presumably 360s/720s/2520s
    // buckets for half-day/day/week; TODO confirm against `ChangeResolution`.
    // Todo: This has not been fully tested yet
    let logs = match time_period {
        TimePeriod::Hour => logs,
        TimePeriod::HalfDay => logs.change_resolution(360),
        TimePeriod::Day => logs.change_resolution(720),
        TimePeriod::Week => logs.change_resolution(2520),
    };

    if logs.is_empty() {
        ctx.say("No logs found for this period").await?;
        return Ok(());
    }

    // Resolve a human-readable channel name for the graph title; fall back to
    // a placeholder when the name is not in the cache.
    let channel_name = channel_id
        .name(ctx.cache())
        .await
        .unwrap_or_else(|| "Current Channel".to_string());

    let graph = match create_log_graph(logs, &channel_name, search_start_timestamp, now_timestamp.timestamp(), time_period) {
        Ok(graph) => graph,
        Err(err) => {
            // Log before propagating so render failures show up in tracing output.
            error!("Failed to generate log graph: {}", err);
            return Err(err)
        }
    };

    // Send the rendered PNG bytes back as a file attachment.
    ctx.send(|reply| {
        reply.attachment(AttachmentType::Bytes {
            data: graph.into(),
            filename: "graph.png".to_string(),
        })
    })
    .await?;


    // Time it took for command to be run.
    let finished_timestamp = Timestamp::now().timestamp_millis() - now_timestamp.timestamp_millis();

    info!("Command took {}ms", finished_timestamp);

    Ok(())
}
87 changes: 64 additions & 23 deletions src/database.rs
Original file line number Diff line number Diff line change
@@ -1,14 +1,24 @@
use sqlx::{postgres::PgPoolOptions, Pool, Postgres, migrate::MigrateDatabase, query_as, query};
use sqlx::migrate::MigrateDatabase;
use sqlx::postgres::PgPoolOptions;
use sqlx::{query, query_as, Pool, Postgres};
use tracing::info;
use uuid::Uuid;

use crate::{Error, log::Log};
use crate::log::Log;
use crate::Error;

/// Core data within bot, containing a
/// connection to its corresponding database
pub struct App {
pool: Pool<Postgres>,
}

impl App {
/// Create a postgres connection, creating database if nessessary.
///
/// # Errors
/// This function requires that the environment variable `DATABASE_URL`
/// is set to a url to a postgres database.
#[allow(clippy::cognitive_complexity)]
pub async fn new() -> Result<Self, Error> {
let db_url = std::env::var("DATABASE_URL")?;
Expand All @@ -19,76 +29,107 @@ impl App {
info!("Database created");
}

info!("Attempting to connect to database");
info!("Attempting to connect to database");

let pool = PgPoolOptions::new()
.max_connections(5)
.connect(&db_url)
.await?;

info!("Connected to database");
info!("Connected to database");

Ok(Self { pool })
}

/// Creates a new log for a channel at the given timstamp
///
/// # Assumptions
/// This function assumes that `timestamp` is a UNIX timestamp
pub async fn new_log(&self, timestamp: i64, channel_id: i64) -> Result<(), Error> {


query!(r#"
query!(
r#"
INSERT INTO logs (id, channel_id, count, time)
VALUES ($1, $2 , 1, $3)
"#,
Uuid::new_v4(),
channel_id,
timestamp
).execute(&self.pool).await?;
)
.execute(&self.pool)
.await?;

info!("New log created");

Ok(())
}

/// Updates a given log with a new count.
///
/// # Errors
/// This functions assumes that there is an entry with `log_id`
pub async fn update_log(&self, log_id: Uuid, new_count: i32) -> Result<(), Error> {
query!(r#"
query!(
r#"
UPDATE logs
SET count = $1
WHERE id = $2;
"#,
new_count,
log_id
).execute(&self.pool).await?;
new_count,
log_id
)
.execute(&self.pool)
.await?;

info!("Log {log_id} updated");

Ok(())
}

/// Fetches a log for a channel at `timestamp`.
///
/// # Assumptions
/// This function assumes that `timestamp` is a factor of `RESOLUTION`.
/// If it is not, this function will never return Some.
///
/// `timestamp` is expected to be a UNIX timestamp
pub async fn fetch_log(&self, channel_id: i64, timestamp: i64) -> Option<Log> {
let log = query_as! (Log,
let log = query_as!(
Log,
r#"SELECT *
FROM logs
WHERE channel_id = $1
AND time = $2"#,
channel_id,
timestamp
).fetch_one(&self.pool).await.ok();
channel_id,
timestamp
)
.fetch_one(&self.pool)
.await
.ok();

info!("Recent log fetched");

log
}

pub async fn fetch_logs(&self, channel_id: i64, lower_time_bound: i64, upper_time_bound: i64) -> Result<Vec<Log>, Error> {
#[allow(clippy::cast_possible_wrap)]
let logs: Vec<Log> = query_as! (Log,
/// Fetch all of the logs from a channel between two UNIX timestamps.
pub async fn fetch_logs_between(
&self,
channel_id: i64,
lower_time_bound: i64,
upper_time_bound: i64,
) -> Result<Vec<Log>, Error> {
let logs: Vec<Log> = query_as!(
Log,
r#"SELECT *
FROM logs
WHERE channel_id = $1
AND time BETWEEN $2 AND $3"#,
channel_id,
lower_time_bound,
upper_time_bound
).fetch_all(&self.pool).await?;
channel_id,
lower_time_bound,
upper_time_bound
)
.fetch_all(&self.pool)
.await?;

info!("Logs between `{lower_time_bound}` and `{upper_time_bound}` fetched");

Expand Down
Loading