author     Ilion Beyst <ilion.beyst@gmail.com>    2022-07-18 21:03:34 +0200
committer  Ilion Beyst <ilion.beyst@gmail.com>    2022-07-18 21:03:34 +0200
commit     7daf8f643798ce76733006f8469890bf1a3fd05e (patch)
tree       d755c4f0979f56792684b109d263624aef4baaad
parent     608d05bc167c57d190d3c06f250b5e4a5662e77e (diff)
parent     d092f5d89c0fda5cc67349d5489b4ef1b294e053 (diff)
download   planetwars.dev-7daf8f643798ce76733006f8469890bf1a3fd05e.tar.xz
           planetwars.dev-7daf8f643798ce76733006f8469890bf1a3fd05e.zip
Merge branch 'next'
-rw-r--r--  Cargo.toml | 2
-rw-r--r--  planetwars-client/Cargo.toml | 18
-rw-r--r--  planetwars-client/build.rs | 9
-rw-r--r--  planetwars-client/simplebot.toml | 2
-rw-r--r--  planetwars-client/src/main.rs | 72
-rw-r--r--  planetwars-matchrunner/src/docker_runner.rs | 14
-rw-r--r--  planetwars-server/Cargo.toml | 11
-rw-r--r--  planetwars-server/build.rs | 9
-rw-r--r--  planetwars-server/configuration.toml | 12
-rw-r--r--  planetwars-server/migrations/2022-06-10-180418_nullable_match_player_code_bundle/down.sql | 1
-rw-r--r--  planetwars-server/migrations/2022-06-10-180418_nullable_match_player_code_bundle/up.sql | 1
-rw-r--r--  planetwars-server/migrations/2022-07-04-200149_code_bundle_to_bot_version/down.sql | 6
-rw-r--r--  planetwars-server/migrations/2022-07-04-200149_code_bundle_to_bot_version/up.sql | 6
-rw-r--r--  planetwars-server/src/db/bots.rs | 40
-rw-r--r--  planetwars-server/src/db/matches.rs | 37
-rw-r--r--  planetwars-server/src/lib.rs | 111
-rw-r--r--  planetwars-server/src/modules/bot_api.rs | 283
-rw-r--r--  planetwars-server/src/modules/bots.rs | 17
-rw-r--r--  planetwars-server/src/modules/matches.rs | 123
-rw-r--r--  planetwars-server/src/modules/mod.rs | 2
-rw-r--r--  planetwars-server/src/modules/ranking.rs | 37
-rw-r--r--  planetwars-server/src/modules/registry.rs | 440
-rw-r--r--  planetwars-server/src/routes/bots.rs | 32
-rw-r--r--  planetwars-server/src/routes/demo.rs | 44
-rw-r--r--  planetwars-server/src/routes/matches.rs | 113
-rw-r--r--  planetwars-server/src/routes/users.rs | 6
-rw-r--r--  planetwars-server/src/schema.rs | 23
-rw-r--r--  proto/bot_api.proto | 36
-rw-r--r--  web/pw-visualizer/assets/res/earth.png | bin 0 -> 12873 bytes
-rw-r--r--  web/pw-visualizer/assets/res/ship.png | bin 0 -> 4963 bytes
-rw-r--r--  web/pw-visualizer/assets/res/ship.svg | 51
-rw-r--r--  web/pw-visualizer/assets/shaders/frag/image.glsl | 1
-rw-r--r--  web/pw-visualizer/assets/shaders/frag/masked_image.glsl | 21
-rw-r--r--  web/pw-visualizer/src/assets.ts | 5
-rw-r--r--  web/pw-visualizer/src/index.ts | 260
-rw-r--r--  web/pw-visualizer/src/webgl/index.ts | 2
-rw-r--r--  web/pw-visualizer/src/webgl/text.ts | 22
-rw-r--r--  web/pw-visualizer/src/webgl/texture.ts | 26
38 files changed, 1445 insertions(+), 450 deletions(-)
diff --git a/Cargo.toml b/Cargo.toml
index cebf247..0fe450a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -3,6 +3,6 @@
members = [
"planetwars-rules",
"planetwars-matchrunner",
- "planetwars-cli",
"planetwars-server",
+ "planetwars-client",
]
diff --git a/planetwars-client/Cargo.toml b/planetwars-client/Cargo.toml
new file mode 100644
index 0000000..9c68391
--- /dev/null
+++ b/planetwars-client/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "planetwars-client"
+version = "0.0.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+tokio = { version = "1.15", features = ["full"] }
+tokio-stream = "0.1.9"
+prost = "0.10"
+tonic = "0.7.2"
+serde = { version = "1.0", features = ["derive"] }
+toml = "0.5"
+planetwars-matchrunner = { path = "../planetwars-matchrunner" }
+
+[build-dependencies]
+tonic-build = "0.7.2"
diff --git a/planetwars-client/build.rs b/planetwars-client/build.rs
new file mode 100644
index 0000000..acabd08
--- /dev/null
+++ b/planetwars-client/build.rs
@@ -0,0 +1,9 @@
+extern crate tonic_build;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ tonic_build::configure()
+ .build_server(false)
+ .build_client(true)
+ .compile(&["../proto/bot_api.proto"], &["../proto"])?;
+ Ok(())
+}
diff --git a/planetwars-client/simplebot.toml b/planetwars-client/simplebot.toml
new file mode 100644
index 0000000..dfee25c
--- /dev/null
+++ b/planetwars-client/simplebot.toml
@@ -0,0 +1,2 @@
+name = "simplebot"
+command = ["python", "../simplebot/simplebot.py"]
diff --git a/planetwars-client/src/main.rs b/planetwars-client/src/main.rs
new file mode 100644
index 0000000..3ece5b3
--- /dev/null
+++ b/planetwars-client/src/main.rs
@@ -0,0 +1,72 @@
+pub mod pb {
+ tonic::include_proto!("grpc.planetwars.bot_api");
+}
+
+use pb::bot_api_service_client::BotApiServiceClient;
+use planetwars_matchrunner::bot_runner::Bot;
+use serde::Deserialize;
+use std::{path::PathBuf, time::Duration};
+use tokio::sync::mpsc;
+use tokio_stream::wrappers::UnboundedReceiverStream;
+use tonic::{metadata::MetadataValue, transport::Channel, Request, Status};
+
+#[derive(Deserialize)]
+struct BotConfig {
+ #[allow(dead_code)]
+ name: String,
+ command: Vec<String>,
+}
+
+#[tokio::main]
+async fn main() {
+ let content = std::fs::read_to_string("simplebot.toml").unwrap();
+ let bot_config: BotConfig = toml::from_str(&content).unwrap();
+
+ let channel = Channel::from_static("http://localhost:50051")
+ .connect()
+ .await
+ .unwrap();
+
+ let created_match = create_match(channel.clone()).await.unwrap();
+ run_player(bot_config, created_match.player_key, channel).await;
+ tokio::time::sleep(Duration::from_secs(1)).await;
+}
+
+async fn create_match(channel: Channel) -> Result<pb::CreatedMatch, Status> {
+ let mut client = BotApiServiceClient::new(channel);
+ let res = client
+ .create_match(Request::new(pb::MatchRequest {
+ opponent_name: "simplebot".to_string(),
+ }))
+ .await;
+ res.map(|response| response.into_inner())
+}
+
+async fn run_player(bot_config: BotConfig, player_key: String, channel: Channel) {
+ let mut client = BotApiServiceClient::with_interceptor(channel, |mut req: Request<()>| {
+ let player_key: MetadataValue<_> = player_key.parse().unwrap();
+ req.metadata_mut().insert("player_key", player_key);
+ Ok(req)
+ });
+
+ let mut bot_process = Bot {
+ working_dir: PathBuf::from("."),
+ argv: bot_config.command,
+ }
+ .spawn_process();
+
+ let (tx, rx) = mpsc::unbounded_channel();
+ let mut stream = client
+ .connect_bot(UnboundedReceiverStream::new(rx))
+ .await
+ .unwrap()
+ .into_inner();
+ while let Some(message) = stream.message().await.unwrap() {
+ let moves = bot_process.communicate(&message.content).await.unwrap();
+ tx.send(pb::PlayerRequestResponse {
+ request_id: message.request_id,
+ content: moves.as_bytes().to_vec(),
+ })
+ .unwrap();
+ }
+}
diff --git a/planetwars-matchrunner/src/docker_runner.rs b/planetwars-matchrunner/src/docker_runner.rs
index 63a7a67..2d93273 100644
--- a/planetwars-matchrunner/src/docker_runner.rs
+++ b/planetwars-matchrunner/src/docker_runner.rs
@@ -1,5 +1,4 @@
use std::io;
-use std::path::PathBuf;
use std::pin::Pin;
use std::sync::{Arc, Mutex};
@@ -19,8 +18,9 @@ use crate::BotSpec;
#[derive(Clone, Debug)]
pub struct DockerBotSpec {
pub image: String,
- pub code_path: PathBuf,
- pub argv: Vec<String>,
+ pub binds: Option<Vec<String>>,
+ pub argv: Option<Vec<String>>,
+ pub working_dir: Option<String>,
}
#[async_trait]
@@ -42,14 +42,12 @@ async fn spawn_docker_process(
params: &DockerBotSpec,
) -> Result<ContainerProcess, bollard::errors::Error> {
let docker = Docker::connect_with_socket_defaults()?;
- let bot_code_dir = std::fs::canonicalize(&params.code_path).unwrap();
- let code_dir_str = bot_code_dir.as_os_str().to_str().unwrap();
let memory_limit = 512 * 1024 * 1024; // 512MB
let config = container::Config {
image: Some(params.image.clone()),
host_config: Some(bollard::models::HostConfig {
- binds: Some(vec![format!("{}:{}", code_dir_str, "/workdir")]),
+ binds: params.binds.clone(),
network_mode: Some("none".to_string()),
memory: Some(memory_limit),
memory_swap: Some(memory_limit),
@@ -59,8 +57,8 @@ async fn spawn_docker_process(
// cpu_quota: Some(10_000),
..Default::default()
}),
- working_dir: Some("/workdir".to_string()),
- cmd: Some(params.argv.clone()),
+ working_dir: params.working_dir.clone(),
+ cmd: params.argv.clone(),
attach_stdin: Some(true),
attach_stdout: Some(true),
attach_stderr: Some(true),
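
Note: the DockerBotSpec rework above replaces the hard-coded code_path/"/workdir" mounting with optional binds, argv and working_dir fields. A minimal sketch of how the old behaviour maps onto the new fields, mirroring python_docker_bot_spec in planetwars-server/src/modules/matches.rs further down in this diff (the image name is the one from configuration.toml; the path handling is simplified and illustrative):

    use planetwars_matchrunner::docker_runner::DockerBotSpec;

    // Sketch: reproduce the previous "mount the code dir at /workdir and run
    // `python bot.py`" behaviour with the new optional fields.
    // `code_dir` is assumed to be an absolute path (the old code canonicalized it).
    fn python_workdir_spec(code_dir: &str) -> DockerBotSpec {
        DockerBotSpec {
            image: "python:3.10-slim-buster".to_string(),
            binds: Some(vec![format!("{}:/workdir", code_dir)]),
            argv: Some(vec!["python".to_string(), "bot.py".to_string()]),
            working_dir: Some("/workdir".to_string()),
        }
    }
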
diff --git a/planetwars-server/Cargo.toml b/planetwars-server/Cargo.toml
index 4c6ddfc..f2444c1 100644
--- a/planetwars-server/Cargo.toml
+++ b/planetwars-server/Cargo.toml
@@ -6,9 +6,11 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
+futures = "0.3"
tokio = { version = "1.15", features = ["full"] }
+tokio-stream = "0.1.9"
hyper = "0.14"
-axum = { version = "0.4", features = ["json", "headers", "multipart"] }
+axum = { version = "0.5", features = ["json", "headers", "multipart"] }
diesel = { version = "1.4.4", features = ["postgres", "chrono"] }
diesel-derive-enum = { version = "1.1", features = ["postgres"] }
bb8 = "0.7"
@@ -26,9 +28,16 @@ toml = "0.5"
planetwars-matchrunner = { path = "../planetwars-matchrunner" }
config = { version = "0.12", features = ["toml"] }
thiserror = "1.0.31"
+sha2 = "0.10"
+tokio-util = { version="0.7.3", features=["io"] }
+prost = "0.10"
+tonic = "0.7.2"
# TODO: remove me
shlex = "1.1"
+[build-dependencies]
+tonic-build = "0.7.2"
+
[dev-dependencies]
parking_lot = "0.11"
diff --git a/planetwars-server/build.rs b/planetwars-server/build.rs
new file mode 100644
index 0000000..97bf355
--- /dev/null
+++ b/planetwars-server/build.rs
@@ -0,0 +1,9 @@
+extern crate tonic_build;
+
+fn main() -> Result<(), Box<dyn std::error::Error>> {
+ tonic_build::configure()
+ .build_server(true)
+ .build_client(false)
+ .compile(&["../proto/bot_api.proto"], &["../proto"])?;
+ Ok(())
+}
diff --git a/planetwars-server/configuration.toml b/planetwars-server/configuration.toml
index ee7002e..13012f9 100644
--- a/planetwars-server/configuration.toml
+++ b/planetwars-server/configuration.toml
@@ -1 +1,13 @@
database_url = "postgresql://planetwars:planetwars@localhost/planetwars"
+
+python_runner_image = "python:3.10-slim-buster"
+container_registry_url = "localhost:9001"
+
+bots_directory = "./data/bots"
+match_logs_directory = "./data/matches"
+maps_directory = "./data/maps"
+
+registry_directory = "./data/registry"
+registry_admin_password ="verysecretadminpassword"
+
+ranker_enabled = false
diff --git a/planetwars-server/migrations/2022-06-10-180418_nullable_match_player_code_bundle/down.sql b/planetwars-server/migrations/2022-06-10-180418_nullable_match_player_code_bundle/down.sql
new file mode 100644
index 0000000..bb0b613
--- /dev/null
+++ b/planetwars-server/migrations/2022-06-10-180418_nullable_match_player_code_bundle/down.sql
@@ -0,0 +1 @@
+ALTER TABLE match_players ALTER COLUMN code_bundle_id SET NOT NULL;
diff --git a/planetwars-server/migrations/2022-06-10-180418_nullable_match_player_code_bundle/up.sql b/planetwars-server/migrations/2022-06-10-180418_nullable_match_player_code_bundle/up.sql
new file mode 100644
index 0000000..86ab65d
--- /dev/null
+++ b/planetwars-server/migrations/2022-06-10-180418_nullable_match_player_code_bundle/up.sql
@@ -0,0 +1 @@
+ALTER TABLE match_players ALTER COLUMN code_bundle_id DROP NOT NULL;
diff --git a/planetwars-server/migrations/2022-07-04-200149_code_bundle_to_bot_version/down.sql b/planetwars-server/migrations/2022-07-04-200149_code_bundle_to_bot_version/down.sql
new file mode 100644
index 0000000..89058fe
--- /dev/null
+++ b/planetwars-server/migrations/2022-07-04-200149_code_bundle_to_bot_version/down.sql
@@ -0,0 +1,6 @@
+ALTER TABLE match_players RENAME COLUMN bot_version_id TO code_bundle_id;
+
+ALTER TABLE bot_versions DROP COLUMN container_digest;
+ALTER TABLE bot_versions RENAME COLUMN code_bundle_path TO path;
+ALTER TABLE bot_versions ALTER COLUMN path SET NOT NULL;
+ALTER TABLE bot_versions RENAME TO code_bundles;
diff --git a/planetwars-server/migrations/2022-07-04-200149_code_bundle_to_bot_version/up.sql b/planetwars-server/migrations/2022-07-04-200149_code_bundle_to_bot_version/up.sql
new file mode 100644
index 0000000..91afc0c
--- /dev/null
+++ b/planetwars-server/migrations/2022-07-04-200149_code_bundle_to_bot_version/up.sql
@@ -0,0 +1,6 @@
+ALTER TABLE code_bundles RENAME TO bot_versions;
+ALTER TABLE bot_versions RENAME COLUMN path to code_bundle_path;
+ALTER TABLE bot_versions ALTER COLUMN code_bundle_path DROP NOT NULL;
+ALTER TABLE bot_versions ADD COLUMN container_digest TEXT;
+
+ALTER TABLE match_players RENAME COLUMN code_bundle_id TO bot_version_id;
diff --git a/planetwars-server/src/db/bots.rs b/planetwars-server/src/db/bots.rs
index 108c692..a112a9a 100644
--- a/planetwars-server/src/db/bots.rs
+++ b/planetwars-server/src/db/bots.rs
@@ -1,7 +1,7 @@
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
-use crate::schema::{bots, code_bundles};
+use crate::schema::{bot_versions, bots};
use chrono;
#[derive(Insertable)]
@@ -44,38 +44,40 @@ pub fn find_all_bots(conn: &PgConnection) -> QueryResult<Vec<Bot>> {
}
#[derive(Insertable)]
-#[table_name = "code_bundles"]
-pub struct NewCodeBundle<'a> {
+#[table_name = "bot_versions"]
+pub struct NewBotVersion<'a> {
pub bot_id: Option<i32>,
- pub path: &'a str,
+ pub code_bundle_path: Option<&'a str>,
+ pub container_digest: Option<&'a str>,
}
-#[derive(Queryable, Serialize, Deserialize, Debug)]
-pub struct CodeBundle {
+#[derive(Queryable, Serialize, Deserialize, Clone, Debug)]
+pub struct BotVersion {
pub id: i32,
pub bot_id: Option<i32>,
- pub path: String,
+ pub code_bundle_path: Option<String>,
pub created_at: chrono::NaiveDateTime,
+ pub container_digest: Option<String>,
}
-pub fn create_code_bundle(
- new_code_bundle: &NewCodeBundle,
+pub fn create_bot_version(
+ new_bot_version: &NewBotVersion,
conn: &PgConnection,
-) -> QueryResult<CodeBundle> {
- diesel::insert_into(code_bundles::table)
- .values(new_code_bundle)
+) -> QueryResult<BotVersion> {
+ diesel::insert_into(bot_versions::table)
+ .values(new_bot_version)
.get_result(conn)
}
-pub fn find_bot_code_bundles(bot_id: i32, conn: &PgConnection) -> QueryResult<Vec<CodeBundle>> {
- code_bundles::table
- .filter(code_bundles::bot_id.eq(bot_id))
+pub fn find_bot_versions(bot_id: i32, conn: &PgConnection) -> QueryResult<Vec<BotVersion>> {
+ bot_versions::table
+ .filter(bot_versions::bot_id.eq(bot_id))
.get_results(conn)
}
-pub fn active_code_bundle(bot_id: i32, conn: &PgConnection) -> QueryResult<CodeBundle> {
- code_bundles::table
- .filter(code_bundles::bot_id.eq(bot_id))
- .order(code_bundles::created_at.desc())
+pub fn active_bot_version(bot_id: i32, conn: &PgConnection) -> QueryResult<BotVersion> {
+ bot_versions::table
+ .filter(bot_versions::bot_id.eq(bot_id))
+ .order(bot_versions::created_at.desc())
.first(conn)
}
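
Note: after this change a bot version is either a legacy code bundle or a container image pushed to the registry, which is why both code_bundle_path and container_digest are nullable. A sketch of the two shapes as they might be built inside planetwars-server (argument values are illustrative; the real call sites are save_code_string in modules/bots.rs and put_manifest in modules/registry.rs below):

    use crate::db::bots::NewBotVersion;

    // Sketch: the two kinds of bot version the reworked schema supports.
    fn example_versions<'a>(
        bot_id: i32,
        bundle_name: &'a str,    // directory name under bots_directory
        content_digest: &'a str, // "sha256:<hex>" digest from the registry
    ) -> (NewBotVersion<'a>, NewBotVersion<'a>) {
        // Legacy code-bundle version
        let from_code_bundle = NewBotVersion {
            bot_id: Some(bot_id),
            code_bundle_path: Some(bundle_name),
            container_digest: None,
        };
        // Container-image version
        let from_registry_push = NewBotVersion {
            bot_id: Some(bot_id),
            code_bundle_path: None,
            container_digest: Some(content_digest),
        };
        (from_code_bundle, from_registry_push)
    }
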
diff --git a/planetwars-server/src/db/matches.rs b/planetwars-server/src/db/matches.rs
index ee25e85..6590a37 100644
--- a/planetwars-server/src/db/matches.rs
+++ b/planetwars-server/src/db/matches.rs
@@ -6,9 +6,9 @@ use diesel::{
};
use diesel::{Connection, GroupedBy, PgConnection, QueryResult};
-use crate::schema::{bots, code_bundles, match_players, matches};
+use crate::schema::{bot_versions, bots, match_players, matches};
-use super::bots::{Bot, CodeBundle};
+use super::bots::{Bot, BotVersion};
#[derive(Insertable)]
#[table_name = "matches"]
@@ -25,7 +25,7 @@ pub struct NewMatchPlayer {
/// player id within the match
pub player_id: i32,
/// id of the bot behind this player
- pub code_bundle_id: i32,
+ pub bot_version_id: Option<i32>,
}
#[derive(Queryable, Identifiable)]
@@ -44,11 +44,11 @@ pub struct MatchBase {
pub struct MatchPlayer {
pub match_id: i32,
pub player_id: i32,
- pub code_bundle_id: i32,
+ pub code_bundle_id: Option<i32>,
}
pub struct MatchPlayerData {
- pub code_bundle_id: i32,
+ pub code_bundle_id: Option<i32>,
}
pub fn create_match(
@@ -67,7 +67,7 @@ pub fn create_match(
.map(|(num, player_data)| NewMatchPlayer {
match_id: match_base.id,
player_id: num as i32,
- code_bundle_id: player_data.code_bundle_id,
+ bot_version_id: player_data.code_bundle_id,
})
.collect::<Vec<_>>();
@@ -92,8 +92,11 @@ pub fn list_matches(conn: &PgConnection) -> QueryResult<Vec<FullMatchData>> {
let matches = matches::table.get_results::<MatchBase>(conn)?;
let match_players = MatchPlayer::belonging_to(&matches)
- .inner_join(code_bundles::table)
- .left_join(bots::table.on(code_bundles::bot_id.eq(bots::id.nullable())))
+ .left_join(
+ bot_versions::table
+ .on(match_players::bot_version_id.eq(bot_versions::id.nullable())),
+ )
+ .left_join(bots::table.on(bot_versions::bot_id.eq(bots::id.nullable())))
.load::<FullMatchPlayerData>(conn)?
.grouped_by(&matches);
@@ -120,7 +123,7 @@ pub struct FullMatchData {
// #[primary_key(base.match_id, base::player_id)]
pub struct FullMatchPlayerData {
pub base: MatchPlayer,
- pub code_bundle: CodeBundle,
+ pub bot_version: Option<BotVersion>,
pub bot: Option<Bot>,
}
@@ -142,8 +145,11 @@ pub fn find_match(id: i32, conn: &PgConnection) -> QueryResult<FullMatchData> {
let match_base = matches::table.find(id).get_result::<MatchBase>(conn)?;
let match_players = MatchPlayer::belonging_to(&match_base)
- .inner_join(code_bundles::table)
- .left_join(bots::table.on(code_bundles::bot_id.eq(bots::id.nullable())))
+ .left_join(
+ bot_versions::table
+ .on(match_players::bot_version_id.eq(bot_versions::id.nullable())),
+ )
+ .left_join(bots::table.on(bot_versions::bot_id.eq(bots::id.nullable())))
.load::<FullMatchPlayerData>(conn)?;
let res = FullMatchData {
@@ -160,14 +166,17 @@ pub fn find_match_base(id: i32, conn: &PgConnection) -> QueryResult<MatchBase> {
}
pub enum MatchResult {
- Finished { winner: Option<i32> }
+ Finished { winner: Option<i32> },
}
pub fn save_match_result(id: i32, result: MatchResult, conn: &PgConnection) -> QueryResult<()> {
let MatchResult::Finished { winner } = result;
diesel::update(matches::table.find(id))
- .set((matches::winner.eq(winner), matches::state.eq(MatchState::Finished)))
+ .set((
+ matches::winner.eq(winner),
+ matches::state.eq(MatchState::Finished),
+ ))
.execute(conn)?;
Ok(())
-}
\ No newline at end of file
+}
diff --git a/planetwars-server/src/lib.rs b/planetwars-server/src/lib.rs
index 28d7a76..7bc50f3 100644
--- a/planetwars-server/src/lib.rs
+++ b/planetwars-server/src/lib.rs
@@ -8,33 +8,63 @@ pub mod routes;
pub mod schema;
pub mod util;
-use std::net::SocketAddr;
use std::ops::Deref;
+use std::path::PathBuf;
+use std::sync::Arc;
+use std::{fs, net::SocketAddr};
use bb8::{Pool, PooledConnection};
use bb8_diesel::{self, DieselConnectionManager};
use config::ConfigError;
use diesel::{Connection, PgConnection};
use modules::ranking::run_ranker;
-use serde::Deserialize;
+use modules::registry::registry_service;
+use serde::{Deserialize, Serialize};
use axum::{
async_trait,
extract::{Extension, FromRequest, RequestParts},
http::StatusCode,
routing::{get, post},
- AddExtensionLayer, Router,
+ Router,
};
-// TODO: make these configurable
-const BOTS_DIR: &str = "./data/bots";
-const MATCHES_DIR: &str = "./data/matches";
-const MAPS_DIR: &str = "./data/maps";
-const SIMPLEBOT_PATH: &str = "../simplebot/simplebot.py";
-
type ConnectionPool = bb8::Pool<DieselConnectionManager<PgConnection>>;
-pub async fn seed_simplebot(pool: &ConnectionPool) {
+// this should probably be modularized a bit as the config grows
+#[derive(Serialize, Deserialize)]
+pub struct GlobalConfig {
+ /// url for the postgres database
+ pub database_url: String,
+
+ /// which image to use for running python bots
+ pub python_runner_image: String,
+
+ /// url for the internal container registry
+ /// this will be used when running bots
+ pub container_registry_url: String,
+
+ /// directory where bot code will be stored
+ pub bots_directory: String,
+ /// directory where match logs will be stored
+ pub match_logs_directory: String,
+ /// directory where map files will be stored
+ pub maps_directory: String,
+
+ /// base directory for registry data
+ pub registry_directory: String,
+ /// secret admin password for internal docker login
+ /// used to pull bots when running matches
+ pub registry_admin_password: String,
+
+ /// Whether to run the ranker
+ pub ranker_enabled: bool,
+}
+
+// TODO: do we still need this? Is there a better way?
+const SIMPLEBOT_PATH: &str = "../simplebot/simplebot.py";
+
+pub async fn seed_simplebot(config: &GlobalConfig, pool: &ConnectionPool) {
let conn = pool.get().await.expect("could not get database connection");
// This transaction is expected to fail when simplebot already exists.
let _res = conn.transaction::<(), diesel::result::Error, _>(|| {
@@ -50,7 +80,7 @@ pub async fn seed_simplebot(pool: &ConnectionPool) {
let simplebot_code =
std::fs::read_to_string(SIMPLEBOT_PATH).expect("could not read simplebot code");
- modules::bots::save_code_bundle(&simplebot_code, Some(simplebot.id), &conn)?;
+ modules::bots::save_code_string(&simplebot_code, Some(simplebot.id), &conn, config)?;
println!("initialized simplebot");
@@ -60,13 +90,26 @@ pub async fn seed_simplebot(pool: &ConnectionPool) {
pub type DbPool = Pool<DieselConnectionManager<PgConnection>>;
-pub async fn prepare_db(database_url: &str) -> DbPool {
- let manager = DieselConnectionManager::<PgConnection>::new(database_url);
+pub async fn prepare_db(config: &GlobalConfig) -> DbPool {
+ let manager = DieselConnectionManager::<PgConnection>::new(&config.database_url);
let pool = bb8::Pool::builder().build(manager).await.unwrap();
- seed_simplebot(&pool).await;
+ seed_simplebot(config, &pool).await;
pool
}
+// create all directories required for further operation
+fn init_directories(config: &GlobalConfig) -> std::io::Result<()> {
+ fs::create_dir_all(&config.bots_directory)?;
+ fs::create_dir_all(&config.maps_directory)?;
+ fs::create_dir_all(&config.match_logs_directory)?;
+
+ let registry_path = PathBuf::from(&config.registry_directory);
+ fs::create_dir_all(registry_path.join("sha256"))?;
+ fs::create_dir_all(registry_path.join("manifests"))?;
+ fs::create_dir_all(registry_path.join("uploads"))?;
+ Ok(())
+}
+
pub fn api() -> Router {
Router::new()
.route("/register", post(routes::users::register))
@@ -82,10 +125,7 @@ pub fn api() -> Router {
"/bots/:bot_id/upload",
post(routes::bots::upload_code_multipart),
)
- .route(
- "/matches",
- get(routes::matches::list_matches).post(routes::matches::play_match),
- )
+ .route("/matches", get(routes::matches::list_matches))
.route("/matches/:match_id", get(routes::matches::get_match_data))
.route(
"/matches/:match_id/log",
@@ -96,7 +136,7 @@ pub fn api() -> Router {
.route("/save_bot", post(routes::bots::save_bot))
}
-pub fn get_config() -> Result<Configuration, ConfigError> {
+pub fn get_config() -> Result<GlobalConfig, ConfigError> {
config::Config::builder()
.add_source(config::File::with_name("configuration.toml"))
.add_source(config::Environment::with_prefix("PLANETWARS"))
@@ -104,15 +144,35 @@ pub fn get_config() -> Result<Configuration, ConfigError> {
.try_deserialize()
}
+async fn run_registry(config: Arc<GlobalConfig>, db_pool: DbPool) {
+ // TODO: put in config
+ let addr = SocketAddr::from(([127, 0, 0, 1], 9001));
+
+ axum::Server::bind(&addr)
+ .serve(
+ registry_service()
+ .layer(Extension(db_pool))
+ .layer(Extension(config))
+ .into_make_service(),
+ )
+ .await
+ .unwrap();
+}
+
pub async fn run_app() {
- let configuration = get_config().unwrap();
- let db_pool = prepare_db(&configuration.database_url).await;
+ let global_config = Arc::new(get_config().unwrap());
+ let db_pool = prepare_db(&global_config).await;
+ init_directories(&global_config).unwrap();
- tokio::spawn(run_ranker(db_pool.clone()));
+ if global_config.ranker_enabled {
+ tokio::spawn(run_ranker(global_config.clone(), db_pool.clone()));
+ }
+ tokio::spawn(run_registry(global_config.clone(), db_pool.clone()));
let api_service = Router::new()
.nest("/api", api())
- .layer(AddExtensionLayer::new(db_pool))
+ .layer(Extension(db_pool))
+ .layer(Extension(global_config))
.into_make_service();
// TODO: put in config
@@ -121,11 +181,6 @@ pub async fn run_app() {
axum::Server::bind(&addr).serve(api_service).await.unwrap();
}
-#[derive(Deserialize)]
-pub struct Configuration {
- pub database_url: String,
-}
-
// we can also write a custom extractor that grabs a connection from the pool
// which setup is appropriate depends on your application
pub struct DatabaseConnection(PooledConnection<'static, DieselConnectionManager<PgConnection>>);
diff --git a/planetwars-server/src/modules/bot_api.rs b/planetwars-server/src/modules/bot_api.rs
new file mode 100644
index 0000000..33f5d87
--- /dev/null
+++ b/planetwars-server/src/modules/bot_api.rs
@@ -0,0 +1,283 @@
+pub mod pb {
+ tonic::include_proto!("grpc.planetwars.bot_api");
+}
+
+use std::collections::HashMap;
+use std::net::SocketAddr;
+use std::sync::{Arc, Mutex};
+use std::time::Duration;
+
+use runner::match_context::{EventBus, PlayerHandle, RequestError, RequestMessage};
+use runner::match_log::MatchLogger;
+use tokio::sync::{mpsc, oneshot};
+use tokio_stream::wrappers::UnboundedReceiverStream;
+use tonic;
+use tonic::transport::Server;
+use tonic::{Request, Response, Status, Streaming};
+
+use planetwars_matchrunner as runner;
+
+use crate::db;
+use crate::util::gen_alphanumeric;
+use crate::ConnectionPool;
+use crate::GlobalConfig;
+
+use super::matches::{MatchPlayer, RunMatch};
+
+pub struct BotApiServer {
+ conn_pool: ConnectionPool,
+ runner_config: Arc<GlobalConfig>,
+ router: PlayerRouter,
+}
+
+/// Routes players to their handler
+#[derive(Clone)]
+struct PlayerRouter {
+ routing_table: Arc<Mutex<HashMap<String, SyncThingData>>>,
+}
+
+impl PlayerRouter {
+ pub fn new() -> Self {
+ PlayerRouter {
+ routing_table: Arc::new(Mutex::new(HashMap::new())),
+ }
+ }
+}
+
+impl Default for PlayerRouter {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+// TODO: implement a way to expire entries
+impl PlayerRouter {
+ fn put(&self, player_key: String, entry: SyncThingData) {
+ let mut routing_table = self.routing_table.lock().unwrap();
+ routing_table.insert(player_key, entry);
+ }
+
+ fn take(&self, player_key: &str) -> Option<SyncThingData> {
+ // TODO: this design does not allow for reconnects. Is this desired?
+ let mut routing_table = self.routing_table.lock().unwrap();
+ routing_table.remove(player_key)
+ }
+}
+
+#[tonic::async_trait]
+impl pb::bot_api_service_server::BotApiService for BotApiServer {
+ type ConnectBotStream = UnboundedReceiverStream<Result<pb::PlayerRequest, Status>>;
+
+ async fn connect_bot(
+ &self,
+ req: Request<Streaming<pb::PlayerRequestResponse>>,
+ ) -> Result<Response<Self::ConnectBotStream>, Status> {
+ // TODO: clean up errors
+ let player_key = req
+ .metadata()
+ .get("player_key")
+ .ok_or_else(|| Status::unauthenticated("no player_key provided"))?;
+
+ let player_key_str = player_key
+ .to_str()
+ .map_err(|_| Status::invalid_argument("unreadable string"))?;
+
+ let sync_data = self
+ .router
+ .take(player_key_str)
+ .ok_or_else(|| Status::not_found("player_key not found"))?;
+
+ let stream = req.into_inner();
+
+ sync_data.tx.send(stream).unwrap();
+ Ok(Response::new(UnboundedReceiverStream::new(
+ sync_data.server_messages,
+ )))
+ }
+
+ async fn create_match(
+ &self,
+ req: Request<pb::MatchRequest>,
+ ) -> Result<Response<pb::CreatedMatch>, Status> {
+ // TODO: unify with matchrunner module
+ let conn = self.conn_pool.get().await.unwrap();
+
+ let match_request = req.get_ref();
+
+ let opponent_bot = db::bots::find_bot_by_name(&match_request.opponent_name, &conn)
+ .map_err(|_| Status::not_found("opponent not found"))?;
+ let opponent_bot_version = db::bots::active_bot_version(opponent_bot.id, &conn)
+ .map_err(|_| Status::not_found("no opponent version found"))?;
+
+ let player_key = gen_alphanumeric(32);
+
+ let remote_bot_spec = Box::new(RemoteBotSpec {
+ player_key: player_key.clone(),
+ router: self.router.clone(),
+ });
+ let run_match = RunMatch::from_players(
+ self.runner_config.clone(),
+ vec![
+ MatchPlayer::BotSpec {
+ spec: remote_bot_spec,
+ },
+ MatchPlayer::BotVersion {
+ bot: Some(opponent_bot),
+ version: opponent_bot_version,
+ },
+ ],
+ );
+ let (created_match, _) = run_match
+ .run(self.conn_pool.clone())
+ .await
+ .expect("failed to create match");
+
+ Ok(Response::new(pb::CreatedMatch {
+ match_id: created_match.base.id,
+ player_key,
+ }))
+ }
+}
+
+// TODO: please rename me
+struct SyncThingData {
+ tx: oneshot::Sender<Streaming<pb::PlayerRequestResponse>>,
+ server_messages: mpsc::UnboundedReceiver<Result<pb::PlayerRequest, Status>>,
+}
+
+struct RemoteBotSpec {
+ player_key: String,
+ router: PlayerRouter,
+}
+
+#[tonic::async_trait]
+impl runner::BotSpec for RemoteBotSpec {
+ async fn run_bot(
+ &self,
+ player_id: u32,
+ event_bus: Arc<Mutex<EventBus>>,
+ _match_logger: MatchLogger,
+ ) -> Box<dyn PlayerHandle> {
+ let (tx, rx) = oneshot::channel();
+ let (server_msg_snd, server_msg_recv) = mpsc::unbounded_channel();
+ self.router.put(
+ self.player_key.clone(),
+ SyncThingData {
+ tx,
+ server_messages: server_msg_recv,
+ },
+ );
+
+ let fut = tokio::time::timeout(Duration::from_secs(10), rx);
+ match fut.await {
+ Ok(Ok(client_messages)) => {
+ // let client_messages = rx.await.unwrap();
+ tokio::spawn(handle_bot_messages(
+ player_id,
+ event_bus.clone(),
+ client_messages,
+ ));
+ }
+ _ => {
+ // ensure router cleanup
+ self.router.take(&self.player_key);
+ }
+ };
+
+ // If the player did not connect, the receiving half of `sender`
+ // will be dropped here, resulting in a time-out for every turn.
+ // This is fine for now, but
+ // TODO: provide a formal mechanism for player startup failure
+ Box::new(RemoteBotHandle {
+ sender: server_msg_snd,
+ player_id,
+ event_bus,
+ })
+ }
+}
+
+async fn handle_bot_messages(
+ player_id: u32,
+ event_bus: Arc<Mutex<EventBus>>,
+ mut messages: Streaming<pb::PlayerRequestResponse>,
+) {
+ while let Some(message) = messages.message().await.unwrap() {
+ let request_id = (player_id, message.request_id as u32);
+ event_bus
+ .lock()
+ .unwrap()
+ .resolve_request(request_id, Ok(message.content));
+ }
+}
+
+struct RemoteBotHandle {
+ sender: mpsc::UnboundedSender<Result<pb::PlayerRequest, Status>>,
+ player_id: u32,
+ event_bus: Arc<Mutex<EventBus>>,
+}
+
+impl PlayerHandle for RemoteBotHandle {
+ fn send_request(&mut self, r: RequestMessage) {
+ let res = self.sender.send(Ok(pb::PlayerRequest {
+ request_id: r.request_id as i32,
+ content: r.content,
+ }));
+ match res {
+ Ok(()) => {
+ // schedule a timeout. See comments at method implementation
+ tokio::spawn(schedule_timeout(
+ (self.player_id, r.request_id),
+ r.timeout,
+ self.event_bus.clone(),
+ ));
+ }
+ Err(_send_error) => {
+ // cannot contact the remote bot anymore;
+ // directly mark all requests as timed out.
+ // TODO: create a dedicated error type for this.
+ // should it be logged?
+ println!("send error: {:?}", _send_error);
+ self.event_bus
+ .lock()
+ .unwrap()
+ .resolve_request((self.player_id, r.request_id), Err(RequestError::Timeout));
+ }
+ }
+ }
+}
+
+// TODO: this will spawn a task for every request, which might not be ideal.
+// Some alternatives:
+// - create a single task that manages all time-outs.
+// - intersperse timeouts with incoming client messages
+// - push timeouts upwards, into the matchrunner logic (before we hit the playerhandle).
+// This was initially not done to allow timer start to be delayed until the message actually arrived
+// with the player. Is this still needed, or is there a different way to do this?
+//
+async fn schedule_timeout(
+ request_id: (u32, u32),
+ duration: Duration,
+ event_bus: Arc<Mutex<EventBus>>,
+) {
+ tokio::time::sleep(duration).await;
+ event_bus
+ .lock()
+ .unwrap()
+ .resolve_request(request_id, Err(RequestError::Timeout));
+}
+
+pub async fn run_bot_api(runner_config: Arc<GlobalConfig>, pool: ConnectionPool) {
+ let router = PlayerRouter::new();
+ let server = BotApiServer {
+ router,
+ conn_pool: pool,
+ runner_config,
+ };
+
+ let addr = SocketAddr::from(([127, 0, 0, 1], 50051));
+ Server::builder()
+ .add_service(pb::bot_api_service_server::BotApiServiceServer::new(server))
+ .serve(addr)
+ .await
+ .unwrap()
+}
diff --git a/planetwars-server/src/modules/bots.rs b/planetwars-server/src/modules/bots.rs
index 843e48d..5513539 100644
--- a/planetwars-server/src/modules/bots.rs
+++ b/planetwars-server/src/modules/bots.rs
@@ -2,22 +2,25 @@ use std::path::PathBuf;
use diesel::{PgConnection, QueryResult};
-use crate::{db, util::gen_alphanumeric, BOTS_DIR};
+use crate::{db, util::gen_alphanumeric, GlobalConfig};
-pub fn save_code_bundle(
+/// Save a string containing bot code as a code bundle.
+pub fn save_code_string(
bot_code: &str,
bot_id: Option<i32>,
conn: &PgConnection,
-) -> QueryResult<db::bots::CodeBundle> {
+ config: &GlobalConfig,
+) -> QueryResult<db::bots::BotVersion> {
let bundle_name = gen_alphanumeric(16);
- let code_bundle_dir = PathBuf::from(BOTS_DIR).join(&bundle_name);
+ let code_bundle_dir = PathBuf::from(&config.bots_directory).join(&bundle_name);
std::fs::create_dir(&code_bundle_dir).unwrap();
std::fs::write(code_bundle_dir.join("bot.py"), bot_code).unwrap();
- let new_code_bundle = db::bots::NewCodeBundle {
+ let new_code_bundle = db::bots::NewBotVersion {
bot_id,
- path: &bundle_name,
+ code_bundle_path: Some(&bundle_name),
+ container_digest: None,
};
- db::bots::create_code_bundle(&new_code_bundle, conn)
+ db::bots::create_bot_version(&new_code_bundle, conn)
}
diff --git a/planetwars-server/src/modules/matches.rs b/planetwars-server/src/modules/matches.rs
index a254bac..a1fe63d 100644
--- a/planetwars-server/src/modules/matches.rs
+++ b/planetwars-server/src/modules/matches.rs
@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::{path::PathBuf, sync::Arc};
use diesel::{PgConnection, QueryResult};
use planetwars_matchrunner::{self as runner, docker_runner::DockerBotSpec, BotSpec, MatchConfig};
@@ -11,77 +11,126 @@ use crate::{
matches::{MatchData, MatchResult},
},
util::gen_alphanumeric,
- ConnectionPool, BOTS_DIR, MAPS_DIR, MATCHES_DIR,
+ ConnectionPool, GlobalConfig,
};
-const PYTHON_IMAGE: &str = "python:3.10-slim-buster";
-
-pub struct RunMatch<'a> {
+pub struct RunMatch {
log_file_name: String,
- player_code_bundles: Vec<&'a db::bots::CodeBundle>,
- match_id: Option<i32>,
+ players: Vec<MatchPlayer>,
+ config: Arc<GlobalConfig>,
+}
+
+pub enum MatchPlayer {
+ BotVersion {
+ bot: Option<db::bots::Bot>,
+ version: db::bots::BotVersion,
+ },
+ BotSpec {
+ spec: Box<dyn BotSpec>,
+ },
}
-impl<'a> RunMatch<'a> {
- pub fn from_players(player_code_bundles: Vec<&'a db::bots::CodeBundle>) -> Self {
+impl RunMatch {
+ pub fn from_players(config: Arc<GlobalConfig>, players: Vec<MatchPlayer>) -> Self {
let log_file_name = format!("{}.log", gen_alphanumeric(16));
RunMatch {
+ config,
log_file_name,
- player_code_bundles,
- match_id: None,
+ players,
}
}
- pub fn runner_config(&self) -> runner::MatchConfig {
+ fn into_runner_config(self) -> runner::MatchConfig {
runner::MatchConfig {
- map_path: PathBuf::from(MAPS_DIR).join("hex.json"),
+ map_path: PathBuf::from(&self.config.maps_directory).join("hex.json"),
map_name: "hex".to_string(),
- log_path: PathBuf::from(MATCHES_DIR).join(&self.log_file_name),
+ log_path: PathBuf::from(&self.config.match_logs_directory).join(&self.log_file_name),
players: self
- .player_code_bundles
- .iter()
- .map(|b| runner::MatchPlayer {
- bot_spec: code_bundle_to_botspec(b),
+ .players
+ .into_iter()
+ .map(|player| runner::MatchPlayer {
+ bot_spec: match player {
+ MatchPlayer::BotVersion { bot, version } => {
+ bot_version_to_botspec(&self.config, bot.as_ref(), &version)
+ }
+ MatchPlayer::BotSpec { spec } => spec,
+ },
})
.collect(),
}
}
- pub fn store_in_database(&mut self, db_conn: &PgConnection) -> QueryResult<MatchData> {
- // don't store the same match twice
- assert!(self.match_id.is_none());
+ pub async fn run(
+ self,
+ conn_pool: ConnectionPool,
+ ) -> QueryResult<(MatchData, JoinHandle<MatchOutcome>)> {
+ let match_data = {
+ // TODO: it would be nice to get an already-open connection here when possible.
+ // Maybe we need an additional abstraction, bundling a connection and connection pool?
+ let db_conn = conn_pool.get().await.expect("could not get a connection");
+ self.store_in_database(&db_conn)?
+ };
+
+ let runner_config = self.into_runner_config();
+ let handle = tokio::spawn(run_match_task(conn_pool, runner_config, match_data.base.id));
+ Ok((match_data, handle))
+ }
+
+ fn store_in_database(&self, db_conn: &PgConnection) -> QueryResult<MatchData> {
let new_match_data = db::matches::NewMatch {
state: db::matches::MatchState::Playing,
log_path: &self.log_file_name,
};
let new_match_players = self
- .player_code_bundles
+ .players
.iter()
- .map(|b| db::matches::MatchPlayerData {
- code_bundle_id: b.id,
+ .map(|p| db::matches::MatchPlayerData {
+ code_bundle_id: match p {
+ MatchPlayer::BotVersion { version, .. } => Some(version.id),
+ MatchPlayer::BotSpec { .. } => None,
+ },
})
.collect::<Vec<_>>();
- let match_data = db::matches::create_match(&new_match_data, &new_match_players, &db_conn)?;
- self.match_id = Some(match_data.base.id);
- Ok(match_data)
+ db::matches::create_match(&new_match_data, &new_match_players, db_conn)
}
+}
- pub fn spawn(self, pool: ConnectionPool) -> JoinHandle<MatchOutcome> {
- let match_id = self.match_id.expect("match must be saved before running");
- let runner_config = self.runner_config();
- tokio::spawn(run_match_task(pool, runner_config, match_id))
+pub fn bot_version_to_botspec(
+ runner_config: &GlobalConfig,
+ bot: Option<&db::bots::Bot>,
+ bot_version: &db::bots::BotVersion,
+) -> Box<dyn BotSpec> {
+ if let Some(code_bundle_path) = &bot_version.code_bundle_path {
+ python_docker_bot_spec(runner_config, code_bundle_path)
+ } else if let (Some(container_digest), Some(bot)) = (&bot_version.container_digest, bot) {
+ Box::new(DockerBotSpec {
+ image: format!(
+ "{}/{}@{}",
+ runner_config.container_registry_url, bot.name, container_digest
+ ),
+ binds: None,
+ argv: None,
+ working_dir: None,
+ })
+ } else {
+ // TODO: ideally this would not be possible
+ panic!("bad bot version")
}
}
-pub fn code_bundle_to_botspec(code_bundle: &db::bots::CodeBundle) -> Box<dyn BotSpec> {
- let bundle_path = PathBuf::from(BOTS_DIR).join(&code_bundle.path);
+fn python_docker_bot_spec(config: &GlobalConfig, code_bundle_path: &str) -> Box<dyn BotSpec> {
+ let code_bundle_rel_path = PathBuf::from(&config.bots_directory).join(code_bundle_path);
+ let code_bundle_abs_path = std::fs::canonicalize(&code_bundle_rel_path).unwrap();
+ let code_bundle_path_str = code_bundle_abs_path.as_os_str().to_str().unwrap();
+ // TODO: it would be good to simplify this configuration
Box::new(DockerBotSpec {
- code_path: bundle_path,
- image: PYTHON_IMAGE.to_string(),
- argv: vec!["python".to_string(), "bot.py".to_string()],
+ image: config.python_runner_image.clone(),
+ binds: Some(vec![format!("{}:{}", code_bundle_path_str, "/workdir")]),
+ argv: Some(vec!["python".to_string(), "bot.py".to_string()]),
+ working_dir: Some("/workdir".to_string()),
})
}
@@ -104,5 +153,5 @@ async fn run_match_task(
db::matches::save_match_result(match_id, result, &conn).expect("could not save match result");
- return outcome;
+ outcome
}
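
Note: RunMatch moves from the store_in_database/spawn pair to a single run call that stores the match and spawns the runner task. A condensed sketch of the new flow, as in-crate code mirroring the updated call sites in ranking.rs, demo.rs and bot_api.rs below (error handling simplified):

    use std::sync::Arc;
    use crate::db::bots::{Bot, BotVersion};
    use crate::modules::matches::{MatchPlayer, RunMatch};
    use crate::{ConnectionPool, GlobalConfig};

    // Sketch: start a match between two stored bot versions and wait for the outcome.
    async fn play_pair(
        config: Arc<GlobalConfig>,
        pool: ConnectionPool,
        (bot_a, version_a): (Bot, BotVersion),
        (bot_b, version_b): (Bot, BotVersion),
    ) {
        let players = vec![
            MatchPlayer::BotVersion { bot: Some(bot_a), version: version_a },
            MatchPlayer::BotVersion { bot: Some(bot_b), version: version_b },
        ];
        let (match_data, handle) = RunMatch::from_players(config, players)
            .run(pool)
            .await
            .expect("failed to run match");
        println!("started match {}", match_data.base.id);
        // awaiting the join handle waits for the match to finish
        let _outcome = handle.await;
    }
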
diff --git a/planetwars-server/src/modules/mod.rs b/planetwars-server/src/modules/mod.rs
index bea28e0..1200f9d 100644
--- a/planetwars-server/src/modules/mod.rs
+++ b/planetwars-server/src/modules/mod.rs
@@ -1,5 +1,7 @@
// This module implements general domain logic, not directly
// tied to the database or API layers.
+pub mod bot_api;
pub mod bots;
pub mod matches;
pub mod ranking;
+pub mod registry;
diff --git a/planetwars-server/src/modules/ranking.rs b/planetwars-server/src/modules/ranking.rs
index 5d496d7..a9f6419 100644
--- a/planetwars-server/src/modules/ranking.rs
+++ b/planetwars-server/src/modules/ranking.rs
@@ -1,17 +1,18 @@
-use crate::{db::bots::Bot, DbPool};
+use crate::{db::bots::Bot, DbPool, GlobalConfig};
use crate::db;
-use crate::modules::matches::RunMatch;
+use crate::modules::matches::{MatchPlayer, RunMatch};
use diesel::{PgConnection, QueryResult};
use rand::seq::SliceRandom;
use std::collections::HashMap;
use std::mem;
+use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio;
const RANKER_INTERVAL: u64 = 60;
-pub async fn run_ranker(db_pool: DbPool) {
+pub async fn run_ranker(config: Arc<GlobalConfig>, db_pool: DbPool) {
// TODO: make this configurable
// play at most one match every n seconds
let mut interval = tokio::time::interval(Duration::from_secs(RANKER_INTERVAL));
@@ -30,30 +31,30 @@ pub async fn run_ranker(db_pool: DbPool) {
let mut rng = &mut rand::thread_rng();
bots.choose_multiple(&mut rng, 2).cloned().collect()
};
- play_ranking_match(selected_bots, db_pool.clone()).await;
+ play_ranking_match(config.clone(), selected_bots, db_pool.clone()).await;
recalculate_ratings(&db_conn).expect("could not recalculate ratings");
}
}
-async fn play_ranking_match(selected_bots: Vec<Bot>, db_pool: DbPool) {
+async fn play_ranking_match(config: Arc<GlobalConfig>, selected_bots: Vec<Bot>, db_pool: DbPool) {
let db_conn = db_pool.get().await.expect("could not get db pool");
- let mut code_bundles = Vec::new();
+ let mut players = Vec::new();
for bot in &selected_bots {
- let code_bundle = db::bots::active_code_bundle(bot.id, &db_conn)
- .expect("could not get active code bundle");
- code_bundles.push(code_bundle);
+ let version = db::bots::active_bot_version(bot.id, &db_conn)
+ .expect("could not get active bot version");
+ let player = MatchPlayer::BotVersion {
+ bot: Some(bot.clone()),
+ version,
+ };
+ players.push(player);
}
- let code_bundle_refs = code_bundles.iter().collect::<Vec<_>>();
-
- let mut run_match = RunMatch::from_players(code_bundle_refs);
- run_match
- .store_in_database(&db_conn)
- .expect("could not store match in db");
- run_match
- .spawn(db_pool.clone())
+ let (_, handle) = RunMatch::from_players(config, players)
+ .run(db_pool.clone())
.await
- .expect("running match failed");
+ .expect("failed to run match");
+ // wait for match to complete, so that only one ranking match can be running
+ let _outcome = handle.await;
}
fn recalculate_ratings(db_conn: &PgConnection) -> QueryResult<()> {
diff --git a/planetwars-server/src/modules/registry.rs b/planetwars-server/src/modules/registry.rs
new file mode 100644
index 0000000..3f6dad2
--- /dev/null
+++ b/planetwars-server/src/modules/registry.rs
@@ -0,0 +1,440 @@
+// TODO: this module is functional, but it needs a good refactor for proper error handling.
+
+use axum::body::{Body, StreamBody};
+use axum::extract::{BodyStream, FromRequest, Path, Query, RequestParts, TypedHeader};
+use axum::headers::authorization::Basic;
+use axum::headers::Authorization;
+use axum::response::{IntoResponse, Response};
+use axum::routing::{get, head, post, put};
+use axum::{async_trait, Extension, Router};
+use futures::StreamExt;
+use hyper::StatusCode;
+use serde::Serialize;
+use sha2::{Digest, Sha256};
+use std::path::PathBuf;
+use std::sync::Arc;
+use tokio::io::AsyncWriteExt;
+use tokio_util::io::ReaderStream;
+
+use crate::db::bots::NewBotVersion;
+use crate::util::gen_alphanumeric;
+use crate::{db, DatabaseConnection, GlobalConfig};
+
+use crate::db::users::{authenticate_user, Credentials, User};
+
+pub fn registry_service() -> Router {
+ Router::new()
+ // The docker API requires this trailing slash
+ .nest("/v2/", registry_api_v2())
+}
+
+fn registry_api_v2() -> Router {
+ Router::new()
+ .route("/", get(get_root))
+ .route(
+ "/:name/manifests/:reference",
+ get(get_manifest).put(put_manifest),
+ )
+ .route(
+ "/:name/blobs/:digest",
+ head(check_blob_exists).get(get_blob),
+ )
+ .route("/:name/blobs/uploads/", post(create_upload))
+ .route(
+ "/:name/blobs/uploads/:uuid",
+ put(put_upload).patch(patch_upload),
+ )
+}
+
+const ADMIN_USERNAME: &str = "admin";
+
+type AuthorizationHeader = TypedHeader<Authorization<Basic>>;
+
+enum RegistryAuth {
+ User(User),
+ Admin,
+}
+
+enum RegistryAuthError {
+ NoAuthHeader,
+ InvalidCredentials,
+}
+
+impl IntoResponse for RegistryAuthError {
+ fn into_response(self) -> Response {
+ // TODO: create enum for registry errors
+ let err = RegistryErrors {
+ errors: vec![RegistryError {
+ code: "UNAUTHORIZED".to_string(),
+ message: "please log in".to_string(),
+ detail: serde_json::Value::Null,
+ }],
+ };
+
+ (
+ StatusCode::UNAUTHORIZED,
+ [
+ ("Docker-Distribution-API-Version", "registry/2.0"),
+ ("WWW-Authenticate", "Basic"),
+ ],
+ serde_json::to_string(&err).unwrap(),
+ )
+ .into_response()
+ }
+}
+
+#[async_trait]
+impl<B> FromRequest<B> for RegistryAuth
+where
+ B: Send,
+{
+ type Rejection = RegistryAuthError;
+
+ async fn from_request(req: &mut RequestParts<B>) -> Result<Self, Self::Rejection> {
+ let TypedHeader(Authorization(basic)) = AuthorizationHeader::from_request(req)
+ .await
+ .map_err(|_| RegistryAuthError::NoAuthHeader)?;
+
+ // TODO: Into<Credentials> would be nice
+ let credentials = Credentials {
+ username: basic.username(),
+ password: basic.password(),
+ };
+
+ let Extension(config) = Extension::<Arc<GlobalConfig>>::from_request(req)
+ .await
+ .unwrap();
+
+ if credentials.username == ADMIN_USERNAME {
+ if credentials.password == config.registry_admin_password {
+ Ok(RegistryAuth::Admin)
+ } else {
+ Err(RegistryAuthError::InvalidCredentials)
+ }
+ } else {
+ let db_conn = DatabaseConnection::from_request(req).await.unwrap();
+ let user = authenticate_user(&credentials, &db_conn)
+ .ok_or(RegistryAuthError::InvalidCredentials)?;
+
+ Ok(RegistryAuth::User(user))
+ }
+ }
+}
+
+// Since async file io just calls spawn_blocking internally, it does not really make sense
+// to make this an async function
+fn file_sha256_digest(path: &std::path::Path) -> std::io::Result<String> {
+ let mut file = std::fs::File::open(path)?;
+ let mut hasher = Sha256::new();
+ let _n = std::io::copy(&mut file, &mut hasher)?;
+ Ok(format!("{:x}", hasher.finalize()))
+}
+
+/// Get the index of the last byte in a file
+async fn last_byte_pos(file: &tokio::fs::File) -> std::io::Result<u64> {
+ let n_bytes = file.metadata().await?.len();
+ let pos = if n_bytes == 0 { 0 } else { n_bytes - 1 };
+ Ok(pos)
+}
+
+async fn get_root(_auth: RegistryAuth) -> impl IntoResponse {
+ // root should return 200 OK to confirm api compliance
+ Response::builder()
+ .status(StatusCode::OK)
+ .header("Docker-Distribution-API-Version", "registry/2.0")
+ .body(Body::empty())
+ .unwrap()
+}
+
+#[derive(Serialize)]
+pub struct RegistryErrors {
+ errors: Vec<RegistryError>,
+}
+
+#[derive(Serialize)]
+pub struct RegistryError {
+ code: String,
+ message: String,
+ detail: serde_json::Value,
+}
+
+async fn check_blob_exists(
+ db_conn: DatabaseConnection,
+ auth: RegistryAuth,
+ Path((repository_name, raw_digest)): Path<(String, String)>,
+ Extension(config): Extension<Arc<GlobalConfig>>,
+) -> Result<impl IntoResponse, StatusCode> {
+ check_access(&repository_name, &auth, &db_conn)?;
+
+ let digest = raw_digest.strip_prefix("sha256:").unwrap();
+ let blob_path = PathBuf::from(&config.registry_directory)
+ .join("sha256")
+ .join(&digest);
+ if blob_path.exists() {
+ let metadata = std::fs::metadata(&blob_path).unwrap();
+ Ok((StatusCode::OK, [("Content-Length", metadata.len())]))
+ } else {
+ Err(StatusCode::NOT_FOUND)
+ }
+}
+
+async fn get_blob(
+ db_conn: DatabaseConnection,
+ auth: RegistryAuth,
+ Path((repository_name, raw_digest)): Path<(String, String)>,
+ Extension(config): Extension<Arc<GlobalConfig>>,
+) -> Result<impl IntoResponse, StatusCode> {
+ check_access(&repository_name, &auth, &db_conn)?;
+
+ let digest = raw_digest.strip_prefix("sha256:").unwrap();
+ let blob_path = PathBuf::from(&config.registry_directory)
+ .join("sha256")
+ .join(&digest);
+ if !blob_path.exists() {
+ return Err(StatusCode::NOT_FOUND);
+ }
+ let file = tokio::fs::File::open(&blob_path).await.unwrap();
+ let reader_stream = ReaderStream::new(file);
+ let stream_body = StreamBody::new(reader_stream);
+ Ok(stream_body)
+}
+
+async fn create_upload(
+ db_conn: DatabaseConnection,
+ auth: RegistryAuth,
+ Path(repository_name): Path<String>,
+ Extension(config): Extension<Arc<GlobalConfig>>,
+) -> Result<impl IntoResponse, StatusCode> {
+ check_access(&repository_name, &auth, &db_conn)?;
+
+ let uuid = gen_alphanumeric(16);
+ tokio::fs::File::create(
+ PathBuf::from(&config.registry_directory)
+ .join("uploads")
+ .join(&uuid),
+ )
+ .await
+ .unwrap();
+
+ Ok(Response::builder()
+ .status(StatusCode::ACCEPTED)
+ .header(
+ "Location",
+ format!("/v2/{}/blobs/uploads/{}", repository_name, uuid),
+ )
+ .header("Docker-Upload-UUID", uuid)
+ .header("Range", "bytes=0-0")
+ .body(Body::empty())
+ .unwrap())
+}
+
+async fn patch_upload(
+ db_conn: DatabaseConnection,
+ auth: RegistryAuth,
+ Path((repository_name, uuid)): Path<(String, String)>,
+ mut stream: BodyStream,
+ Extension(config): Extension<Arc<GlobalConfig>>,
+) -> Result<impl IntoResponse, StatusCode> {
+ check_access(&repository_name, &auth, &db_conn)?;
+
+ // TODO: support content range header in request
+ let upload_path = PathBuf::from(&config.registry_directory)
+ .join("uploads")
+ .join(&uuid);
+ let mut file = tokio::fs::OpenOptions::new()
+ .read(false)
+ .write(true)
+ .append(true)
+ .create(false)
+ .open(upload_path)
+ .await
+ .unwrap();
+ while let Some(Ok(chunk)) = stream.next().await {
+ file.write_all(&chunk).await.unwrap();
+ }
+
+ let last_byte = last_byte_pos(&file).await.unwrap();
+
+ Ok(Response::builder()
+ .status(StatusCode::ACCEPTED)
+ .header(
+ "Location",
+ format!("/v2/{}/blobs/uploads/{}", repository_name, uuid),
+ )
+ .header("Docker-Upload-UUID", uuid)
+ // range indicating current progress of the upload
+ .header("Range", format!("0-{}", last_byte))
+ .body(Body::empty())
+ .unwrap())
+}
+
+use serde::Deserialize;
+#[derive(Deserialize)]
+struct UploadParams {
+ digest: String,
+}
+
+async fn put_upload(
+ db_conn: DatabaseConnection,
+ auth: RegistryAuth,
+ Path((repository_name, uuid)): Path<(String, String)>,
+ Query(params): Query<UploadParams>,
+ mut stream: BodyStream,
+ Extension(config): Extension<Arc<GlobalConfig>>,
+) -> Result<impl IntoResponse, StatusCode> {
+ check_access(&repository_name, &auth, &db_conn)?;
+
+ let upload_path = PathBuf::from(&config.registry_directory)
+ .join("uploads")
+ .join(&uuid);
+ let mut file = tokio::fs::OpenOptions::new()
+ .read(false)
+ .write(true)
+ .append(true)
+ .create(false)
+ .open(&upload_path)
+ .await
+ .unwrap();
+
+ let range_begin = last_byte_pos(&file).await.unwrap();
+ while let Some(Ok(chunk)) = stream.next().await {
+ file.write_all(&chunk).await.unwrap();
+ }
+ file.flush().await.unwrap();
+ let range_end = last_byte_pos(&file).await.unwrap();
+
+ let expected_digest = params.digest.strip_prefix("sha256:").unwrap();
+ let digest = file_sha256_digest(&upload_path).unwrap();
+ if digest != expected_digest {
+ // TODO: return a docker error body
+ return Err(StatusCode::BAD_REQUEST);
+ }
+
+ let target_path = PathBuf::from(&config.registry_directory)
+ .join("sha256")
+ .join(&digest);
+ tokio::fs::rename(&upload_path, &target_path).await.unwrap();
+
+ Ok(Response::builder()
+ .status(StatusCode::CREATED)
+ .header(
+ "Location",
+ format!("/v2/{}/blobs/{}", repository_name, digest),
+ )
+ .header("Docker-Upload-UUID", uuid)
+ // content range for bytes that were in the body of this request
+ .header("Content-Range", format!("{}-{}", range_begin, range_end))
+ .header("Docker-Content-Digest", params.digest)
+ .body(Body::empty())
+ .unwrap())
+}
+
+async fn get_manifest(
+ db_conn: DatabaseConnection,
+ auth: RegistryAuth,
+ Path((repository_name, reference)): Path<(String, String)>,
+ Extension(config): Extension<Arc<GlobalConfig>>,
+) -> Result<impl IntoResponse, StatusCode> {
+ check_access(&repository_name, &auth, &db_conn)?;
+
+ let manifest_path = PathBuf::from(&config.registry_directory)
+ .join("manifests")
+ .join(&repository_name)
+ .join(&reference)
+ .with_extension("json");
+ let data = tokio::fs::read(&manifest_path).await.unwrap();
+
+ let manifest: serde_json::Map<String, serde_json::Value> =
+ serde_json::from_slice(&data).unwrap();
+ let media_type = manifest.get("mediaType").unwrap().as_str().unwrap();
+ Ok(Response::builder()
+ .status(StatusCode::OK)
+ .header("Content-Type", media_type)
+ .body(axum::body::Full::from(data))
+ .unwrap())
+}
+
+async fn put_manifest(
+ db_conn: DatabaseConnection,
+ auth: RegistryAuth,
+ Path((repository_name, reference)): Path<(String, String)>,
+ mut stream: BodyStream,
+ Extension(config): Extension<Arc<GlobalConfig>>,
+) -> Result<impl IntoResponse, StatusCode> {
+ let bot = check_access(&repository_name, &auth, &db_conn)?;
+
+ let repository_dir = PathBuf::from(&config.registry_directory)
+ .join("manifests")
+ .join(&repository_name);
+
+ tokio::fs::create_dir_all(&repository_dir).await.unwrap();
+
+ let mut hasher = Sha256::new();
+ let manifest_path = repository_dir.join(&reference).with_extension("json");
+ {
+ let mut file = tokio::fs::OpenOptions::new()
+ .write(true)
+ .create(true)
+ .truncate(true)
+ .open(&manifest_path)
+ .await
+ .unwrap();
+ while let Some(Ok(chunk)) = stream.next().await {
+ hasher.update(&chunk);
+ file.write_all(&chunk).await.unwrap();
+ }
+ }
+ let digest = hasher.finalize();
+ // TODO: store content-adressable manifests separately
+ let content_digest = format!("sha256:{:x}", digest);
+ let digest_path = repository_dir.join(&content_digest).with_extension("json");
+ tokio::fs::copy(manifest_path, digest_path).await.unwrap();
+
+ // Register the new image as a bot version
+ // TODO: how should tags be handled?
+ let new_version = NewBotVersion {
+ bot_id: Some(bot.id),
+ code_bundle_path: None,
+ container_digest: Some(&content_digest),
+ };
+ db::bots::create_bot_version(&new_version, &db_conn).expect("could not save bot version");
+
+ Ok(Response::builder()
+ .status(StatusCode::CREATED)
+ .header(
+ "Location",
+ format!("/v2/{}/manifests/{}", repository_name, reference),
+ )
+ .header("Docker-Content-Digest", content_digest)
+ .body(Body::empty())
+ .unwrap())
+}
+
+/// Ensure that the accessed repository exists
+/// and the user is allowed to access it.
+/// Returns the associated bot.
+fn check_access(
+ repository_name: &str,
+ auth: &RegistryAuth,
+ db_conn: &DatabaseConnection,
+) -> Result<db::bots::Bot, StatusCode> {
+ use diesel::OptionalExtension;
+
+ // TODO: it would be nice to provide the found repository
+ // to the route handlers
+ let bot = db::bots::find_bot_by_name(repository_name, db_conn)
+ .optional()
+ .expect("could not run query")
+ .ok_or(StatusCode::NOT_FOUND)?;
+
+ match &auth {
+ RegistryAuth::Admin => Ok(bot),
+ RegistryAuth::User(user) => {
+ if bot.owner_id == Some(user.id) {
+ Ok(bot)
+ } else {
+ Err(StatusCode::FORBIDDEN)
+ }
+ }
+ }
+}
diff --git a/planetwars-server/src/routes/bots.rs b/planetwars-server/src/routes/bots.rs
index 3bbaa1a..9ddb109 100644
--- a/planetwars-server/src/routes/bots.rs
+++ b/planetwars-server/src/routes/bots.rs
@@ -1,7 +1,7 @@
use axum::extract::{Multipart, Path};
use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};
-use axum::{body, Json};
+use axum::{body, Extension, Json};
use diesel::OptionalExtension;
use rand::distributions::Alphanumeric;
use rand::Rng;
@@ -9,13 +9,14 @@ use serde::{Deserialize, Serialize};
use serde_json::{self, json, value::Value as JsonValue};
use std::io::Cursor;
use std::path::PathBuf;
+use std::sync::Arc;
use thiserror;
-use crate::db::bots::{self, CodeBundle};
-use crate::db::ratings::{RankedBot, self};
+use crate::db::bots::{self, BotVersion};
+use crate::db::ratings::{self, RankedBot};
use crate::db::users::User;
-use crate::modules::bots::save_code_bundle;
-use crate::{DatabaseConnection, BOTS_DIR};
+use crate::modules::bots::save_code_string;
+use crate::{DatabaseConnection, GlobalConfig};
use bots::Bot;
#[derive(Serialize, Deserialize, Debug)]
@@ -96,6 +97,7 @@ pub async fn save_bot(
Json(params): Json<SaveBotParams>,
user: User,
conn: DatabaseConnection,
+ Extension(config): Extension<Arc<GlobalConfig>>,
) -> Result<Json<Bot>, SaveBotError> {
let res = bots::find_bot_by_name(&params.bot_name, &conn)
.optional()
@@ -119,8 +121,8 @@ pub async fn save_bot(
bots::create_bot(&new_bot, &conn).expect("could not create bot")
}
};
- let _code_bundle =
- save_code_bundle(&params.code, Some(bot.id), &conn).expect("failed to save code bundle");
+ let _code_bundle = save_code_string(&params.code, Some(bot.id), &conn, &config)
+ .expect("failed to save code bundle");
Ok(Json(bot))
}
@@ -148,8 +150,8 @@ pub async fn get_bot(
Path(bot_id): Path<i32>,
) -> Result<Json<JsonValue>, StatusCode> {
let bot = bots::find_bot(bot_id, &conn).map_err(|_| StatusCode::NOT_FOUND)?;
- let bundles = bots::find_bot_code_bundles(bot.id, &conn)
- .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
+ let bundles =
+ bots::find_bot_versions(bot.id, &conn).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
Ok(Json(json!({
"bot": bot,
"bundles": bundles,
@@ -183,8 +185,9 @@ pub async fn upload_code_multipart(
user: User,
Path(bot_id): Path<i32>,
mut multipart: Multipart,
-) -> Result<Json<CodeBundle>, StatusCode> {
- let bots_dir = PathBuf::from(BOTS_DIR);
+ Extension(config): Extension<Arc<GlobalConfig>>,
+) -> Result<Json<BotVersion>, StatusCode> {
+ let bots_dir = PathBuf::from(&config.bots_directory);
let bot = bots::find_bot(bot_id, &conn).map_err(|_| StatusCode::NOT_FOUND)?;
@@ -213,12 +216,13 @@ pub async fn upload_code_multipart(
.extract(bots_dir.join(&folder_name))
.map_err(|_| StatusCode::BAD_REQUEST)?;
- let bundle = bots::NewCodeBundle {
+ let bot_version = bots::NewBotVersion {
bot_id: Some(bot.id),
- path: &folder_name,
+ code_bundle_path: Some(&folder_name),
+ container_digest: None,
};
let code_bundle =
- bots::create_code_bundle(&bundle, &conn).expect("Failed to create code bundle");
+ bots::create_bot_version(&bot_version, &conn).expect("Failed to create code bundle");
Ok(Json(code_bundle))
}
diff --git a/planetwars-server/src/routes/demo.rs b/planetwars-server/src/routes/demo.rs
index 7f7ba71..69838f3 100644
--- a/planetwars-server/src/routes/demo.rs
+++ b/planetwars-server/src/routes/demo.rs
@@ -1,8 +1,11 @@
+use std::sync::Arc;
+
use crate::db;
use crate::db::matches::{FullMatchData, FullMatchPlayerData};
-use crate::modules::bots::save_code_bundle;
-use crate::modules::matches::RunMatch;
+use crate::modules::bots::save_code_string;
+use crate::modules::matches::{MatchPlayer, RunMatch};
use crate::ConnectionPool;
+use crate::GlobalConfig;
use axum::extract::Extension;
use axum::Json;
use hyper::StatusCode;
@@ -30,6 +33,7 @@ pub struct SubmitBotResponse {
pub async fn submit_bot(
Json(params): Json<SubmitBotParams>,
Extension(pool): Extension<ConnectionPool>,
+ Extension(config): Extension<Arc<GlobalConfig>>,
) -> Result<Json<SubmitBotResponse>, StatusCode> {
let conn = pool.get().await.expect("could not get database connection");
@@ -37,20 +41,32 @@ pub async fn submit_bot(
.opponent_name
.unwrap_or_else(|| DEFAULT_OPPONENT_NAME.to_string());
- let opponent =
+ let opponent_bot =
db::bots::find_bot_by_name(&opponent_name, &conn).map_err(|_| StatusCode::BAD_REQUEST)?;
- let opponent_code_bundle =
- db::bots::active_code_bundle(opponent.id, &conn).map_err(|_| StatusCode::BAD_REQUEST)?;
+ let opponent_bot_version = db::bots::active_bot_version(opponent_bot.id, &conn)
+ .map_err(|_| StatusCode::BAD_REQUEST)?;
- let player_code_bundle = save_code_bundle(&params.code, None, &conn)
+ let player_bot_version = save_code_string(&params.code, None, &conn, &config)
// TODO: can we recover from this?
.expect("could not save bot code");
- let mut run_match = RunMatch::from_players(vec![&player_code_bundle, &opponent_code_bundle]);
- let match_data = run_match
- .store_in_database(&conn)
- .expect("failed to save match");
- run_match.spawn(pool.clone());
+ let run_match = RunMatch::from_players(
+ config,
+ vec![
+ MatchPlayer::BotVersion {
+ bot: None,
+ version: player_bot_version.clone(),
+ },
+ MatchPlayer::BotVersion {
+ bot: Some(opponent_bot.clone()),
+ version: opponent_bot_version.clone(),
+ },
+ ],
+ );
+ let (match_data, _) = run_match
+ .run(pool.clone())
+ .await
+ .expect("failed to run match");
// TODO: avoid clones
let full_match_data = FullMatchData {
@@ -58,13 +74,13 @@ pub async fn submit_bot(
match_players: vec![
FullMatchPlayerData {
base: match_data.match_players[0].clone(),
- code_bundle: player_code_bundle,
+ bot_version: Some(player_bot_version),
bot: None,
},
FullMatchPlayerData {
base: match_data.match_players[1].clone(),
- code_bundle: opponent_code_bundle,
- bot: Some(opponent),
+ bot_version: Some(opponent_bot_version),
+ bot: Some(opponent_bot),
},
],
};
diff --git a/planetwars-server/src/routes/matches.rs b/planetwars-server/src/routes/matches.rs
index b61008d..a980daa 100644
--- a/planetwars-server/src/routes/matches.rs
+++ b/planetwars-server/src/routes/matches.rs
@@ -1,102 +1,13 @@
-use std::path::PathBuf;
-
-use axum::{
- extract::{Extension, Path},
- Json,
-};
+use axum::{extract::Path, Extension, Json};
use hyper::StatusCode;
-use planetwars_matchrunner::{docker_runner::DockerBotSpec, run_match, MatchConfig, MatchPlayer};
-use rand::{distributions::Alphanumeric, Rng};
use serde::{Deserialize, Serialize};
+use std::{path::PathBuf, sync::Arc};
use crate::{
- db::{
- bots,
- matches::{self, MatchState},
- users::User,
- },
- ConnectionPool, DatabaseConnection, BOTS_DIR, MAPS_DIR, MATCHES_DIR,
+ db::matches::{self, MatchState},
+ DatabaseConnection, GlobalConfig,
};
-#[derive(Serialize, Deserialize, Debug)]
-pub struct MatchParams {
- // Just bot ids for now
- players: Vec<i32>,
-}
-
-pub async fn play_match(
- _user: User,
- Extension(pool): Extension<ConnectionPool>,
- Json(params): Json<MatchParams>,
-) -> Result<(), StatusCode> {
- let conn = pool.get().await.expect("could not get database connection");
- let map_path = PathBuf::from(MAPS_DIR).join("hex.json");
-
- let slug: String = rand::thread_rng()
- .sample_iter(&Alphanumeric)
- .take(16)
- .map(char::from)
- .collect();
- let log_file_name = format!("{}.log", slug);
-
- let mut players = Vec::new();
- let mut bot_ids = Vec::new();
- for bot_name in params.players {
- let bot = bots::find_bot(bot_name, &conn).map_err(|_| StatusCode::BAD_REQUEST)?;
- let code_bundle =
- bots::active_code_bundle(bot.id, &conn).map_err(|_| StatusCode::BAD_REQUEST)?;
-
- let bundle_path = PathBuf::from(BOTS_DIR).join(&code_bundle.path);
- let bot_config: BotConfig = std::fs::read_to_string(bundle_path.join("botconfig.toml"))
- .and_then(|config_str| toml::from_str(&config_str).map_err(|e| e.into()))
- .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
-
- players.push(MatchPlayer {
- bot_spec: Box::new(DockerBotSpec {
- code_path: PathBuf::from(BOTS_DIR).join(code_bundle.path),
- image: "python:3.10-slim-buster".to_string(),
- argv: shlex::split(&bot_config.run_command)
- .ok_or(StatusCode::INTERNAL_SERVER_ERROR)?,
- }),
- });
-
- bot_ids.push(matches::MatchPlayerData {
- code_bundle_id: code_bundle.id,
- });
- }
-
- let match_config = MatchConfig {
- map_name: "hex".to_string(),
- map_path,
- log_path: PathBuf::from(MATCHES_DIR).join(&log_file_name),
- players,
- };
-
- tokio::spawn(run_match_task(
- match_config,
- log_file_name,
- bot_ids,
- pool.clone(),
- ));
- Ok(())
-}
-
-async fn run_match_task(
- config: MatchConfig,
- log_file_name: String,
- match_players: Vec<matches::MatchPlayerData>,
- pool: ConnectionPool,
-) {
- let match_data = matches::NewMatch {
- state: MatchState::Finished,
- log_path: &log_file_name,
- };
-
- run_match(config).await;
- let conn = pool.get().await.expect("could not get database connection");
- matches::create_match(&match_data, &match_players, &conn).expect("could not create match");
-}
-
#[derive(Serialize, Deserialize)]
pub struct ApiMatch {
id: i32,
@@ -107,7 +18,7 @@ pub struct ApiMatch {
#[derive(Serialize, Deserialize)]
pub struct ApiMatchPlayer {
- code_bundle_id: i32,
+ bot_version_id: Option<i32>,
bot_id: Option<i32>,
bot_name: Option<String>,
}
@@ -127,7 +38,7 @@ pub fn match_data_to_api(data: matches::FullMatchData) -> ApiMatch {
.match_players
.iter()
.map(|_p| ApiMatchPlayer {
- code_bundle_id: _p.code_bundle.id,
+ bot_version_id: _p.bot_version.as_ref().map(|cb| cb.id),
bot_id: _p.bot.as_ref().map(|b| b.id),
bot_name: _p.bot.as_ref().map(|b| b.name.clone()),
})
@@ -135,15 +46,6 @@ pub fn match_data_to_api(data: matches::FullMatchData) -> ApiMatch {
}
}
-// TODO: this is duplicated from planetwars-cli
-// clean this up and move to matchrunner crate
-#[derive(Serialize, Deserialize)]
-pub struct BotConfig {
- pub name: String,
- pub run_command: String,
- pub build_command: Option<String>,
-}
-
pub async fn get_match_data(
Path(match_id): Path<i32>,
conn: DatabaseConnection,
@@ -157,10 +59,11 @@ pub async fn get_match_data(
pub async fn get_match_log(
Path(match_id): Path<i32>,
conn: DatabaseConnection,
+ Extension(config): Extension<Arc<GlobalConfig>>,
) -> Result<Vec<u8>, StatusCode> {
let match_base =
matches::find_match_base(match_id, &conn).map_err(|_| StatusCode::NOT_FOUND)?;
- let log_path = PathBuf::from(MATCHES_DIR).join(&match_base.log_path);
+ let log_path = PathBuf::from(&config.match_logs_directory).join(&match_base.log_path);
let log_contents = std::fs::read(log_path).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
Ok(log_contents)
}
diff --git a/planetwars-server/src/routes/users.rs b/planetwars-server/src/routes/users.rs
index 54ddd09..1989904 100644
--- a/planetwars-server/src/routes/users.rs
+++ b/planetwars-server/src/routes/users.rs
@@ -5,7 +5,7 @@ use axum::extract::{FromRequest, RequestParts, TypedHeader};
use axum::headers::authorization::Bearer;
use axum::headers::Authorization;
use axum::http::StatusCode;
-use axum::response::{Headers, IntoResponse, Response};
+use axum::response::{IntoResponse, Response};
use axum::{async_trait, Json};
use serde::{Deserialize, Serialize};
use serde_json::json;
@@ -163,9 +163,9 @@ pub async fn login(conn: DatabaseConnection, params: Json<LoginParams>) -> Respo
Some(user) => {
let session = sessions::create_session(&user, &conn);
let user_data: UserData = user.into();
- let headers = Headers(vec![("Token", &session.token)]);
+ let headers = [("Token", &session.token)];
- (headers, Json(user_data)).into_response()
+ (StatusCode::OK, headers, Json(user_data)).into_response()
}
}
}
diff --git a/planetwars-server/src/schema.rs b/planetwars-server/src/schema.rs
index be3e858..0606ac4 100644
--- a/planetwars-server/src/schema.rs
+++ b/planetwars-server/src/schema.rs
@@ -5,10 +5,12 @@ table! {
use diesel::sql_types::*;
use crate::db_types::*;
- bots (id) {
+ bot_versions (id) {
id -> Int4,
- owner_id -> Nullable<Int4>,
- name -> Text,
+ bot_id -> Nullable<Int4>,
+ code_bundle_path -> Nullable<Text>,
+ created_at -> Timestamp,
+ container_digest -> Nullable<Text>,
}
}
@@ -16,11 +18,10 @@ table! {
use diesel::sql_types::*;
use crate::db_types::*;
- code_bundles (id) {
+ bots (id) {
id -> Int4,
- bot_id -> Nullable<Int4>,
- path -> Text,
- created_at -> Timestamp,
+ owner_id -> Nullable<Int4>,
+ name -> Text,
}
}
@@ -31,7 +32,7 @@ table! {
match_players (match_id, player_id) {
match_id -> Int4,
player_id -> Int4,
- code_bundle_id -> Int4,
+ bot_version_id -> Nullable<Int4>,
}
}
@@ -81,16 +82,16 @@ table! {
}
}
+joinable!(bot_versions -> bots (bot_id));
joinable!(bots -> users (owner_id));
-joinable!(code_bundles -> bots (bot_id));
-joinable!(match_players -> code_bundles (code_bundle_id));
+joinable!(match_players -> bot_versions (bot_version_id));
joinable!(match_players -> matches (match_id));
joinable!(ratings -> bots (bot_id));
joinable!(sessions -> users (user_id));
allow_tables_to_appear_in_same_query!(
+ bot_versions,
bots,
- code_bundles,
match_players,
matches,
ratings,
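With code_bundles replaced by bot_versions, the new joinable! declarations above let Diesel join match players directly onto the bot version they ran. A minimal sketch of such a query, assuming the usual Queryable structs (a MatchPlayer model in db::matches and the BotVersion model from db::bots) and a plain diesel 1.x PgConnection; this is an illustration, not code from this commit:

```rust
use diesel::pg::PgConnection;
use diesel::prelude::*;

use crate::db::bots::BotVersion;
use crate::db::matches::MatchPlayer;
use crate::schema::{bot_versions, match_players};

/// Load the players of one match together with the bot version each ran, if any
/// (bot_version_id is nullable, hence the left join and the Option).
fn players_with_versions(
    conn: &PgConnection,
    match_id: i32,
) -> QueryResult<Vec<(MatchPlayer, Option<BotVersion>)>> {
    match_players::table
        .filter(match_players::match_id.eq(match_id))
        .left_join(bot_versions::table)
        .load(conn)
}
```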
diff --git a/proto/bot_api.proto b/proto/bot_api.proto
new file mode 100644
index 0000000..08839f0
--- /dev/null
+++ b/proto/bot_api.proto
@@ -0,0 +1,36 @@
+syntax = "proto3";
+
+package grpc.planetwars.bot_api;
+
+message Hello {
+ string hello_message = 1;
+}
+
+message HelloResponse {
+ string response = 1;
+}
+
+message PlayerRequest {
+ int32 request_id = 1;
+ bytes content = 2;
+}
+
+message PlayerRequestResponse {
+ int32 request_id = 1;
+ bytes content = 2;
+}
+
+message MatchRequest {
+ string opponent_name = 1;
+}
+
+message CreatedMatch {
+ int32 match_id = 1;
+ string player_key = 2;
+}
+
+service BotApiService {
+ rpc CreateMatch(MatchRequest) returns (CreatedMatch);
+ // server sends requests to the player, player responds
+ rpc ConnectBot(stream PlayerRequestResponse) returns (stream PlayerRequest);
+}
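For a sense of how this service is meant to be driven, here is a minimal client sketch using tonic, matching the tonic-build setup used elsewhere in this commit. The endpoint address and opponent name are placeholders, and how the player_key from CreatedMatch is presented when calling ConnectBot (most likely request metadata) is an assumption, since this file does not specify it:

```rust
pub mod pb {
    tonic::include_proto!("grpc.planetwars.bot_api");
}

use pb::bot_api_service_client::BotApiServiceClient;
use pb::{MatchRequest, PlayerRequestResponse};
use tokio_stream::wrappers::ReceiverStream;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut client = BotApiServiceClient::connect("http://localhost:50051").await?;

    // Ask the server to set up a match against a named opponent.
    let created = client
        .create_match(MatchRequest {
            opponent_name: "simplebot".to_string(),
        })
        .await?
        .into_inner();
    println!("match {} created, player key: {}", created.match_id, created.player_key);

    // Bidirectional stream: the server pushes PlayerRequests, the bot answers each one.
    let (tx, rx) = tokio::sync::mpsc::channel(8);
    let mut inbound = client
        .connect_bot(ReceiverStream::new(rx))
        .await?
        .into_inner();
    while let Some(request) = inbound.message().await? {
        let reply = PlayerRequestResponse {
            request_id: request.request_id,
            content: b"{\"moves\":[]}".to_vec(), // placeholder payload; the real format is game-specific
        };
        if tx.send(reply).await.is_err() {
            break;
        }
    }
    Ok(())
}
```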
diff --git a/web/pw-visualizer/assets/res/earth.png b/web/pw-visualizer/assets/res/earth.png
new file mode 100644
index 0000000..7897e32
--- /dev/null
+++ b/web/pw-visualizer/assets/res/earth.png
Binary files differ
diff --git a/web/pw-visualizer/assets/res/ship.png b/web/pw-visualizer/assets/res/ship.png
new file mode 100644
index 0000000..fe289aa
--- /dev/null
+++ b/web/pw-visualizer/assets/res/ship.png
Binary files differ
diff --git a/web/pw-visualizer/assets/res/ship.svg b/web/pw-visualizer/assets/res/ship.svg
index ee202ff..41af3ea 100644
--- a/web/pw-visualizer/assets/res/ship.svg
+++ b/web/pw-visualizer/assets/res/ship.svg
@@ -2,20 +2,20 @@
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
- xmlns:dc="http://purl.org/dc/elements/1.1/"
- xmlns:cc="http://creativecommons.org/ns#"
- xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
- xmlns:svg="http://www.w3.org/2000/svg"
- xmlns="http://www.w3.org/2000/svg"
- xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
- xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
- width="100mm"
- height="100mm"
- viewBox="0 0 100 99.999999"
+ width="10.231839cm"
+ height="19.597593cm"
+ viewBox="0 0 102.31839 195.97593"
version="1.1"
id="svg8"
sodipodi:docname="ship.svg"
- inkscape:version="0.92.4 (f8dce91, 2019-08-02)">
+ inkscape:version="1.1.2 (0a00cf5339, 2022-02-04)"
+ xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
+ xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
+ xmlns="http://www.w3.org/2000/svg"
+ xmlns:svg="http://www.w3.org/2000/svg"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:cc="http://creativecommons.org/ns#"
+ xmlns:dc="http://purl.org/dc/elements/1.1/">
<defs
id="defs2" />
<sodipodi:namedview
@@ -26,8 +26,8 @@
inkscape:pageopacity="0.0"
inkscape:pageshadow="2"
inkscape:zoom="1.0993438"
- inkscape:cx="676.08563"
- inkscape:cy="474.10966"
+ inkscape:cx="424.34405"
+ inkscape:cy="384.32017"
inkscape:document-units="mm"
inkscape:current-layer="layer1"
showgrid="true"
@@ -35,15 +35,19 @@
fit-margin-left="0"
fit-margin-right="0"
fit-margin-bottom="0"
- inkscape:window-width="2560"
- inkscape:window-height="1417"
+ inkscape:window-width="1920"
+ inkscape:window-height="1048"
inkscape:window-x="0"
- inkscape:window-y="0"
- inkscape:window-maximized="0"
- gridtolerance="10">
+ inkscape:window-y="32"
+ inkscape:window-maximized="1"
+ gridtolerance="10"
+ inkscape:pagecheckerboard="0"
+ units="cm">
<inkscape:grid
type="xygrid"
- id="grid894" />
+ id="grid894"
+ originx="-160.50747"
+ originy="118.75037" />
</sodipodi:namedview>
<metadata
id="metadata5">
@@ -53,7 +57,6 @@
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
- <dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
@@ -61,17 +64,17 @@
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1"
- transform="translate(229.05372,-117.27915)">
+ transform="translate(68.546255,1.4712222)">
<ellipse
ry="79.47506"
rx="48.672089"
cy="39.779182"
cx="439.0813"
id="ellipse888"
- style="opacity:1;fill:#00ffff;fill-opacity:1;stroke:none;stroke-width:6.61458302;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" />
+ style="opacity:1;fill:#00ffff;fill-opacity:1;stroke:none;stroke-width:6.61458;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" />
<path
- style="opacity:1;fill:#000000;fill-opacity:1;stroke:none;stroke-width:24.99999809;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
- d="M 800 -448.82031 C 800 -448.82031 640 -342.04689 640 -92.046875 C 640 -16.101728 661.67774 51.924976 695.88672 97.775391 C 640.96482 152.90966 593.39426 234.74166 610 267.95312 C 620.00003 287.95314 720.00001 297.95311 730 287.95312 C 751.8315 266.12161 757.39662 198.03742 758.92773 149.62305 C 772.03579 155.04778 785.80002 157.95312 800 157.95312 C 814.19998 157.95312 827.96422 155.04778 841.07227 149.62305 C 842.60338 198.03742 848.1685 266.12161 870 287.95312 C 879.99999 297.95311 979.99997 287.95314 990 267.95312 C 1006.6057 234.74166 959.03518 152.90966 904.11328 97.775391 C 938.32226 51.924976 959.99999 -16.101728 960 -92.046875 C 960.00002 -342.04689 800 -448.82031 800 -448.82031 z M 800 -352.04688 C 800 -352.04688 908.96486 -279.33252 908.96484 -109.07617 C 908.96484 -15.046189 860.17918 61.179688 800 61.179688 C 739.82082 61.179688 691.03515 -15.046189 691.03516 -109.07617 C 691.03516 -279.33252 800 -352.04687 800 -352.04688 z "
+ style="opacity:1;fill:#000000;fill-opacity:1;stroke:none;stroke-width:25;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 800,-448.82031 c 0,0 -160,106.77342 -160,356.773435 0,75.945147 21.67774,143.971851 55.88672,189.822266 C 640.96482,152.90966 593.39426,234.74166 610,267.95312 c 10.00003,20.00002 110.00001,29.99999 120,20 21.8315,-21.83151 27.39662,-89.9157 28.92773,-138.33007 13.10806,5.42473 26.87229,8.33007 41.07227,8.33007 14.19998,0 27.96422,-2.90534 41.07227,-8.33007 1.53111,48.41437 7.09623,116.49856 28.92773,138.33007 9.99999,9.99999 109.99997,2e-5 120,-20 C 1006.6057,234.74166 959.03518,152.90966 904.11328,97.775391 938.32226,51.924976 959.99999,-16.101728 960,-92.046875 960.00002,-342.04689 800,-448.82031 800,-448.82031 Z m 0,96.77343 c 0,0 108.96486,72.71436 108.96484,242.97071 0,94.029981 -48.78566,170.255858 -108.96484,170.255858 -60.17918,0 -108.96485,-76.225877 -108.96484,-170.255858 C 691.03516,-279.33252 800,-352.04687 800,-352.04688 Z"
transform="matrix(0.26458333,0,0,0.26458333,-229.05372,117.27915)"
id="path4600" />
</g>
diff --git a/web/pw-visualizer/assets/shaders/frag/image.glsl b/web/pw-visualizer/assets/shaders/frag/image.glsl
index 69c8b91..f8d62c9 100644
--- a/web/pw-visualizer/assets/shaders/frag/image.glsl
+++ b/web/pw-visualizer/assets/shaders/frag/image.glsl
@@ -10,5 +10,4 @@ uniform sampler2D u_texture;
void main() {
gl_FragColor = texture2D(u_texture, v_texCoord);
-// gl_FragColor = vec4(0.7, 0.7, 0.0, 1.0);
}
diff --git a/web/pw-visualizer/assets/shaders/frag/masked_image.glsl b/web/pw-visualizer/assets/shaders/frag/masked_image.glsl
new file mode 100644
index 0000000..da0c787
--- /dev/null
+++ b/web/pw-visualizer/assets/shaders/frag/masked_image.glsl
@@ -0,0 +1,21 @@
+#ifdef GL_ES
+precision mediump float;
+#endif
+
+// Passed in from the vertex shader.
+varying vec2 v_texCoord;
+
+uniform float u_step_interval;
+uniform float u_time;
+uniform vec3 u_color;
+uniform vec3 u_color_next;
+
+
+// The texture.
+uniform sampler2D u_texture;
+
+void main() {
+ float alpha = texture2D(u_texture, v_texCoord).a;
+ vec3 color = mix(u_color, u_color_next, u_time);
+ gl_FragColor = vec4(color, alpha);
+}
diff --git a/web/pw-visualizer/src/assets.ts b/web/pw-visualizer/src/assets.ts
index e04f2c1..5fa5215 100644
--- a/web/pw-visualizer/src/assets.ts
+++ b/web/pw-visualizer/src/assets.ts
@@ -4,9 +4,14 @@ export {default as earthSvg} from "../assets/res/earth.svg";
export {default as marsSvg} from "../assets/res/mars.svg";
export {default as venusSvg} from "../assets/res/venus.svg";
+export {default as earthPng} from "../assets/res/earth.png";
+export {default as shipPng} from "../assets/res/ship.png";
+
export {default as fontPng} from "../assets/res/font.png";
export {default as imageFragmentShader} from "../assets/shaders/frag/image.glsl?url";
+export {default as maskedImageFragmentShader} from "../assets/shaders/frag/masked_image.glsl?url";
+
export {default as simpleFragmentShader} from "../assets/shaders/frag/simple.glsl?url";
export {default as vorFragmentShader} from "../assets/shaders/frag/vor.glsl?url";
diff --git a/web/pw-visualizer/src/index.ts b/web/pw-visualizer/src/index.ts
index cd58aa7..bee1bab 100644
--- a/web/pw-visualizer/src/index.ts
+++ b/web/pw-visualizer/src/index.ts
@@ -1,6 +1,4 @@
import { Game } from "planetwars-rs";
-// import { memory } from "planetwars-rs/planetwars_rs_bg";
-// const memory = planetwars_bg.memory;
import type { Dictionary } from './webgl/util';
import type { BBox } from "./voronoi/voronoi-core";
@@ -8,8 +6,6 @@ import {
Resizer,
resizeCanvasToDisplaySize,
FPSCounter,
- url_to_mesh,
- Mesh,
} from "./webgl/util";
import {
Shader,
@@ -22,12 +18,13 @@ import {
UniformMatrix3fv,
UniformBool,
} from "./webgl/shader";
-import { Renderer } from "./webgl/renderer";
+import { DefaultRenderable, Renderer } from "./webgl/renderer";
import { VertexBuffer, IndexBuffer } from "./webgl/buffer";
import { VertexBufferLayout, VertexArray } from "./webgl/vertexBufferLayout";
import { defaultLabelFactory, LabelFactory, Align, Label } from "./webgl/text";
import { VoronoiBuilder } from "./voronoi/voronoi";
import * as assets from "./assets";
+import { Texture } from "./webgl/texture";
function to_bbox(box: number[]): BBox {
@@ -39,14 +36,6 @@ function to_bbox(box: number[]): BBox {
};
}
-// function f32v(ptr: number, size: number): Float32Array {
-// return new Float32Array(memory.buffer, ptr, size);
-// }
-
-// function i32v(ptr: number, size: number): Int32Array {
-// return new Int32Array(memory.buffer, ptr, size);
-// }
-
export function set_game_name(name: string) {
ELEMENTS["name"].innerHTML = name;
}
@@ -133,6 +122,7 @@ export class GameInstance {
shader: Shader;
vor_shader: Shader;
image_shader: Shader;
+ masked_image_shader: Shader;
text_factory: LabelFactory;
planet_labels: Label[];
@@ -140,6 +130,7 @@ export class GameInstance {
ship_ibo: IndexBuffer;
ship_vao: VertexArray;
+ ship_texture: Texture;
// TODO: find a better way
max_num_ships: number;
@@ -159,8 +150,9 @@ export class GameInstance {
constructor(
game: Game,
- meshes: Mesh[],
- ship_mesh: Mesh,
+ planets_textures: Texture[],
+ ship_texture: Texture,
+ font_texture: Texture,
shaders: Dictionary<ShaderFactory>
) {
this.game = game;
@@ -174,11 +166,14 @@ export class GameInstance {
this.vor_shader = shaders["vor"].create_shader(GL, {
PLANETS: "" + planets.length,
});
+ this.masked_image_shader = shaders["masked_image"].create_shader(GL);
- this.text_factory = defaultLabelFactory(GL, this.image_shader);
+ this.text_factory = defaultLabelFactory(GL, font_texture, this.image_shader);
this.planet_labels = [];
this.ship_labels = [];
+ this.ship_texture = ship_texture
+
this.resizer = new Resizer(CANVAS, [...game.get_viewbox()], true);
this.renderer = new Renderer();
this.game.update_turn(0);
@@ -188,15 +183,8 @@ export class GameInstance {
// List of [(x, y, r)] for all planets
this._create_voronoi(planets);
- this._create_planets(planets, meshes);
-
- // create_shipes
- this.ship_ibo = new IndexBuffer(GL, ship_mesh.cells);
- const ship_positions = new VertexBuffer(GL, ship_mesh.positions);
- const ship_layout = new VertexBufferLayout();
- ship_layout.push(GL.FLOAT, 3, 4, "a_position");
- this.ship_vao = new VertexArray();
- this.ship_vao.addBuffer(ship_positions, ship_layout);
+ this._create_planets(planets, planets_textures);
+
this.max_num_ships = 0;
// Set slider correctly
@@ -233,46 +221,52 @@ export class GameInstance {
this.renderer.addRenderable(this.vor_builder.getRenderable(), LAYERS.vor);
}
- _create_planets(planets: Float32Array, meshes: Mesh[]) {
+ _create_planets(planets: Float32Array, planets_textures: Texture[]) {
for (let i = 0; i < this.planet_count; i++) {
{
const transform = new UniformMatrix3fv([
- 1,
- 0,
- 0,
- 0,
- 1,
- 0,
- -planets[i * 3],
- -planets[i * 3 + 1],
- 1,
+ 1, 0, 0,
+ 0, 1, 0,
+ -planets[i * 3], -planets[i * 3 + 1], 1, // TODO: why are negations needed?
]);
- const indexBuffer = new IndexBuffer(
- GL,
- meshes[i % meshes.length].cells
- );
- const positionBuffer = new VertexBuffer(
- GL,
- meshes[i % meshes.length].positions
- );
-
- const layout = new VertexBufferLayout();
- layout.push(GL.FLOAT, 3, 4, "a_position");
+ const gl = GL;
+ const ib = new IndexBuffer(gl, [
+ 0, 1, 2,
+ 1, 2, 3
+ ]);
+ const vb_pos = new VertexBuffer(gl, [
+ -1, 1,
+ 1, 1,
+ -1, -1,
+ 1, -1
+ ]);
+ const vb_tex = new VertexBuffer(gl, [
+ 0, 0,
+ 1, 0,
+ 0, 1,
+ 1, 1]);
+
+ const layout_pos = new VertexBufferLayout();
+ // 2?
+ layout_pos.push(gl.FLOAT, 2, 4, "a_position");
+
+ const layout_tex = new VertexBufferLayout();
+ layout_tex.push(gl.FLOAT, 2, 4, "a_texCoord");
+
const vao = new VertexArray();
- vao.addBuffer(positionBuffer, layout);
-
- this.renderer.addToDraw(
- indexBuffer,
- vao,
- this.shader,
- {
- u_trans: transform,
- u_trans_next: transform,
- },
- [],
- LAYERS.planet
- );
+ vao.addBuffer(vb_pos, layout_pos);
+ vao.addBuffer(vb_tex, layout_tex);
+
+ const uniforms = {
+ u_trans: transform,
+ u_trans_next: transform,
+ };
+
+ const renderable = new DefaultRenderable(ib, vao, this.masked_image_shader, [planets_textures[0]], uniforms);
+
+ this.renderer.addRenderable(renderable, LAYERS.planet);
+
}
{
@@ -350,16 +344,39 @@ export class GameInstance {
const ship_colours = this.game.get_ship_colours();
for (let i = this.max_num_ships; i < ship_counts.length; i++) {
- this.renderer.addToDraw(
- this.ship_ibo,
- this.ship_vao,
- this.shader,
- {},
- [],
- LAYERS.ship
- );
+ const gl = GL;
+ const ib = new IndexBuffer(gl, [
+ 0, 1, 2,
+ 1, 2, 3
+ ]);
+ const ratio = this.ship_texture.getWidth() / this.ship_texture.getHeight();
+ const vb_pos = new VertexBuffer(gl, [
+ -ratio, 1,
+ ratio, 1,
+ -ratio, -1,
+ ratio, -1
+ ]);
+ const vb_tex = new VertexBuffer(gl, [
+ 0, 0,
+ 1, 0,
+ 0, 1,
+ 1, 1,
+ ]);
+
+ const layout_pos = new VertexBufferLayout();
+ layout_pos.push(gl.FLOAT, 2, 4, "a_position");
+
+ const layout_tex = new VertexBufferLayout();
+ layout_tex.push(gl.FLOAT, 2, 4, "a_texCoord");
+
+ const vao = new VertexArray();
+ vao.addBuffer(vb_pos, layout_pos);
+ vao.addBuffer(vb_tex, layout_tex);
+ const renderable = new DefaultRenderable(ib, vao, this.masked_image_shader, [this.ship_texture], {});
+ this.renderer.addRenderable(renderable, LAYERS.ship);
const label = this.text_factory.build(GL);
+
this.ship_labels.push(label);
this.renderer.addRenderable(label.getRenderable(), LAYERS.ship_label);
}
@@ -430,25 +447,30 @@ export class GameInstance {
this.use_vor = false;
}
+ const shaders_to_update = [
+ this.shader,
+ this.image_shader,
+ this.masked_image_shader,
+ ];
+
+
    // If not playing, still render with a different viewbox, so people can still pan etc.
if (!this.playing) {
this.last_time = time;
- this.shader.uniform(
- GL,
- "u_viewbox",
- new Uniform4f(this.resizer.get_viewbox())
- );
+ shaders_to_update.forEach((shader) => {
+ shader.uniform(
+ GL,
+ "u_viewbox",
+ new Uniform4f(this.resizer.get_viewbox())
+ );
+ })
+
this.vor_shader.uniform(
GL,
"u_viewbox",
new Uniform4f(this.resizer.get_viewbox())
);
- this.image_shader.uniform(
- GL,
- "u_viewbox",
- new Uniform4f(this.resizer.get_viewbox())
- );
this.renderer.render(GL);
return;
@@ -481,39 +503,24 @@ export class GameInstance {
this.vor_shader.uniform(GL, "u_resolution", new Uniform2f(RESOLUTION));
this.vor_shader.uniform(GL, "u_vor", new UniformBool(this.use_vor));
- this.shader.uniform(
- GL,
- "u_time",
- new Uniform1f((time - this.last_time) / ms_per_frame)
- );
- this.shader.uniform(
- GL,
- "u_mouse",
- new Uniform2f(this.resizer.get_mouse_pos())
- );
- this.shader.uniform(
- GL,
- "u_viewbox",
- new Uniform4f(this.resizer.get_viewbox())
- );
- this.shader.uniform(GL, "u_resolution", new Uniform2f(RESOLUTION));
-
- this.image_shader.uniform(
- GL,
- "u_time",
- new Uniform1f((time - this.last_time) / ms_per_frame)
- );
- this.image_shader.uniform(
- GL,
- "u_mouse",
- new Uniform2f(this.resizer.get_mouse_pos())
- );
- this.image_shader.uniform(
- GL,
- "u_viewbox",
- new Uniform4f(this.resizer.get_viewbox())
- );
- this.image_shader.uniform(GL, "u_resolution", new Uniform2f(RESOLUTION));
+ shaders_to_update.forEach((shader) => {
+ shader.uniform(
+ GL,
+ "u_time",
+ new Uniform1f((time - this.last_time) / ms_per_frame)
+ );
+ shader.uniform(
+ GL,
+ "u_mouse",
+ new Uniform2f(this.resizer.get_mouse_pos())
+ );
+ shader.uniform(
+ GL,
+ "u_viewbox",
+ new Uniform4f(this.resizer.get_viewbox())
+ );
+ shader.uniform(GL, "u_resolution", new Uniform2f(RESOLUTION));
+ });
// Render
this.renderer.render(GL);
@@ -578,18 +585,17 @@ export class GameInstance {
}
var game_instance: GameInstance;
-var meshes: Mesh[];
+var textures: Texture[];
var shaders: Dictionary<ShaderFactory>;
export async function set_instance(source: string): Promise<GameInstance> {
// TODO: embed shader programs
- if (!meshes || !shaders) {
- const mesh_promises = [
- assets.shipSvg,
- assets.earthSvg,
- assets.marsSvg,
- assets.venusSvg,
- ].map(url_to_mesh);
+ if (!textures || !shaders) {
+ const texture_promises = [
+ Texture.fromImage(GL, assets.fontPng, "font"),
+ Texture.fromImage(GL, assets.shipPng, "ship"),
+ Texture.fromImage(GL, assets.earthPng, "earth")
+ ];
const shader_promies = [
(async () =>
@@ -616,10 +622,19 @@ export async function set_instance(source: string): Promise<GameInstance> {
assets.simpleVertexShader,
),
])(),
+ (async () =>
+ <[string, ShaderFactory]>[
+ "masked_image",
+ await ShaderFactory.create_factory(
+ assets.maskedImageFragmentShader,
+ assets.simpleVertexShader,
+ ),
+ ])(),
+
];
let shaders_array: [string, ShaderFactory][];
- [meshes, shaders_array] = await Promise.all([
- Promise.all(mesh_promises),
+ [textures, shaders_array] = await Promise.all([
+ Promise.all(texture_promises),
Promise.all(shader_promies),
]);
@@ -631,8 +646,9 @@ export async function set_instance(source: string): Promise<GameInstance> {
game_instance = new GameInstance(
Game.new(source),
- meshes.slice(1),
- meshes[0],
+ textures.slice(2),
+ textures[1],
+ textures[0],
shaders
);
diff --git a/web/pw-visualizer/src/webgl/index.ts b/web/pw-visualizer/src/webgl/index.ts
index 1742713..8d785ef 100644
--- a/web/pw-visualizer/src/webgl/index.ts
+++ b/web/pw-visualizer/src/webgl/index.ts
@@ -57,8 +57,8 @@ async function main() {
return;
}
+ // TODO: do we still need this?
const mesh = await url_to_mesh("static/res/images/earth.svg");
- console.log(Math.max(...mesh.positions), Math.min(...mesh.positions));
const renderer = new Renderer();
const factory = await ShaderFactory.create_factory(assets.simpleFragmentShader, assets.simpleVertexShader);
diff --git a/web/pw-visualizer/src/webgl/text.ts b/web/pw-visualizer/src/webgl/text.ts
index fdfbc55..1ae6f37 100644
--- a/web/pw-visualizer/src/webgl/text.ts
+++ b/web/pw-visualizer/src/webgl/text.ts
@@ -33,8 +33,8 @@ export class LabelFactory {
font: FontInfo;
shader: Shader;
- constructor(gl: WebGLRenderingContext, loc: string, font: FontInfo, shader: Shader) {
- this.texture = Texture.fromImage(gl, loc, 'font');
+ constructor(gl: WebGLRenderingContext, fontTexture: Texture, font: FontInfo, shader: Shader) {
+ this.texture = fontTexture;
this.font = font;
this.shader = shader;
}
@@ -79,7 +79,6 @@ export class Label {
const verts_pos = [];
const verts_tex = [];
- const letterHeight = this.font.letterHeight / this.font.textureHeight;
let xPos = 0;
switch (h_align) {
@@ -108,10 +107,17 @@ export class Label {
for (let i = 0; i < text.length; i++) {
const info = this.font.glyphInfos[text[i]];
if (info) {
+
const dx = info.width / this.font.letterHeight;
- const letterWidth = info.width / this.font.textureWidth;
- const x0 = info.x / this.font.textureWidth;
- const y0 = info.y / this.font.textureHeight;
+
+ // apply half-pixel correction to prevent texture bleeding
+ // we should address the center of each texel, not the border
+ // https://gamedev.stackexchange.com/questions/46963/how-to-avoid-texture-bleeding-in-a-texture-atlas
+ const x0 = (info.x + 0.5) / this.font.textureWidth;
+ const y0 = (info.y + 0.5) / this.font.textureHeight;
+ const letterWidth = (info.width - 1) / this.font.textureWidth;
+ const letterHeight = (this.font.letterHeight - 1) / this.font.textureHeight;
+
verts_pos.push(xPos, yStart);
verts_pos.push(xPos + dx, yStart);
verts_pos.push(xPos, yStart-1);
@@ -138,7 +144,7 @@ export class Label {
}
}
-export function defaultLabelFactory(gl: WebGLRenderingContext, shader: Shader): LabelFactory {
+export function defaultLabelFactory(gl: WebGLRenderingContext, fontTexture: Texture, shader: Shader): LabelFactory {
const fontInfo = {
letterHeight: 8,
spaceWidth: 8,
@@ -189,5 +195,5 @@ export function defaultLabelFactory(gl: WebGLRenderingContext, shader: Shader):
},
};
- return new LabelFactory(gl, fontPng, fontInfo, shader);
+ return new LabelFactory(gl, fontTexture, fontInfo, shader);
}
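To spell out the half-pixel correction in the glyph code above: for a glyph at atlas position (x, y) with size width × letterHeight in a textureWidth × textureHeight atlas, the quad now samples between texel centers instead of texel borders,

$$u_0 = \frac{x + 0.5}{\text{textureWidth}}, \qquad u_1 = u_0 + \frac{\text{width} - 1}{\text{textureWidth}} = \frac{x + \text{width} - 0.5}{\text{textureWidth}},$$

and analogously for the v coordinates with letterHeight and textureHeight, so a glyph's edge samples never land on the border it shares with its neighbour in the atlas.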
diff --git a/web/pw-visualizer/src/webgl/texture.ts b/web/pw-visualizer/src/webgl/texture.ts
index 9d6adcf..faafe76 100644
--- a/web/pw-visualizer/src/webgl/texture.ts
+++ b/web/pw-visualizer/src/webgl/texture.ts
@@ -11,15 +11,18 @@ export class Texture {
gl: WebGLRenderingContext,
path: string,
name: string,
- ): Texture {
- const out = new Texture(gl, name);
-
- const image = new Image();
- image.onload = out.setImage.bind(out, gl, image);
- image.onerror = error;
- image.src = path;
-
- return out;
+ ): Promise<Texture> {
+ return new Promise((resolve, reject) => {
+ const out = new Texture(gl, name);
+
+ const image = new Image();
+ image.onload = () => {
+ out.setImage(gl, image);
+ resolve(out);
+ }
+ image.onerror = reject;
+ image.src = path;
+ })
}
static fromRenderer(
@@ -99,8 +102,3 @@ export class Texture {
return this.height;
}
}
-
-function error(e: any) {
- console.error("IMAGE LOAD ERROR");
- console.error(e);
-}