Compare commits

...

182 commits

Author SHA1 Message Date
4a76fc88fd
fix(lfs): re-add assets
Some checks failed
Build Zenyx ⚡ / 🔧 Setup Environment (push) Successful in 13s
Build Zenyx ⚡ / 🧪 Run Cargo Tests (push) Failing after 53s
Build Zenyx ⚡ / 🏗️ Build aarch64-unknown-linux-gnu (push) Has been skipped
Build Zenyx ⚡ / 🏗️ Build x86_64-unknown-linux-gnu (push) Has been skipped
Build Zenyx ⚡ / 🏗️ Build x86_64-pc-windows-msvc (push) Has been skipped
2025-04-21 18:46:37 -04:00
f0ccea2a95
fix(lfs): move assets to Git LFS 2025-04-21 18:44:31 -04:00
d1625af8eb feat(ci): windows build support 2025-04-21 17:52:56 -04:00
a8fb89c4a9 Merge branch 'cicd' (#9)
Closes #6

Reviewed-on: #9
2025-04-21 23:24:47 +02:00
09ad151b5e fix(ci): caching and vulkan builds, add tests 2025-04-21 17:08:22 -04:00
22a61ea824 Merge branch 'sparse-sets' (#5)
Closes #2

Reviewed-on: #5
2025-04-21 00:07:59 +02:00
4c02b4b783 chore: fix typo in function name 2025-04-21 00:05:33 +02:00
a4f3d5b9ef feat(ecs): add rudimentary sparse set impl 2025-04-20 23:54:16 +02:00
1aa2c048b1 fix(ci): remove unneeded -y argument from cargo-install 2025-04-20 15:57:42 -04:00
8e0039c033 fix(ci): use cargo-install instead of cargo-binstall 2025-04-20 15:55:38 -04:00
41dff49da2 fix(ci): correct mismatched cache keys 2025-04-20 15:39:18 -04:00
4678061693 fix(ci): incorrect tar arguments 2025-04-20 15:32:42 -04:00
6a33f76325 fix(ci): use correct link for zig 2025-04-20 15:28:27 -04:00
54ff61804d fix(ci): do not pipe tar when extracting zig 2025-04-20 15:24:14 -04:00
14312e2a09 feat(rendering): add default checkerboard texture 2025-04-20 14:47:30 -04:00
d5c85402a5 Merge branch 'zlog-json' (#1)
Reviewed-on: #1
2025-04-20 20:22:45 +02:00
559368684e fix(zlog): remove unused LogJsonStructure struct 2025-04-19 16:39:49 -04:00
7856664a91 fix: use string logging by default 2025-04-19 16:16:01 -04:00
786ff95853 feat(zlog): add tests 2025-04-19 16:07:04 -04:00
778776e807 feat(zlog)!: JSON logging support 2025-04-19 15:39:21 -04:00
1a35140c12 Revert "feat(ci): use cargo zibuild & xbuild"
This reverts commit 87d810aff3.
2025-04-19 21:12:04 +02:00
d0871bdc15 Revert "fix(ci): ensure node is installed"
This reverts commit cb91bde7f8.
2025-04-19 21:12:03 +02:00
cfd1ed06a2 fix(ci): ensure node is installed 2025-04-19 21:12:01 +02:00
ee70585127 feat(ci): use cargo zibuild & xbuild 2025-04-19 21:11:59 +02:00
13a7170b25 feat(ci): seperate build and setup jobs 2025-04-19 21:11:57 +02:00
6b0fad2509 fix(cicd): remove unneeded docker setup 2025-04-19 21:11:55 +02:00
4704284b2d chore(nix): cleanup and update flake 2025-04-19 21:11:52 +02:00
4f4920bd32 feat: add obj model loading
Co-authored-by: Chance <caznix01@gmail.com>
2025-04-19 21:11:51 +02:00
a5cc577043 fix: LICENSE copyright attribution 2025-04-19 21:11:49 +02:00
98d500a6eb fix(ci): renamed .gitlab to .forgejo, corrected workflow name 2025-04-19 21:11:46 +02:00
1521cad5cd wip(rendering): obj model loading 2025-04-19 21:11:44 +02:00
69cc0b264c fix(rendering): enable alpha blending 2025-04-19 21:11:42 +02:00
f2bb6e235c fix(android): properly destroy windows on suspend 2025-04-19 21:11:38 +02:00
89e4e5e11f feat(android): basic android support via winit native activity 2025-04-19 21:11:37 +02:00
0938c7d5fa feat(rendering): rendering textures with camera
Co-authored-by: BitSyndicate <contact@bitsyndicate.de>
2025-04-19 21:11:35 +02:00
1a9a8d6d3b feat: set max loglevel in logger configuration 2025-04-19 21:11:30 +02:00
2c8b4e8831 feat: add the ability to open and close new windows 2025-04-19 21:11:28 +02:00
4e3b4cd674 fix(ci): add missing vulkan libraries 2025-04-19 21:11:26 +02:00
b7a4befd47 fix: add .vscode to .gitignore 2025-04-19 21:11:20 +02:00
d30b0a2d2a feat: basic triangle rendering 2025-04-19 21:11:18 +02:00
b10568484e feat: event based non-blocking logger 2025-04-19 21:11:16 +02:00
30c11215b6 feat: fix apt not finding gcc in ci/cd 2025-04-19 21:11:13 +02:00
00d9be487d feat: add cross compilation without cargo-cross 2025-04-19 21:11:12 +02:00
8902b1522d feat: make ci/cd pipeline multi-target 2025-04-19 21:11:10 +02:00
196d015d92 feat: simplify ci/cd pipeline 2025-04-19 21:11:07 +02:00
369e54eb60 feat: add a WIP ci/cd pipeline 2025-04-19 21:11:05 +02:00
93da141584 Update .gitlab-ci.yml file 2025-04-19 21:11:03 +02:00
a302f47b2b chore: modify readme to use up to date information 2025-04-19 21:11:00 +02:00
947f6c4cad feat: modify flakenix to include cargo utils 2025-04-19 21:10:59 +02:00
af67d9e589 chore: broaden the support of ci/cd 2025-04-19 21:10:57 +02:00
f0deb7951e feat: push logs to a dedicated buffer 2025-04-19 21:10:37 +02:00
d5890818c6 refactor: A new beginning 2025-04-19 21:10:33 +02:00
b0e682e454 move Cross.toml to engine folder 2025-04-19 21:10:30 +02:00
00488d1791 seperate nested cross commands 2025-04-19 21:10:29 +02:00
cb8ab0afe6 fix: temporarily switch to codeberg-small 2025-04-19 21:10:27 +02:00
258b7374fd fix: configure cross with Cross.toml 2025-04-19 21:10:24 +02:00
f23d50c66a fix: specify docker in docker 2025-04-19 21:10:22 +02:00
ad8828fac0 fix: cross cant find cargo 2025-04-19 21:10:20 +02:00
d3256b70e5 Merge branch 'main' of codeberg.org:Caznix/Zenyx 2025-04-19 21:10:09 +02:00
d50e47ba54 fix:manually set up docker-in-docker 2025-04-19 21:10:06 +02:00
Speedy_Lex
d25ffc6276 Fix getting cache info on aarch64 2025-04-19 21:10:03 +02:00
d105ff745b properly specify artifact uses 2025-04-19 21:10:01 +02:00
2aa5f2157a feat:rework ci/cd to use cargo-cross 2025-04-19 21:10:00 +02:00
655a5607fe build: remove unused .cargo folder 2025-04-19 21:09:58 +02:00
Speedy_Lex
40714a390c Try fix CI artifacts 2025-04-19 21:09:56 +02:00
Speedy_Lex
2be426cb02 Fix windows hanging on main window close 2025-04-19 21:09:54 +02:00
ae5cb17b7d fix: typo in arrch in workflow 2025-04-19 21:09:52 +02:00
3458028064 fix: sudo not intalled on runners 2025-04-19 21:09:50 +02:00
f67211234c fix: specify github source for actions-rs 2025-04-19 21:09:48 +02:00
0b3b91ad19 fix: pkg-config cannot cross compile to different architectures 2025-04-19 21:09:46 +02:00
7d79e13f51 fix: dont use codeberg's broken ci/cd runners 2025-04-19 21:09:44 +02:00
494683c0e8 fix: workflow fails due to long file names 2025-04-19 21:09:42 +02:00
b5ae2a73a5 fix: ci/cd workflow doesnt properly compile wayland 2025-04-19 21:09:40 +02:00
242520098b change github workflow to be forgejo compatible 2025-04-19 21:09:38 +02:00
43e157a3d0 build: remove unused rust-toolchain.toml 2025-04-19 21:09:36 +02:00
00ec1350b7 feat: basic GUI terminal when pressing F12 2025-04-19 21:09:32 +02:00
07871b77f3 chore: run clippy and cargo fmt on pending changes 2025-04-19 21:09:10 +02:00
db3d078935 fix: build script generates dead code warnings if versioning data is not used 2025-04-19 21:08:47 +02:00
71bdff0854 build: remove regex dependency in favor of rust iterators 2025-04-19 21:08:46 +02:00
ead0c12749 fix: running default Nix package doesnt provide runtime libraries 2025-04-19 21:08:44 +02:00
970e9757c9 feat: add more useful debug information to system metadata 2025-04-19 21:08:40 +02:00
ed23402212 update panic handler with system information 2025-04-19 21:08:00 +02:00
ed93baa404 add window icon 2025-04-19 21:07:54 +02:00
81e6b5cc4b fix creating two event loops at once 2025-04-19 21:07:16 +02:00
1413bd213f Merge branch 'main' of codeberg.org:Caznix/Zenyx 2025-04-19 21:06:07 +02:00
42f9c669c8 improve error handling and add metadata 2025-04-19 21:05:05 +02:00
6eae536478 improve error handling 2025-04-19 21:05:03 +02:00
a990d1c9c8 cross compile for windows and macOS 2025-04-19 21:05:01 +02:00
2d4736f12e show dialog on panic 2025-04-19 21:04:59 +02:00
Caznix
43b8e75ad7 Merge pull request 'Improve rendering and reduce rendering related crashes' from error_handling into main 2025-04-19 21:04:50 +02:00
11194e863e formatting 2025-04-19 21:04:46 +02:00
d0d0e7c016 remove unneeded clone 2025-04-19 21:04:43 +02:00
cae3b40541 load arbitrary model data 2025-04-19 21:04:41 +02:00
a853c24bc3 refactor renderer into smaller functions 2025-04-19 21:04:40 +02:00
2d40291d77 include runtime dependencies in default package 2025-04-19 21:04:34 +02:00
6b1b0d30f4 close all windows if the main window is destroyed 2025-04-19 21:04:32 +02:00
ceefdaf46b fix selecting wrong window for key events 2025-04-19 21:04:31 +02:00
Caznix
ce4702ec5f Merge pull request 'multi window support' from rendering into main 2025-04-19 21:04:22 +02:00
Caznix
0992586b54 Merge branch 'main' into rendering 2025-04-19 21:04:16 +02:00
8c6051c79d fix multi window support 2025-04-19 21:04:11 +02:00
883bd7ea69 exit event loop properly 2025-04-19 21:04:09 +02:00
06de4a85e5 convert the vec of windows to a hashmap 2025-04-19 21:04:08 +02:00
8f0c8dbb3d feat: attempt to close winit window (?) 2025-04-19 21:04:02 +02:00
d6806d4dbe feat: handle spawning multiple windows 2025-04-19 21:03:54 +02:00
f147330692 rename build.nix to default.nix 2025-04-19 21:03:51 +02:00
Caznix
75e09747c7 Merge pull request 'input_test' from input_test into main 2025-04-19 21:03:31 +02:00
9aa5079bce Merge branch 'rendering' 2025-04-19 21:03:25 +02:00
49178e89c9 press esc to change bg color 2025-04-19 21:03:21 +02:00
a6fa240e5d format rust and nix files 2025-04-19 21:03:19 +02:00
d694fa1eb9 deprecate and remove zlua 2025-04-19 21:03:17 +02:00
40792592b0 proper 3d projection 2025-04-19 21:03:15 +02:00
852d3f855d Merge pull request from Zenyx-Engine/core_update
core update
2025-04-19 21:03:09 +02:00
15b148df37 Merge branch 'main' into core_update 2025-04-19 21:03:03 +02:00
32a5c46f8c cube 2025-04-19 21:02:42 +02:00
593b9ef119 remove welcome message to save terminal space 2025-04-19 21:02:41 +02:00
5ae67d0162 update workflow to use nightly 2025-04-19 21:02:37 +02:00
487897cdb9 finally fix workflow i hope 2025-04-19 21:02:35 +02:00
c563119c77 remove categories 2025-04-19 21:02:34 +02:00
efd863d444 Merge pull request from Zenyx-Engine/dependabot/cargo/colored-3.0.0
Update colored requirement from 2.2.0 to 3.0.0
2025-04-19 21:02:28 +02:00
dependabot[bot]
6770d28454 Update colored requirement from 2.2.0 to 3.0.0
Updates the requirements on [colored](https://github.com/mackwic/colored) to permit the latest version.
- [Release notes](https://github.com/mackwic/colored/releases)
- [Changelog](https://github.com/colored-rs/colored/blob/master/CHANGELOG.md)
- [Commits](https://github.com/mackwic/colored/compare/v2.2.0...v3.0.0)

---
updated-dependencies:
- dependency-name: colored
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-04-19 21:02:24 +02:00
7093448efe Merge pull request from GhostedGaming/main
Added Lua standard library
2025-04-19 21:02:17 +02:00
9768d8177c Merge pull request from GhostedGaming/main
Added a file for zlua and made a new function
2025-04-19 21:02:08 +02:00
GhostedGaming
96c383cb39 Removed Problems 2025-04-19 21:02:04 +02:00
GhostedGaming
8d6bb42e83 add rudimentary Text editor 2025-04-19 21:01:53 +02:00
GhostedGaming
9e7d8dff13 Added better syntax 2025-04-19 21:01:49 +02:00
GhostedGaming
cb52ce8de0 Imported the whole library and now we have alot more functtions (sorry caz) 2025-04-19 21:01:48 +02:00
GhostedGaming
f39df7d29d Working if statement
partially
2025-04-19 21:01:45 +02:00
GhostedGaming
7e940200c6 Added a file for zlua and made a new function 2025-04-19 21:01:43 +02:00
173da4c14a ZLUA REPL!!!!! (#18) 2025-04-19 21:01:42 +02:00
c57e5c2d49 polish repl (#17)
* add less daunting panic message on release builds
2025-04-19 21:01:37 +02:00
40769ec693 change cargo.toml name (#14)
* 🔥🔥🔥🔥🔥

* make workflow use nightly

* remove code quality checks temporarily

* fix check version i think

* build for windowsARM

* forgot to specify target

* change cargo.toml to zenyx to fix workflow
2025-04-19 21:01:36 +02:00
cc9beed5d5 specify windows arm target (#13)
* 🔥🔥🔥🔥🔥

* make workflow use nightly

* remove code quality checks temporarily

* fix check version i think

* build for windowsARM

* forgot to specify target
2025-04-19 21:01:34 +02:00
7359780b57 windows ARM workflow (#12) 2025-04-19 21:01:31 +02:00
08f090b6fe burn everything to the ground (#11)
🔥🔥🔥🔥🔥
2025-04-19 21:01:29 +02:00
3d87381f55 Improve repl autocorrect and error handling
* Combine comparison algorithims for autocorrect

* clear zephyr functions

* remove redundant comments because co-pilot is stupid and i probably will never try to use it again

* implement basic tab completion

* fix unused items

* Make workflow check code quality

* split code quality into its own file

* make action fail on bad formatting

* change workflow to nightly

* f it, code quality is considered breaking

* fix forgetting to set toolchain back to nightly when rewriting workflow (😔)

* Add condition for too little arguments

* run cargo fmt

* remove unneeded feature directive
2025-04-19 21:01:28 +02:00
3ac4dbddc2 Merge pull request from Zenyx-Engine/improve-repl
Improve repl
2025-04-19 21:01:12 +02:00
2c389c356a apply clippy changes 2025-04-19 21:01:08 +02:00
ae62990e90 fix formatting 2025-04-19 21:01:05 +02:00
c163860c0a fix tokio runtime stack overflow due to recursion 2025-04-19 21:01:04 +02:00
7adf770d54 exec .zenshell files + shell extensions
Co-authored-by: Tristan Poland (Trident_For_U) <tristanpoland@users.noreply.github.com>
2025-04-19 21:01:02 +02:00
acf22483a8 check command similarity 2025-04-19 21:00:59 +02:00
a599ef7df9 change the look of autocompletion 2025-04-19 21:00:57 +02:00
6c1e53ade4 make repl support strings via both double and single quotes 2025-04-19 21:00:56 +02:00
9baae0c875 fix unwrap errors 2025-04-19 21:00:52 +02:00
bab618708f add rust formatting rules 2025-04-19 21:00:50 +02:00
6f264d9278 print logging state upon change 2025-04-19 21:00:49 +02:00
2e026cfc85 Merge pull request from eatmynerds/rustyline-keyevents
Rustyline KeyEvents
2025-04-19 21:00:43 +02:00
eatmynerds
f982c78c5b Control logging using key events 2025-04-19 21:00:39 +02:00
1be431cf6f Merge branch 'main' of https://github.com/Caznix/Zenyx 2025-04-19 21:00:33 +02:00
69410274d7 Add release checking to rust workflow 2025-04-19 21:00:25 +02:00
33e304c68b Create dependabot.yml 2025-04-19 21:00:24 +02:00
c96a8ea2e5 update main readme and add readme for Zephyr 2025-04-19 21:00:22 +02:00
e0b84814f7 add zephyr ECS subcrate 2025-04-19 21:00:19 +02:00
506755d06f Add press kit section 2025-04-19 21:00:17 +02:00
0618d08076 Remove seperate editor and xtask 2025-04-19 21:00:15 +02:00
b7d727d9b5 replace reedline with rustyline 2025-04-19 21:00:10 +02:00
64e08c592d improve formatting 2025-04-19 21:00:08 +02:00
a7690ee3ae bump logo size 2025-04-19 21:00:07 +02:00
44c6f394ec add readme with branding 2025-04-19 21:00:00 +02:00
45fe541093 Merge branch 'main' of https://github.com/Caznix/Zenyx 2025-04-19 20:59:54 +02:00
8efa695ad0 remove --verbose from tests 2025-04-19 20:59:49 +02:00
Jason Spalti
23817b5b95 Refactor logging system to switch between stdout and file logging
* Refactor logging to switch between stdout and file logging

* Use "clear" instead of "tput reset" for unix

* Remove redundant comments
2025-04-19 20:59:48 +02:00
3ad52908e6 Merge pull request from eatmynerds/repl
Add descriptions to commands and improve REPL display formatting
2025-04-19 20:58:56 +02:00
Jason Spalti
aa75aa4157 Add descriptions to commands and improve REPL display formatting 2025-04-19 20:58:50 +02:00
cbc7b3a2e2 draw with wgpu 2025-04-19 20:58:49 +02:00
f9863934d5 WINIT WINDOW!!!! 2025-04-19 20:58:42 +02:00
b777761d54 no windows on arm 😔 2025-04-19 20:58:41 +02:00
abdf457981 test of github actions will actually use aarch64 this time 2025-04-19 20:58:39 +02:00
b1ef40d631 rename output file to bin instead of binary 2025-04-19 20:58:30 +02:00
b6b2eb6176 un seperate them cuz they compile twice? 2025-04-19 20:58:28 +02:00
3f0da1b539 seperate jobs 2025-04-19 20:58:27 +02:00
019ba263a9 run aarch64 in qemu??? im running out of ideas 2025-04-19 20:58:25 +02:00
4a71c4a988 disable fail-fast 2025-04-19 20:58:23 +02:00
2d9ef17fa0 idfk at this point 2025-04-19 20:58:22 +02:00
359bfd94c5 cross tools 2025-04-19 20:58:17 +02:00
fab19db2d7 only include binaries 2025-04-19 20:58:15 +02:00
a040ebd069 nvm 2025-04-19 20:58:14 +02:00
b1bea01210 maybe arm? 2025-04-19 20:58:05 +02:00
f4d934f07a fix cross compilation? 2025-04-19 20:58:04 +02:00
dad45beda2 fix foreign architectures 2025-04-19 20:58:02 +02:00
bd1895a2fb Merge branch 'main' of https://github.com/Caznix/Zenyx 2025-04-19 20:57:41 +02:00
941c91eb0f Rename cargo.toml to Cargo.toml 2024-12-01 18:10:14 -05:00
52 changed files with 6464 additions and 563 deletions

View file

@@ -1,2 +0,0 @@
[alias]
xtask = "run --quiet --package xtask --"

1
.envrc Normal file
View file

@@ -0,0 +1 @@
use nix

View file

@@ -0,0 +1,167 @@
name: Build Zenyx ⚡
on:
push:
pull_request:
jobs:
setup:
name: 🔧 Setup Environment
runs-on: ubuntu-latest
outputs:
cache-hit: ${{ steps.cache-tools.outputs.cache-hit }}
steps:
- name: 📥 Checkout source
uses: https://github.com/actions/checkout@v4
- name: 🗄️ Cache tools
uses: https://github.com/actions/cache@v4
id: cache-tools
with:
path: |
~/.cargo/bin
/tmp/zig
key: cargo-tools-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
cargo-tools-
- name: 🦀 Install Rust toolchain
if: steps.cache-tools.outputs.cache-hit != 'true'
uses: https://github.com/actions-rs/toolchain@v1
with:
toolchain: stable
components: rust-src
override: true
- name: 📦 Install Build Tools
if: steps.cache-tools.outputs.cache-hit != 'true'
run: |
mkdir -p /tmp/zig
cd /tmp/zig
curl -Lo zig-linux-x86_64.tar.xz https://ziglang.org/builds/zig-linux-x86_64-0.15.0-dev.377+f01833e03.tar.xz
tar -Jxf zig-linux-x86_64.tar.xz -C /tmp/zig --strip-components=1
cargo install cargo-zigbuild cargo-xwin --force
cargo-test:
name: 🧪 Run Cargo Tests
needs: [setup]
runs-on: ubuntu-latest
steps:
- name: 📥 Checkout source
uses: https://github.com/actions/checkout@v4
- name: 🗄️ Restore Cargo cache
uses: https://github.com/actions/cache@v4
with:
path: |
~/.cargo/registry
~/.cargo/git
target
key: cargo-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
cargo-
- name: 🦀 Install Rust toolchain
if: steps.restore-cargo-cache.outputs.cache-hit != 'true'
uses: https://github.com/actions-rs/toolchain@v1
with:
toolchain: stable
components: rust-src
override: true
- name: 🌋 Install Vulkan tools
run: |
apt update
apt install -y vulkan-tools glslc
- name: 🚀 Run tests
uses: https://github.com/actions-rs/cargo@v1
with:
command: test
args: --release --all
build:
name: 🏗️ Build ${{ matrix.target }}
needs: [setup, cargo-test]
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
target:
- x86_64-unknown-linux-gnu
- aarch64-unknown-linux-gnu
- x86_64-pc-windows-msvc
include:
- target: x86_64-unknown-linux-gnu
binary_name: zenyx-x86_64-linux
ext: ""
command: zigbuild
args: --target x86_64-unknown-linux-gnu --release
- target: aarch64-unknown-linux-gnu
binary_name: zenyx-aarch64-linux
ext: ""
command: zigbuild
args: --target aarch64-unknown-linux-gnu --release
- target: x86_64-pc-windows-msvc
binary_name: zenyx-x86_64-windows-msvc.exe
ext: ".exe"
command: xwin
args: build --target x86_64-pc-windows-msvc --release
steps:
- name: 📥 Checkout source
uses: https://github.com/actions/checkout@v4
- name: 🗄️ Restore tool cache
uses: https://github.com/actions/cache@v4
with:
path: |
~/.cargo/bin
/tmp/zig
key: cargo-tools-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
cargo-tools-
- name: 📍 Add Zig to PATH
run: echo "/tmp/zig" >> $GITHUB_PATH
- name: 🎯 Install Rust target
uses: https://github.com/actions-rs/toolchain@v1
with:
toolchain: stable
target: ${{ matrix.target }}
override: true
- name: 🗄️ Restore Cargo cache
uses: https://github.com/actions/cache@v4
with:
path: |
~/.cargo/registry
~/.cargo/git
target
key: cargo-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
cargo-
- name: 🌋 Install Vulkan tools
run: |
apt update
apt install -y vulkan-tools glslc
- name: 🚀 Build release binary
uses: https://github.com/actions-rs/cargo@v1
with:
command: ${{ matrix.command }}
args: ${{ matrix.args }}
- name: 📦 Package artifact
run: |
mkdir -p artifacts
cp target/${{ matrix.target }}/release/zenyx${{ matrix.ext }} artifacts/${{ matrix.binary_name }}
- name: ⬆️ Upload artifact
uses: https://code.forgejo.org/forgejo/upload-artifact@v4
with:
name: ${{ matrix.binary_name }}.zip
path: artifacts

2
.gitattributes vendored Normal file
View file

@@ -0,0 +1,2 @@
*.obj filter=lfs diff=lfs merge=lfs -text
*.mtl filter=lfs diff=lfs merge=lfs -text

View file

@@ -1,47 +0,0 @@
name: Rust
on:
push:
branches: [ "main", "master" ]
pull_request:
branches: [ "main", "master" ]
env:
CARGO_TERM_COLOR: always
jobs:
build:
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
arch: [x86_64, aarch64]
include:
- arch: x86_64
target: x86_64-unknown-linux-gnu
- arch: aarch64
target: aarch64-unknown-linux-gnu
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
target: ${{ matrix.target }}
- name: Build
run: cargo build --release --target ${{ matrix.target }}
- name: Run tests
run: cargo test --verbose --target ${{ matrix.target }}
- name: Upload artifacts
uses: actions/upload-artifact@v3
with:
name: Zenyx-${{ runner.os }}-${{ matrix.arch }}-binary
path: |
target/${{ matrix.target }}/release/*.exe
target/${{ matrix.target }}/release/*[^.]*

11
.gitignore vendored
View file

@@ -1,4 +1,7 @@
/target
.idea
Cargo.lock
*.log
/target
.idea
# Cargo.lock
*.log
.direnv
**/result
.vscode/

0
CONTRIBUTING.md Normal file
View file

3778
Cargo.lock generated Normal file

File diff suppressed because it is too large.

60
Cargo.toml Normal file
View file

@@ -0,0 +1,60 @@
[package]
name = "zenyx"
version = "0.1.0"
edition = "2024"
authors = ["Caznix (Chance) <Caznix01@gmail.com>"]
description = "A memory safe, opinionated Game Engine/Framework, written in Rust."
keywords = ["engine", "graphics", "game"]
categories = ["game-development", "graphics"]
license = "MIT"
homepage = "https://zenyx-engine.github.io/"
repository = "https://codeberg.org/Caznix/Zenyx"
[lib]
crate-type = ["cdylib"]
[workspace]
resolver = "2"
members = ["subcrates/renderer", "subcrates/zlog"]
[workspace.dependencies]
zlog = { path = "subcrates/zlog" }
[profile.release]
lto = true
codegen-units = 1
panic = "abort"
split-debuginfo = "off"
[profile.dev]
debug = 0
[profile.mini]
inherits = "release"
opt-level = "z"
debug = false
strip = true
lto = true
codegen-units = 1
incremental = false
panic = "abort"
[dependencies]
bytemuck = "1.22.0"
cgmath = "0.18.0"
image = "0.25.6"
smol = "2.0.2"
winit = { version = "0.30.9" }
terminator = "0.3.2"
thiserror = "2.0.12"
tobj = "4.0.3"
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
vulkano = "0.35.1"
wgpu = { version = "25.0.0", features = ["spirv"] }
zlog.workspace = true
allocator-api2 = "0.2.21"
[target.aarch64-linux-android.dependencies]
winit = { version = "0.30.9", features = ["android-native-activity"] }

View file

@@ -1,21 +1,21 @@
# MIT License
## Copyright (c) 2024 Caznix
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.**
# MIT License
## Copyright (c) 2025 Nonsensical Dev
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.**

148
README.md
View file

@@ -0,0 +1,148 @@
<div align="center">
<img src="assets/Logo.svg" alt="Zenyx engine" width="350"/>
![Version](https://img.shields.io/badge/version-0.1.0-blue)
![License](https://img.shields.io/gitlab/license/Caznix/Zenyx)
![Build](https://img.shields.io/github/actions/workflow/status/Caznix/Zenyx/rust.yml?branch=main)
![platform](https://img.shields.io/badge/platform-windows%20%7C%20linux%20%7C%20macos-informational)
</div>
---
# What is Zenyx?
Zenyx is an in-development game engine leveraging the power of [wgpu](https://github.com/gfx-rs/wgpu) for graphics rendering and [winit](https://github.com/rust-windowing/winit) for window management. Built with Rust, Zenyx aims to be an intuitive and innovative game engine, designed to adapt to various workflows while introducing new possibilities for game development.
## Planned Features
> 🚀 Note: The following features represent the intended direction of Zenyx and are not fully implemented at this stage.
### **Zenyx is being designed with the following capabilities in mind:**
- ✨ **Cross-Platform Compatibility:** Targeting Windows, Linux, and macOS.
- ⚡ **Performance and Safety:** Built with Rust to provide both speed and memory safety.
- 🎯 **Intuitive and Extensible Design:** Focusing on user-friendly workflows and highly customizable tools.
- 🔧 **Modular Architecture:** Designed with a modular structure for straightforward customization.
- 🛠️ **Integrated Toolset:** Envisioning built-in tools for spatial audio and level editing.
- 📦 **Efficient Asset Management:** Developing a robust resource handling system.
- 🎲 **Performant Physics Engine:** Zenyx makes use of [Rapier2D](https://docs.rs/rapier2d/latest/rapier2d/) & [Rapier3D](https://docs.rs/rapier3d/latest/rapier3d/)
- 🗂️ **Advanced Scene Management:** Designing flexible tools for organizing game scenes.
### **Current Limitations:**
Currently, Zenyx faces certain limitations due to its early stage of development and the capabilities of its underlying rendering backend, wgpu:
- **No Hardware Ray Tracing:** As wgpu currently only supports standard rasterization pipelines, Zenyx inherits this limitation. Hardware ray tracing is not yet available. You can follow the progress of ray tracing support in wgpu [here](https://github.com/gfx-rs/wgpu/issues/6762).
- **Alpha software:** Zenyx is in the initial phases of development. It is not yet suitable for any practical use cases. Many of the features listed above are still goals and have not been implemented.
- **Limited macOS Support:** While macOS (and/or iOS) is a target platform, it currently requires MoltenVK due to wgpu's lack of native Metal support. This might introduce additional dependencies, potential compatibility issues, or generally unintended behavior on such platforms.
- **Incomplete Documentation and Tooling:** As the engine is under active development, comprehensive documentation and a full suite of built-in tools are not yet available; however, they are still planned for the near future.
## FAQ
<details>
<summary><b>What platforms does Zenyx support?</b></summary>
Zenyx primarily supports Windows and Linux, with secondary support for macOS. See the
[Platform support table](#what-platforms-will-be-supported-in-the-future) for more
information.
</details>
<details>
<summary><b>Is Zenyx ready for production use?</b></summary>
Zenyx is currently in early development and is not yet ready for any simple use
cases, but we're working hard to make it the best it can be before we release
1.0. If this interests you and you'd like to help, please check out
the [contribution section](CONTRIBUTING.md) for the ways you can help.
</details>
<details>
<summary><b>How can I contribute to Zenyx?</b></summary>
We welcome contributions! Please check our contribution guidelines and open a
pull request on GitHub. If you aren't a developer, you can also report bugs or
feature requests on our [issue tracker](https://codeberg.org/Zenyx/zenyx/issues).
For more information, please see the [Contributing section](#contributing).
</details>
<details>
<summary><b>What are the system requirements?</b></summary>
Detailed system requirements will be provided as the engine matures. Currently,
the only requirement is a modern OS and a system with at least a semi-recent GPU.
</details>
<details>
<summary><b>Is Zenyx free to use?</b></summary>
Yes, Zenyx is open-source software licensed under MIT. You can modify,
distribute, and use Zenyx for any purpose you wish.
</details>
## What platforms will be supported in the future?
| Platform | Support Priority | Status | Notes |
|:----------:|:----------------:|:------:|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Windows | Primary | ✅ | |
| Linux/*BSD | Primary | ✅ | |
| macOS      | Secondary        | ⚠️     | The current maintainers do not own any native macOS devices to perform tests with. |
| Android | Help wanted | ❓ | |
| iOS | Not planned | ❓ | |
| Web | TBD | ❓ | |
| Consoles   | Not planned      | ⛔     | Consoles require specific dev kits, proprietary licenses, and substantial fees that we (Nonsensical-dev, the Zenyx maintainers, and the open-source contributors) currently do not have the capacity for. This may change in the future but most likely will not be soon. |
# Documentation
## Getting Started
Zenyx is currently in a pre-release state and is not yet ready for general use.
Keep an eye on this section for updates on when you can start using Zenyx.
# Contributing
We encourage community involvement in the development of Zenyx! If you're interested in contributing,
please review our [Contribution Guide](CONTRIBUTING.md) for our coding standards and community guidelines.
You can contribute in various ways, even if you're not a developer. Spreading the word about Zenyx,
reporting any bugs you encounter, suggesting new features,
and helping us improve our documentation are all valuable contributions.
If you are interested in contributing code to Zenyx, please follow the platform-specific instructions below:
## Prerequisites (all platforms)
1. Ensure you have [Rust](https://www.rust-lang.org/tools/install) installed on your system.
2. Make sure you have [git](https://git-scm.com/downloads) installed for version control.
## Building
**1**. Clone the Zenyx repository from your favorite mirror:
git clone https://codeberg.org/Zenyx/zenyx
**2**. Navigate to the project directory:
cd ./Zenyx
**3**. Build the project using Cargo:
cargo run
# Press kit
### Text
<img src="assets/Logo.png" width="256" alt="Zenyx Typeface">
### Colored badges
<img src="assets/Badge.png" width="128" alt="Zenyx Logo">

BIN
assets/Badge.png Normal file

Binary file not shown (26 KiB).

9
assets/Badge.svg Normal file
View file

@@ -0,0 +1,9 @@
<svg width="256" height="256" viewBox="0 0 256 256" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M256 128C256 198.692 198.692 256 128 256C57.3075 256 0 198.692 0 128C0 57.3075 57.3075 0 128 0C198.692 0 256 57.3075 256 128Z" fill="#FFCD4E"/>
<mask id="mask0_117_566" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="12" y="12" width="232" height="232">
<circle cx="128" cy="128" r="116" fill="#E0A100"/>
</mask>
<g mask="url(#mask0_117_566)">
<path d="M107.569 201C97.5992 201 89.5173 192.918 89.5173 182.949C89.5173 180.278 90.1099 177.64 91.2524 175.226L114.486 126.134C123.278 107.558 109.729 86.1563 89.1777 86.1563H-13.9219C-22.8016 86.1563 -30 78.9578 -30 70.0781C-30 61.1984 -22.8016 54 -13.9219 54H147.456C157.426 54 165.508 62.0819 165.508 72.0515C165.508 74.7221 164.915 77.3596 163.773 79.7735L140.539 128.866C131.747 147.442 145.296 168.844 165.847 168.844H278.922C287.802 168.844 295 176.042 295 184.922C295 193.802 287.802 201 278.922 201H107.569Z" fill="black"/>
</g>
</svg>

After: 1,015 B

BIN
assets/Logo.png Normal file

Binary file not shown (16 KiB).

10
assets/Logo.svg Normal file
View file

@@ -0,0 +1,10 @@
<svg width="506" height="214" viewBox="0 0 506 214" fill="none" xmlns="http://www.w3.org/2000/svg">
<mask id="mask0_280_9" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="0" y="18" width="137" height="138">
<circle cx="68.5" cy="87" r="68.5" fill="#E0A100"/>
</mask>
<g mask="url(#mask0_280_9)">
<path d="M53.4665 133.076C47.5793 133.076 42.8068 128.304 42.8068 122.417C42.8068 120.84 43.1567 119.282 43.8314 117.857L57.9126 88.1036C62.9363 77.4889 55.194 65.2591 43.4505 65.2591H-18.2759C-23.5195 65.2591 -27.7703 61.0083 -27.7703 55.7647C-27.7703 50.5211 -23.5195 46.2703 -18.2758 46.2703H77.0208C82.908 46.2703 87.6805 51.0428 87.6805 56.93C87.6805 58.5071 87.3306 60.0645 86.6559 61.49L72.5746 91.243C67.551 101.858 75.2933 114.088 87.0368 114.088H154.653C159.897 114.088 164.148 118.338 164.148 123.582C164.148 128.826 159.897 133.076 154.653 133.076H53.4665Z" fill="white"/>
</g>
<path d="M138.064 51.44C138.064 49.4773 138.789 47.8133 140.24 46.448C141.776 45.0827 143.653 44.4 145.872 44.4H202.832C202.832 49.008 201.979 52.3787 200.272 54.512C198.651 56.6453 196.005 57.712 192.336 57.712H153.424V82.16H188.112C188.112 86.768 187.259 90.1387 185.552 92.272C183.931 94.4053 181.328 95.472 177.744 95.472H153.424V120.688H195.024C198.608 120.688 201.211 121.755 202.832 123.888C204.539 126.021 205.392 129.392 205.392 134H145.872C143.653 134 141.776 133.317 140.24 131.952C138.789 130.587 138.064 128.88 138.064 126.832V51.44ZM231.364 55.792C231.364 51.8667 232.601 48.7947 235.076 46.576C237.636 44.3573 241.135 43.248 245.572 43.248L290.884 113.52L292.036 112.496L287.3 104.688V43.248C292.335 43.248 296.132 44.2293 298.692 46.192C301.252 48.1547 302.532 51.0987 302.532 55.024V124.144C302.532 127.131 301.337 129.733 298.948 131.952C296.559 134.085 293.657 135.152 290.244 135.152L243.012 61.936L241.86 62.832L246.596 70.768V123.376C246.596 127.216 245.273 130.16 242.628 132.208C240.068 134.171 236.313 135.152 231.364 135.152V55.792ZM356.964 101.744L325.22 44.912C325.988 44.4853 327.14 44.1013 328.676 43.76C330.212 43.4187 331.62 43.248 332.9 43.248C335.716 43.248 338.063 43.8453 339.94 45.04C341.903 46.1493 343.567 47.9413 344.932 50.416L364.388 86.768L361.06 93.424L362.34 94.064L384.484 50.416C386.873 45.6373 390.884 43.248 396.516 43.248C397.796 43.248 399.204 43.4187 400.74 43.76C402.276 44.1013 403.428 44.4853 404.196 44.912L372.452 101.744V123.376C372.452 127.131 371.129 130.032 368.484 132.08C365.839 134.128 361.999 135.152 356.964 135.152V101.744ZM430.352 135.152C426.939 135.152 423.995 133.872 421.52 131.312L452.624 87.792L423.056 46.064C423.995 45.2107 425.189 44.528 426.64 44.016C428.091 43.504 429.627 43.248 431.248 43.248C433.467 43.248 435.515 43.632 437.392 44.4C439.355 45.168 440.763 46.192 441.616 47.472L460.304 75.248L456.976 79.856L458.128 80.624L480.656 47.472C481.595 46.192 483.003 45.168 484.88 44.4C486.757 43.632 488.848 43.248 491.152 43.248C492.688 43.248 494.181 43.504 495.632 44.016C497.083 44.528 498.277 45.2107 499.216 46.064L469.776 87.792L500.752 131.312C498.448 133.872 495.504 135.152 491.92 135.152C490.043 135.152 488.293 134.768 486.672 134C485.136 133.147 483.856 131.995 482.832 130.544L461.2 99.184L439.44 130.544C438.416 131.909 437.093 133.019 435.472 133.872C433.936 134.725 432.229 135.152 430.352 135.152Z" fill="white"/>
<path d="M125.52 156.488C125.52 155.293 125.968 154.291 126.864 153.48C127.803 152.627 128.933 152.2 130.256 152.2H158.864C158.864 155.016 158.395 157.064 157.456 158.344C156.517 159.624 155.003 160.264 152.912 160.264H134.928V170.376H151.696C151.696 173.192 151.227 175.24 150.288 176.52C149.349 177.8 147.835 178.44 145.744 178.44H134.928V188.936H154.256C156.304 188.936 157.797 189.576 158.736 190.856C159.675 192.136 160.144 194.184 160.144 197H130.256C128.933 197 127.803 196.595 126.864 195.784C125.968 194.973 125.52 193.949 125.52 192.712V156.488ZM172.295 158.984C172.295 156.723 173.063 154.931 174.599 153.608C176.135 152.285 178.204 151.624 180.807 151.624L201.351 184.136L202.055 183.56L199.175 178.76V151.624C202.204 151.624 204.508 152.2 206.087 153.352C207.666 154.504 208.455 156.189 208.455 158.408V191.368C208.455 192.435 208.114 193.459 207.431 194.44C206.791 195.379 205.916 196.147 204.807 196.744C203.698 197.299 202.524 197.576 201.287 197.576L179.399 162.952L178.695 163.528L181.575 168.328V190.792C181.575 193.011 180.786 194.696 179.207 195.848C177.628 197 175.324 197.576 172.295 197.576V158.984ZM240.194 197.576C236.354 197.576 232.919 196.829 229.89 195.336C226.861 193.843 224.493 191.773 222.786 189.128C221.122 186.44 220.29 183.389 220.29 179.976V169.096C220.29 165.683 221.101 162.653 222.722 160.008C224.386 157.363 226.69 155.315 229.634 153.864C232.621 152.371 236.013 151.624 239.81 151.624C243.607 151.624 246.999 152.2 249.986 153.352C252.973 154.461 255.277 156.019 256.898 158.024C258.562 160.029 259.394 162.333 259.394 164.936C259.394 166.6 258.839 167.923 257.73 168.904C256.621 169.843 255.106 170.312 253.186 170.312C252.077 170.312 250.989 170.163 249.922 169.864C250.05 168.968 250.114 167.901 250.114 166.664C250.114 164.616 249.154 162.952 247.234 161.672C245.314 160.349 242.839 159.688 239.81 159.688C236.823 159.688 234.391 160.563 232.514 162.312C230.637 164.061 229.698 166.323 229.698 169.096V179.976C229.698 182.792 230.658 185.096 232.578 186.888C234.541 188.637 237.079 189.512 240.194 189.512C243.181 189.512 245.613 188.829 247.49 187.464C249.367 186.099 250.306 184.349 250.306 182.216V180.744H245.634C243.885 180.744 242.455 180.061 241.346 178.696C240.279 177.331 239.746 175.56 239.746 173.384H255.298C256.578 173.384 257.623 173.789 258.434 174.6C259.245 175.368 259.65 176.392 259.65 177.672V182.408C259.65 185.352 258.818 187.976 257.154 190.28C255.533 192.584 253.25 194.376 250.306 195.656C247.362 196.936 243.991 197.576 240.194 197.576ZM273.062 158.472C273.062 156.339 273.873 154.675 275.494 153.48C277.158 152.243 279.484 151.624 282.47 151.624V190.728C282.47 192.904 281.638 194.589 279.974 195.784C278.353 196.979 276.049 197.576 273.062 197.576V158.472ZM297.183 158.984C297.183 156.723 297.951 154.931 299.487 153.608C301.023 152.285 303.092 151.624 305.695 151.624L326.239 184.136L326.943 183.56L324.063 178.76V151.624C327.092 151.624 329.396 152.2 330.975 153.352C332.553 154.504 333.343 156.189 333.343 158.408V191.368C333.343 192.435 333.001 193.459 332.319 194.44C331.679 195.379 330.804 196.147 329.695 196.744C328.585 197.299 327.412 197.576 326.175 197.576L304.287 162.952L303.583 163.528L306.463 168.328V190.792C306.463 193.011 305.673 194.696 304.095 195.848C302.516 197 300.212 197.576 297.183 197.576V158.984ZM346.77 156.488C346.77 155.293 347.218 154.291 348.114 153.48C349.053 152.627 350.183 152.2 351.506 152.2H380.114C380.114 155.016 379.645 157.064 378.706 158.344C377.767 159.624 376.253 160.264 374.162 160.264H356.178V170.376H372.946C372.946 173.192 
372.477 175.24 371.538 176.52C370.599 177.8 369.085 178.44 366.994 178.44H356.178V188.936H375.506C377.554 188.936 379.047 189.576 379.986 190.856C380.925 192.136 381.394 194.184 381.394 197H351.506C350.183 197 349.053 196.595 348.114 195.784C347.218 194.973 346.77 193.949 346.77 192.712V156.488Z" fill="white"/>
</svg>

After: 7 KiB

BIN
assets/Pumpkin.mtl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
assets/Pumpkin.obj (Stored with Git LFS) Normal file

Binary file not shown.

26
build.rs Normal file
View file

@@ -0,0 +1,26 @@
use std::{env, process::Command};
fn main() {
println!("cargo::rerun-if-changed=shaders");
let outdir = env::var("OUT_DIR").unwrap();
let vert = Command::new("glslc")
.args(["shaders/shader.vert", "-o", &format!("{outdir}/vert.spv")])
.output()
.expect("Failed to execute 'glslc'");
let frag = Command::new("glslc")
.args(["shaders/shader.frag", "-o", &format!("{outdir}/frag.spv")])
.output()
.expect("Failed to execute 'glslc'");
if !vert.status.success() {
panic!(
"Failed to compile vertex shader: {}",
String::from_utf8(vert.stderr).unwrap()
)
}
if !frag.status.success() {
panic!(
"Failed to compile fragment shader: {}",
String::from_utf8(frag.stderr).unwrap()
)
}
}
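
The build script above writes the compiled SPIR-V into OUT_DIR as vert.spv and frag.spv. A minimal sketch of how those artifacts could be embedded at compile time; this is illustrative only, since the renderer's actual shader-loading code is not part of this diff:

// Embed the SPIR-V produced by build.rs. OUT_DIR is set by Cargo for crates
// with a build script, and the file names match the glslc outputs above.
// Hypothetical helper; the real renderer may load shaders differently.
fn embedded_shaders() -> (&'static [u8], &'static [u8]) {
    let vert: &'static [u8] = include_bytes!(concat!(env!("OUT_DIR"), "/vert.spv"));
    let frag: &'static [u8] = include_bytes!(concat!(env!("OUT_DIR"), "/frag.spv"));
    (vert, frag)
}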

View file

@@ -1,7 +0,0 @@
[workspace]
resolver = "2"
members = [
"engine",
"editor",
"xtask",
]

75
default.nix Normal file
View file

@ -0,0 +1,75 @@
{ lib,
rustPlatform,
nix-gitignore,
bash,
makeWrapper,
dav1d,
vulkan-loader,
libGL,
wayland,
pkg-config,
libxkbcommon,
pkgs,
stdenv,
targetPackages ? pkgs,
}: let
version = (builtins.fromTOML (builtins.readFile ./engine/Cargo.toml)).package.version;
src = nix-gitignore.gitignoreSource [] ./.;
in
rustPlatform.buildRustPackage rec {
pname = "zenyx";
inherit src version;
cargoLock.lockFile = ./Cargo.lock;
nativeBuildInputs = [
pkg-config
] ++ lib.optionals stdenv.targetPlatform.isDarwin [
targetPackages.darwin.apple_sdk.frameworks.CoreServices
];
buildInputs = with targetPackages; [
dav1d
] ++ lib.optionals (stdenv.targetPlatform.isLinux || stdenv.targetPlatform.isWindows) [
vulkan-loader
] ++ lib.optionals stdenv.targetPlatform.isLinux [
makeWrapper
wayland
libxkbcommon
libGL
xorg.libXcursor
xorg.libXrandr
xorg.libXi
xorg.libX11
xorg.libxcb
bash
] ++ lib.optionals stdenv.targetPlatform.isDarwin [
makeWrapper
darwin.apple_sdk.frameworks.Cocoa
darwin.apple_sdk.frameworks.Metal
darwin.apple_sdk.frameworks.CoreVideo
darwin.apple_sdk.frameworks.QuartzCore
];
CARGO_TARGET_X86_64_PC_WINDOWS_GNU_LINKER =
lib.optionalString stdenv.targetPlatform.isWindows "${stdenv.cc.targetPrefix}gcc";
NIX_LDFLAGS = lib.optionalString stdenv.targetPlatform.isDarwin "-framework CoreFoundation";
postInstall = lib.optionalString stdenv.targetPlatform.isLinux ''
wrapProgram $out/bin/${pname} \
--prefix PATH : ${lib.makeBinPath [ bash ]} \
--set LD_LIBRARY_PATH ${lib.makeLibraryPath buildInputs}
'' + lib.optionalString stdenv.targetPlatform.isWindows ''
mkdir -p $out/bin
cp ${targetPackages.vulkan-loader}/bin/vulkan-1.dll $out/bin/
'';
doCheck = false;
meta = {
description = "Cross-platform WSYWIG Game Engine";
license = lib.licenses.mit;
platforms = lib.platforms.all;
mainProgram = "zenyx";
};
}

View file

@@ -1,3 +0,0 @@
fn main() {
println!("editor")
}

1
engine/.gitignore vendored
View file

@@ -1 +0,0 @@
/target

View file

@@ -1,16 +0,0 @@
[package]
name = "zenyx"
version = "0.1.0"
edition = "2021"
[dependencies]
chrono = "0.4.38"
colored = "2.1.0"
lazy_static = "1.5.0"
#log = "0.4.22"
log2 = "0.1.14"
parking_lot = "0.12.3"
reedline = "0.37.0"
regex = "1.11.1"
thiserror = "2.0.3"
tokio = { version = "1.41.1", features = ["macros", "rt", "rt-multi-thread"] }

View file

@@ -1,36 +0,0 @@
use std::process::Command;
use log2::{debug, info};
use crate::core::repl::COMMAND_LIST;
pub fn say_hello() {
println!("Hello from your new command!");
}
pub fn echo(args: Vec<String>) {
debug!("{}", args.join(" "));
println!("{}", args.join(" "))
}
pub fn exit() {
debug!("Exiting...");
std::process::exit(0)
}
pub fn clear() {
info!("Clearing screen..., running command");
let _result = if cfg!(target_os = "windows") {
debug!("target_os is windows");
Command::new("cmd").args(["/c", "cls"]).spawn()
} else {
debug!("target_os was unix");
// "clear" or "tput reset"
Command::new("tput").arg("reset").spawn()
};
}
pub fn cmds() {
println!("Commands:");
for cmd in COMMAND_LIST.commands.read().iter() {
println!("{:#}", cmd);
}
}

View file

@@ -1,3 +0,0 @@
pub mod commands;
pub mod repl;
pub mod splash;

View file

@@ -1,248 +0,0 @@
use super::commands;
use chrono::Local;
use lazy_static::lazy_static;
use log2::{debug, error, info};
use parking_lot::RwLock;
use reedline::{Prompt, Reedline, Signal};
use regex::Regex;
use std::{borrow::Borrow, collections::HashMap, sync::Arc};
struct ZPrompt {
left_text: String,
right_text: String,
}
#[derive(Clone, Debug)]
enum Callable {
Simple(fn()),
WithArgs(fn(Vec<String>)),
}
#[derive(Debug)]
pub struct Command {
pub name: &'static str,
pub description: Option<&'static str>,
function: Callable,
pub arg_count: u8,
}
impl Command {
pub fn execute(&self, args: Option<Vec<String>>) {
//debug!("Executing command: {}", self.name);
match &self.function {
Callable::Simple(f) => {
if let Some(args) = args {
error!(
"Command expected 0 arguments but {} args were given. Ignoring..",
args.len()
);
}
f()
}
Callable::WithArgs(f) => match args {
Some(args) => f(args),
None => error!("Command expected arguments but received 0"),
},
}
}
}
impl std::fmt::Display for Command {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"Name: {}\n\t{}",
self.name,
self.description.unwrap_or("No description")
)
}
}
lazy_static! {
pub static ref COMMAND_LIST: Arc<CommandList> = Arc::new(CommandList::new());
}
pub struct CommandList {
pub commands: RwLock<Vec<Command>>,
pub aliases: RwLock<HashMap<String, String>>,
}
impl CommandList {
fn new() -> Self {
CommandList {
commands: RwLock::new(Vec::new()),
aliases: RwLock::new(HashMap::new()),
}
}
fn add_command(
&self,
name: &'static str,
description: Option<&'static str>,
func: Callable,
arg_count: Option<u8>,
) {
debug!("Adding command: {}", name);
let mut commands = self.commands.write();
commands.push(Command {
name,
description,
function: func,
arg_count: arg_count.unwrap_or(0),
});
}
fn add_alias(&self, name: String, alias: String) {
//println!("Input alias: {}", alias);
if self.aliases.read().contains_key(&alias) {
error!("Alias: '{}' already exists", alias);
return;
}
let mut commands = self.commands.write();
if let Some(command) = commands.iter_mut().find(|cmd| cmd.name == name) {
info!("Adding alias: {} for cmd: {}", alias, command.name);
self.aliases
.write()
.insert(alias.to_string(), name.to_string());
} else {
error!("Command: '{}' was not found", name);
}
}
fn execute_command(&self, mut name: String, args: Option<Vec<String>>) {
//info!("received input command: {}", name);
let commands = self.commands.borrow();
if self.aliases.read().contains_key(&name) {
name = self
.aliases
.read()
.get_key_value(&name)
.unwrap()
.1
.to_string();
debug!("changed to {}", name);
}
if let Some(command) = commands.read().iter().find(|cmd| cmd.name == name) { match (command.arg_count, args.as_ref()) {
(expected, Some(args_vec)) if args_vec.len() != expected as usize => {
eprintln!(
"Command: '{}' expected {} arguments but received {}",
name,
expected,
args_vec.len()
);
}
(_, _) => command.execute(args),
} }
}
}
impl Prompt for ZPrompt {
fn render_prompt_left(&self) -> std::borrow::Cow<str> {
std::borrow::Cow::Borrowed(&self.left_text)
}
fn render_prompt_right(&self) -> std::borrow::Cow<str> {
std::borrow::Cow::Borrowed(&self.right_text)
}
fn render_prompt_history_search_indicator(
&self,
_history_search: reedline::PromptHistorySearch,
) -> std::borrow::Cow<str> {
std::borrow::Cow::Borrowed("")
}
fn render_prompt_indicator(
&self,
prompt_mode: reedline::PromptEditMode,
) -> std::borrow::Cow<str> {
match prompt_mode {
reedline::PromptEditMode::Default => std::borrow::Cow::Borrowed(">>"),
reedline::PromptEditMode::Emacs => {
let timestamp = Local::now().format("[%H:%M:%S.%3f/SHELL] >>\t").to_string();
std::borrow::Cow::Owned(timestamp)
}
reedline::PromptEditMode::Vi(_) => std::borrow::Cow::Borrowed("vi>>"),
reedline::PromptEditMode::Custom(_) => std::borrow::Cow::Borrowed("custom>>"),
}
}
fn render_prompt_multiline_indicator(&self) -> std::borrow::Cow<str> {
std::borrow::Cow::Borrowed("><")
}
}
fn setup() {
COMMAND_LIST.add_command(
"hello",
Some("test"),
Callable::Simple(commands::say_hello),
None,
);
COMMAND_LIST.add_command("exit", None, Callable::Simple(commands::exit), None);
COMMAND_LIST.add_command("clear", None, Callable::Simple(commands::clear), None);
COMMAND_LIST.add_command("echo", None, Callable::WithArgs(commands::echo), Some(1));
COMMAND_LIST.add_command("cmds", None, Callable::Simple(commands::cmds), None);
COMMAND_LIST.add_alias("cmds".to_string(), "help".to_string());
COMMAND_LIST.add_alias("cmds".to_string(), "cmd_list".to_string());
COMMAND_LIST.add_alias("hello".to_string(), "exit".to_string());
COMMAND_LIST.add_alias("clear".to_string(), "exit".to_string());
}
pub async fn handle_repl() {
let mut line_editor = Reedline::create();
setup();
loop {
let sig = line_editor.read_line(&ZPrompt {
left_text: String::new(),
right_text: "<<".to_string(),
});
match sig {
Ok(Signal::Success(buffer)) => {
if buffer == "exit" {
std::process::exit(0);
} else {
evaluate_command(&buffer);
}
}
Ok(Signal::CtrlC) => {
println!("\nCONTROL+C RECEIVED, TERMINATING");
std::process::exit(0);
}
err => {
eprintln!("Error: {:?}", err);
}
}
}
}
fn evaluate_command(input: &str) {
if input.trim().is_empty() {
return;
}
let pattern = Regex::new(r"[;|\n]").unwrap();
let commands: Vec<&str> = pattern.split(input).collect();
for command in commands {
let command = command.trim();
if command.is_empty() {
println!("Empty command, skipping.");
continue;
}
let tokens: Vec<&str> = command.split_whitespace().collect();
if tokens.is_empty() {
return;
}
let cmd_name = tokens[0];
let args: Vec<String> = tokens[1..].iter().map(|&s| s.to_string()).collect();
COMMAND_LIST.execute_command(
cmd_name.to_string(),
if args.is_empty() { None } else { Some(args) },
);
}
}

View file

@@ -1,26 +0,0 @@
use colored::Colorize;
pub fn print_splash() {
println!(
r#"
&&&&&&&&&&&
&&&&&&&&&&&&&&&&&
&&&&&&&&&&&&&&&&&&&&&
&& &&&&&&&&&
&& &&&&&&&&&
&&&&&&&&&&&& &&&&&&&&&&&
&&&&&&&&&&&&& &&&&&&&&&&&&
&&&&&&&&&&&&& &&&&&&&&&&&&&
&&&&&&&&&&&& &&&&&&&&&&&&&
&&&&&&&&&&& &&&&&&&&&&&&
&&&&&&&&& &&
&&&&&&&&& &&
&&&&&&&&&&&&&&&&&&&&&
&&&&&&&&&&&&&&&&&
&&&&&&&&&&&
Version: {}
"#,
env!("CARGO_PKG_VERSION").yellow().italic().underline()
);
}

View file

@@ -1,21 +0,0 @@
use std::io;
use log2::info;
pub mod core;
#[tokio::main]
async fn main() -> Result<(), io::Error> {
let _log2 = log2::open("z.log").tee(true).level("trace").start();
info!("Initalizing Engine");
let shell_thread = tokio::task::spawn(async {
info!("Shell thread started");
core::repl::handle_repl().await;
}
);
core::splash::print_splash();
info!("Engine Initalized");
shell_thread.await?;
Ok(())
}

190
flake.lock generated Normal file
View file

@@ -0,0 +1,190 @@
{
"nodes": {
"alejandra": {
"inputs": {
"fenix": "fenix",
"flakeCompat": "flakeCompat",
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1730688725,
"narHash": "sha256-g0SSfTWZ5mtMOpQic+eqq9sXMy1E/7yKxxfupZd9V4A=",
"owner": "kamadorueda",
"repo": "alejandra",
"rev": "2bb91e309ca99656addff5c74545acbf5813636d",
"type": "github"
},
"original": {
"owner": "kamadorueda",
"ref": "3.1.0",
"repo": "alejandra",
"type": "github"
}
},
"fenix": {
"inputs": {
"nixpkgs": [
"alejandra",
"nixpkgs"
],
"rust-analyzer-src": "rust-analyzer-src"
},
"locked": {
"lastModified": 1730615655,
"narHash": "sha256-2HBR3zLn57LXKNRtxBb+O+uDqHM4n0pz51rPayMl4cg=",
"owner": "nix-community",
"repo": "fenix",
"rev": "efeb50e2535b17ffd4a135e6e3e5fd60a525180c",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "fenix",
"type": "github"
}
},
"flake-compat": {
"locked": {
"lastModified": 1733328505,
"narHash": "sha256-NeCCThCEP3eCl2l/+27kNNK7QrwZB1IJCrXfrbv5oqU=",
"rev": "ff81ac966bb2cae68946d5ed5fc4994f96d0ffec",
"revCount": 69,
"type": "tarball",
"url": "https://api.flakehub.com/f/pinned/edolstra/flake-compat/1.1.0/01948eb7-9cba-704f-bbf3-3fa956735b52/source.tar.gz"
},
"original": {
"type": "tarball",
"url": "https://flakehub.com/f/edolstra/flake-compat/1.tar.gz"
}
},
"flakeCompat": {
"flake": false,
"locked": {
"lastModified": 1696426674,
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1744932701,
"narHash": "sha256-fusHbZCyv126cyArUwwKrLdCkgVAIaa/fQJYFlCEqiU=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "b024ced1aac25639f8ca8fdfc2f8c4fbd66c48ef",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1744536153,
"narHash": "sha256-awS2zRgF4uTwrOKwwiJcByDzDOdo3Q1rPZbiHQg/N38=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "18dd725c29603f582cf1900e0d25f9f1063dbf11",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"alejandra": "alejandra",
"flake-compat": "flake-compat",
"nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay",
"utils": "utils"
}
},
"rust-analyzer-src": {
"flake": false,
"locked": {
"lastModified": 1730555913,
"narHash": "sha256-KNHZUlqsEibg3YtfUyOFQSofP8hp1HKoY+laoesBxRM=",
"owner": "rust-lang",
"repo": "rust-analyzer",
"rev": "f17a5bbfd0969ba2e63a74505a80e55ecb174ed9",
"type": "github"
},
"original": {
"owner": "rust-lang",
"ref": "nightly",
"repo": "rust-analyzer",
"type": "github"
}
},
"rust-overlay": {
"inputs": {
"nixpkgs": "nixpkgs_2"
},
"locked": {
"lastModified": 1744943606,
"narHash": "sha256-VL4swGy4uBcHvX+UR5pMeNE9uQzXfA7B37lkwet1EmA=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "ec22cd63500f4832d1f3432d2425e0b31b0361b1",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

87
flake.nix Normal file
View file

@@ -0,0 +1,87 @@
{
description = "Zenyx - A WSYWIG game engine written in rust ";
inputs = {
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
utils.url = "github:numtide/flake-utils";
rust-overlay.url = "github:oxalica/rust-overlay";
flake-compat.url = "https://flakehub.com/f/edolstra/flake-compat/1.tar.gz";
alejandra.url = "github:kamadorueda/alejandra/3.1.0";
alejandra.inputs.nixpkgs.follows = "nixpkgs";
};
outputs = {
self,
nixpkgs,
utils,
rust-overlay,
alejandra,
...
}:
{
overlays.default = final: prev: {
zenyx = final.callPackage ./default.nix {};
};
}
// utils.lib.eachDefaultSystem (
system: let
pkgs = import nixpkgs {
inherit system;
overlays = [
self.overlays.default
(import rust-overlay)
];
};
nativeBuildInputs = with pkgs; [
vulkan-tools
vulkan-tools-lunarg
shaderc
cargo-xbuild
cargo-pgo
cargo-cross
cargo-xwin
cargo-wizard
wine64
pkg-config
];
buildInputs = with pkgs; [
vulkan-loader
libGL
wayland
libxkbcommon
xorg.libXcursor
xorg.libXrandr
xorg.libXi
xorg.libX11
xorg.libxcb
];
in {
packages = {
inherit (pkgs) zenyx;
default = pkgs.zenyx;
windows = let
pkgsCross = import nixpkgs {
system = "x86_64-linux";
crossSystem = nixpkgs.lib.systems.examples.mingwW64;
};
in pkgsCross.callPackage ./default.nix {};
};
devShells.default = pkgs.mkShell {
name = "zenyx";
nativeBuildInputs = with pkgs; [
(rust-bin.stable.latest.default.override {
extensions = ["rust-src" "cargo" "rustfmt" "clippy"];
targets = [ "x86_64-pc-windows-msvc" "x86_64-unknown-linux-gnu" ];
})
pkg-config
] ++ nativeBuildInputs;
buildInputs = buildInputs;
LD_LIBRARY_PATH= pkgs.lib.makeLibraryPath buildInputs;
};
formatter = alejandra.packages.${system}.default;
}
);
}

7
rustfmt.toml Normal file
View file

@@ -0,0 +1,7 @@
indent_style = "Block"
wrap_comments = true
format_code_in_doc_comments = true
trailing_comma = "Vertical"
group_imports = "StdExternalCrate"
reorder_impl_items = true
unstable_features = true

10
shaders/shader.frag Normal file
View file

@ -0,0 +1,10 @@
#version 450
layout(location = 0) in vec2 tex_coords;
layout(set = 1, binding = 0) uniform texture2D t_diffuse;
layout(set = 1, binding = 1) uniform sampler s_diffuse;
layout(location = 0) out vec4 out_color;
// layout(group = 0, binding = 0) out texture2D;
void main() {
out_color = texture(sampler2D(t_diffuse, s_diffuse), tex_coords);
}

15
shaders/shader.vert Normal file
View file

@ -0,0 +1,15 @@
#version 450
layout(location = 0) in vec3 position;
layout(location = 1) in vec3 color;
layout(location = 3) in vec2 tex_coords;
layout(location = 0) out vec2 tex_coord;
layout(set = 0, binding = 0) uniform UniformBufferObject {
mat4x4 projection;
} view;
void main() {
gl_Position = view.projection * vec4(position, 1.0);
tex_coord = tex_coords;
// gl_Position
// out_color = color;
}

15
shell.nix Normal file
View file

@ -0,0 +1,15 @@
(
import
(
let
lock = builtins.fromJSON (builtins.readFile ./flake.lock);
nodeName = lock.nodes.root.inputs.flake-compat;
in
fetchTarball {
url = lock.nodes.${nodeName}.locked.url or "https://github.com/edolstra/flake-compat/archive/${lock.nodes.${nodeName}.locked.rev}.tar.gz";
sha256 = lock.nodes.${nodeName}.locked.narHash;
}
)
{src = ./.;}
)
.shellNix

49
src/camera.rs Normal file
View file

@ -0,0 +1,49 @@
pub struct Camera {
pub eye: cgmath::Point3<f32>,
pub target: cgmath::Point3<f32>,
pub up: cgmath::Vector3<f32>,
pub aspect: f32,
pub fovy: f32,
pub znear: f32,
pub zfar: f32,
}
impl Camera {
fn build_view_projection_matrix(&self) -> cgmath::Matrix4<f32> {
let view = cgmath::Matrix4::look_at_rh(self.eye, self.target, self.up);
let proj = cgmath::perspective(cgmath::Deg(self.fovy), self.aspect, self.znear, self.zfar);
OPENGL_TO_WGPU_MATRIX * proj * view
}
pub fn update_aspect(&mut self, aspect: f32) {
self.aspect = aspect;
}
}
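// cgmath, like OpenGL, produces clip-space depth in [-1, 1], while wgpu expects [0, 1];
// this constant remaps the depth range when building the view-projection matrix above.
// Note that cgmath::Matrix4::new takes its arguments in column-major order.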
pub const OPENGL_TO_WGPU_MATRIX: cgmath::Matrix4<f32> = cgmath::Matrix4::new(
1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.5, 0.0, 0.0, 0.0, 1.0,
);
unsafe impl bytemuck::Pod for CameraUniform {}
unsafe impl bytemuck::Zeroable for CameraUniform {}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct CameraUniform {
view_proj: cgmath::Matrix4<f32>,
}
impl Default for CameraUniform {
fn default() -> CameraUniform {
use cgmath::SquareMatrix;
Self {
view_proj: cgmath::Matrix4::identity(),
}
}
}
impl CameraUniform {
pub fn update_view_proj(&mut self, camera: &Camera) {
self.view_proj = camera.build_view_projection_matrix();
}
}

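A minimal usage sketch of the camera types above (not part of the original source; the values are illustrative and the snippet is a fragment that assumes the Camera and CameraUniform types from this module plus the cgmath and bytemuck crates):
let mut camera = Camera {
    eye: (0.0, 0.0, 2.0).into(),
    target: (0.0, 0.0, 0.0).into(),
    up: cgmath::Vector3::unit_y(),
    aspect: 16.0 / 9.0,
    fovy: 45.0,
    znear: 0.1,
    zfar: 1000.0,
};
let mut uniform = CameraUniform::default();
uniform.update_view_proj(&camera);
// After a window resize, update the aspect ratio and recompute before re-uploading:
camera.update_aspect(1280.0 / 720.0);
uniform.update_view_proj(&camera);
// The uniform can then be written to the GPU, e.g. queue.write_buffer(&camera_buffer, 0, bytemuck::bytes_of(&uniform)).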
3
src/collections/mod.rs Normal file
View file

@ -0,0 +1,3 @@
mod sparse_set;
pub use sparse_set::SparseSet;

View file

@ -0,0 +1,273 @@
use core::num::NonZeroUsize;
use allocator_api2::{
alloc::{Allocator, Global},
boxed::Box,
vec::Vec,
};
use bytemuck::Contiguous;
const SPARSE_PAGESIZE: usize = (1 << 10) * 4;
type SparsePage<A> = Option<(Box<[Option<NonZeroUsize>; SPARSE_PAGESIZE], A>, usize)>;
pub struct SparseSet<T, PackedAlloc = Global, SparseAlloc = Global>
where
PackedAlloc: Allocator,
SparseAlloc: Allocator,
{
sparse: Vec<SparsePage<SparseAlloc>, SparseAlloc>,
dense: Vec<T, PackedAlloc>,
dense_to_id: Vec<usize, SparseAlloc>,
}
impl<T> SparseSet<T> {
pub const fn new() -> Self {
Self {
sparse: Vec::new(),
dense: Vec::new(),
dense_to_id: Vec::new(),
}
}
}
impl<T, PackedAlloc, SparseAlloc> SparseSet<T, PackedAlloc, SparseAlloc>
where
PackedAlloc: Allocator,
SparseAlloc: Allocator + Clone,
{
pub fn insert(&mut self, id: usize, value: T) -> Option<T> {
match self.get_dense_idx(id) {
Some(idx) => {
let previous = core::mem::replace(&mut self.dense[idx], value);
self.dense_to_id[idx] = id;
Some(previous)
}
None => {
self.increase_page_usage_count(id);
self.set_dense_idx(id, Some(self.dense.len()));
self.dense.push(value);
self.dense_to_id.push(id);
None
}
}
}
pub fn get(&self, id: usize) -> Option<&T> {
self.dense.get(self.get_dense_idx(id)?)
}
pub fn get_mut(&mut self, id: usize) -> Option<&mut T> {
let idx = self.get_dense_idx(id)?;
self.dense.get_mut(idx)
}
fn set_dense_idx(&mut self, id: usize, idx: Option<usize>) {
let page = id / SPARSE_PAGESIZE;
let sparse_index = id % SPARSE_PAGESIZE;
if page >= self.sparse.len() {
self.sparse.resize(page + 1, None);
}
if self.sparse[page].is_none() {
self.sparse[page] = Some((
Box::new_in([None; SPARSE_PAGESIZE], self.sparse.allocator().clone()),
1,
))
}
match &mut self.sparse[page] {
Some(page) => {
page.0[sparse_index] = idx.map(|i| NonZeroUsize::new(i + 1).unwrap());
}
None => unreachable!("wtf, failed to init sparse page 5 lines above??"),
}
}
pub fn get_dense_idx(&self, id: usize) -> Option<usize> {
let page = id / SPARSE_PAGESIZE;
let sparse_index = id % SPARSE_PAGESIZE;
let page = self.sparse.get(page)?.as_ref()?;
page.0[sparse_index].map(|idx| idx.into_integer() - 1)
}
fn reduce_page_usage_count(&mut self, id: usize) {
let page = id / SPARSE_PAGESIZE;
let Some(usage) = &mut self.sparse[page] else {
return;
};
usage.1 -= 1;
let usage = usage.1;
if usage == 0 {
self.sparse[page] = None;
}
}
fn increase_page_usage_count(&mut self, id: usize) {
let page = id / SPARSE_PAGESIZE;
if page >= self.sparse.len() {
return;
}
let Some(usage) = &mut self.sparse[page] else {
return;
};
usage.1 += 1;
}
pub fn remove(&mut self, id: usize) -> Option<T> {
let index = self.get_dense_idx(id)?;
if self.dense.is_empty() {
return None;
}
self.set_dense_idx(*self.dense_to_id.last().unwrap(), Some(index));
self.set_dense_idx(id, None);
self.reduce_page_usage_count(id);
let previous = self.dense.swap_remove(index);
self.dense_to_id.swap_remove(index);
Some(previous)
}
pub fn is_empty(&self) -> bool {
self.len() == 0
}
pub fn len(&self) -> usize {
self.dense.len()
}
pub fn contains(&self, id: usize) -> bool {
self.get_dense_idx(id).is_some()
}
pub fn keys(&self) -> &[usize] {
&self.dense_to_id
}
pub fn values(&self) -> &[T] {
&self.dense
}
pub fn new_in(packed_alloc: PackedAlloc, sparse_alloc: SparseAlloc) -> Self {
Self {
dense: Vec::new_in(packed_alloc),
sparse: Vec::new_in(sparse_alloc.clone()),
dense_to_id: Vec::new_in(sparse_alloc),
}
}
}
impl<T, PackedAlloc> SparseSet<T, PackedAlloc>
where
PackedAlloc: Allocator,
{
pub const fn new_in_packed(packed_alloc: PackedAlloc) -> Self {
Self {
sparse: Vec::new(),
dense: Vec::new_in(packed_alloc),
dense_to_id: Vec::new(),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn insert() {
let mut sparse_set = SparseSet::<u32>::new();
sparse_set.insert(10, 1);
assert_eq!(sparse_set.keys(), &[10]);
assert_eq!(sparse_set.values(), &[1]);
assert_eq!(
sparse_set.sparse[0].as_ref().unwrap().0[10].unwrap(),
NonZeroUsize::new(1).unwrap()
);
assert_eq!(sparse_set.sparse[0].as_ref().unwrap().1, 1);
assert_eq!(sparse_set.insert(10, 2).unwrap(), 1);
assert_eq!(sparse_set.values(), &[2]);
assert_eq!(sparse_set.sparse[0].as_ref().unwrap().1, 1);
sparse_set.insert(11, 4);
assert_eq!(sparse_set.keys(), &[10, 11]);
assert_eq!(sparse_set.values(), &[2, 4]);
assert_eq!(
sparse_set.sparse[0].as_ref().unwrap().0[11].unwrap(),
NonZeroUsize::new(2).unwrap()
);
assert_eq!(sparse_set.sparse[0].as_ref().unwrap().1, 2);
sparse_set.insert(5000, 3);
assert_eq!(sparse_set.keys(), &[10, 11, 5000]);
assert_eq!(sparse_set.values(), &[2, 4, 3]);
assert_eq!(
sparse_set.sparse[5000 / SPARSE_PAGESIZE]
.as_ref()
.unwrap()
.0[5000 % SPARSE_PAGESIZE]
.unwrap(),
NonZeroUsize::new(3).unwrap()
);
assert_eq!(
sparse_set.sparse[5000 / SPARSE_PAGESIZE]
.as_ref()
.unwrap()
.1,
1
);
assert_eq!(*sparse_set.get(10).unwrap(), 2);
assert_eq!(*sparse_set.get(11).unwrap(), 4);
assert_eq!(*sparse_set.get(5000).unwrap(), 3);
}
#[test]
fn remove() {
let mut sparse_set = SparseSet::<u32>::new();
sparse_set.insert(10, 1);
sparse_set.insert(11, 2);
sparse_set.insert(12, 2);
sparse_set.insert(SPARSE_PAGESIZE, 1);
sparse_set.insert(SPARSE_PAGESIZE + 1, 2);
sparse_set.insert(SPARSE_PAGESIZE + 2, 3);
assert_eq!(sparse_set.remove(SPARSE_PAGESIZE + 2).unwrap(), 3);
assert_eq!(sparse_set.sparse[1].as_ref().unwrap().1, 2);
assert_eq!(sparse_set.keys(), [10, 11, 12, SPARSE_PAGESIZE, SPARSE_PAGESIZE + 1]);
assert_eq!(sparse_set.values(), [1, 2, 2, 1, 2]);
assert_eq!(sparse_set.remove(SPARSE_PAGESIZE + 1).unwrap(), 2);
assert_eq!(sparse_set.sparse[1].as_ref().unwrap().1, 1);
assert_eq!(sparse_set.keys(), [10, 11, 12, SPARSE_PAGESIZE]);
assert_eq!(sparse_set.values(), [1, 2, 2, 1]);
assert_eq!(sparse_set.remove(SPARSE_PAGESIZE).unwrap(), 1);
assert!(sparse_set.sparse[1].is_none());
assert_eq!(sparse_set.keys(), [10, 11, 12]);
assert_eq!(sparse_set.values(), [1, 2, 2]);
sparse_set.insert(SPARSE_PAGESIZE, 1);
sparse_set.insert(SPARSE_PAGESIZE + 1, 2);
sparse_set.insert(SPARSE_PAGESIZE + 2, 3);
assert_eq!(sparse_set.remove(10).unwrap(), 1);
assert_eq!(sparse_set.sparse[0].as_ref().unwrap().1, 2);
// swap-remove
assert_eq!(sparse_set.keys(), [SPARSE_PAGESIZE + 2, 11, 12, SPARSE_PAGESIZE, SPARSE_PAGESIZE + 1]);
assert_eq!(sparse_set.values(), [3, 2, 2, 1, 2]);
assert_eq!(sparse_set.remove(11).unwrap(), 2);
assert_eq!(sparse_set.sparse[0].as_ref().unwrap().1, 1);
assert_eq!(sparse_set.keys(), [SPARSE_PAGESIZE + 2, SPARSE_PAGESIZE + 1, 12, SPARSE_PAGESIZE]);
assert_eq!(sparse_set.values(), [3, 2, 2, 1]);
assert_eq!(sparse_set.remove(12).unwrap(), 2);
assert!(sparse_set.sparse[0].is_none());
assert_eq!(sparse_set.keys(), [SPARSE_PAGESIZE + 2, SPARSE_PAGESIZE + 1, SPARSE_PAGESIZE]);
assert_eq!(sparse_set.values(), [3, 2, 1]);
}
}

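A brief usage sketch of the sparse set above (not part of the original source; IDs and values are illustrative). It exercises the paging and swap-remove behaviour that the tests check:
let mut set = SparseSet::<&str>::new();
set.insert(3, "a");
set.insert(4096, "b");                       // lands on a second sparse page
assert_eq!(set.get(3), Some(&"a"));
assert_eq!(set.len(), 2);
assert_eq!(set.remove(3), Some("a"));        // dense storage swap-removes; keys() and values() stay aligned
assert!(!set.contains(3));
assert_eq!(set.keys(), &[4096]);
assert_eq!(set.values(), &["b"]);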
1
src/lib.rs Normal file
View file

@ -0,0 +1 @@
include!("main.rs");

547
src/main.rs Normal file
View file

@ -0,0 +1,547 @@
use bytemuck::bytes_of;
use std::collections::BTreeMap;
use std::io::BufReader;
use std::sync::Arc;
use std::time::Instant;
use tracing::info;
use wgpu::util::DeviceExt;
use wgpu::{
Backends, FragmentState, IndexFormat, Instance, InstanceDescriptor, PipelineCompilationOptions,
};
use winit::application::ApplicationHandler;
use winit::event::{ElementState, MouseButton};
use winit::event_loop::{ActiveEventLoop, EventLoop};
#[cfg(target_os = "android")]
use winit::platform::android::activity::AndroidApp;
use winit::window::{Window, WindowAttributes, WindowId};
use zlog::LogLevel;
use zlog::config::LoggerConfig;
pub mod camera;
pub mod collections;
pub mod model;
pub mod texture;
struct WindowContext<'window> {
window: Arc<Window>,
renderer: WgpuRenderer<'window>,
}
impl std::ops::Deref for WindowContext<'_> {
type Target = winit::window::Window;
fn deref(&self) -> &Self::Target {
self.window.as_ref()
}
}
struct WgpuRenderer<'surface> {
device: wgpu::Device,
queue: wgpu::Queue,
surface: wgpu::Surface<'surface>,
surface_config: wgpu::SurfaceConfiguration,
render_pipeline: wgpu::RenderPipeline,
depth_texture: wgpu::Texture,
depth_texture_view: wgpu::TextureView,
camera: camera::Camera,
camera_uniform: camera::CameraUniform,
camera_buffer: wgpu::Buffer,
camera_bind_group: wgpu::BindGroup,
pumpkin: model::Model,
delta: f32,
last_frame_time: Instant,
default_texture: wgpu::BindGroup,
}
impl WgpuRenderer<'_> {
pub fn draw(&mut self) {
let surface_texture = self.surface.get_current_texture().unwrap();
let view = surface_texture.texture.create_view(&Default::default());
let mut encoder =
self.device
.create_command_encoder(&wgpu::wgt::CommandEncoderDescriptor {
label: Some("Render encoder"),
});
{
let mut rpass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some("Render"),
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
view: &view,
resolve_target: None,
ops: wgpu::Operations {
load: wgpu::LoadOp::Clear(wgpu::Color::WHITE),
store: wgpu::StoreOp::Store,
},
})],
depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachment {
view: &self.depth_texture_view,
depth_ops: Some(wgpu::Operations {
load: wgpu::LoadOp::Clear(1.0),
store: wgpu::StoreOp::Store,
}),
stencil_ops: None,
}),
occlusion_query_set: None,
timestamp_writes: None,
});
rpass.set_pipeline(&self.render_pipeline);
rpass.set_bind_group(0, &self.camera_bind_group, &[]);
for mesh in &self.pumpkin.meshes {
let bind_group = mesh
.material
.and_then(|i| self.pumpkin.materials.get(i))
.map(|m| &m.bind_group)
.unwrap_or(&self.default_texture);
rpass.set_bind_group(1, bind_group, &[]);
rpass.set_vertex_buffer(0, mesh.vertex_buffer.slice(..));
rpass.set_index_buffer(mesh.index_buffer.slice(..), IndexFormat::Uint32);
rpass.draw_indexed(0..mesh.num_elements, 0, 0..1);
}
}
self.queue.submit(Some(encoder.finish()));
let delta_time = std::time::Instant::now() - self.last_frame_time;
self.delta = delta_time.as_secs_f32();
info!("{}", self.delta);
surface_texture.present();
self.last_frame_time = std::time::Instant::now();
}
}
struct App<'window> {
state: WgpuState,
windows: BTreeMap<WindowId, WindowContext<'window>>,
}
impl App<'_> {
fn new() -> Self {
Self {
state: WgpuState::new(),
windows: BTreeMap::new(),
}
}
pub fn spawn_window(&mut self, event_loop: &ActiveEventLoop) {
let attr = WindowAttributes::default()
.with_title("Zenyx - SubWindow")
.with_min_inner_size(winit::dpi::LogicalSize::new(1, 1));
let window = event_loop.create_window(attr).unwrap();
let window = Arc::new(window);
let renderer = self.state.create_renderer(window.clone());
let window_ctx = WindowContext {
renderer: smol::block_on(renderer),
window: window.clone(),
};
let window_id = window.id();
self.windows.insert(window_id, window_ctx);
}
}
struct WgpuState {
instance: wgpu::Instance,
}
static _ICON: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/assets/Badge.png"));
static _PUMPKIN: &[u8] = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/Pumpkin.obj"));
impl WgpuState {
fn new() -> Self {
let backends = Backends::PRIMARY;
let instance_descriptor = InstanceDescriptor {
backends,
..Default::default()
};
let instance = Instance::new(&instance_descriptor);
Self { instance }
}
async fn create_renderer<'surface>(&self, window: Arc<Window>) -> WgpuRenderer<'surface> {
let surface = self.instance.create_surface(window.clone()).unwrap();
let adapter = self
.instance
.request_adapter(&wgpu::RequestAdapterOptions {
power_preference: wgpu::PowerPreference::default(),
compatible_surface: Some(&surface),
..Default::default()
})
.await
.unwrap();
let device_descriptor = wgpu::DeviceDescriptor::default();
let (device, queue) = adapter.request_device(&device_descriptor).await.unwrap();
let size = window.inner_size();
let width = size.width.max(1);
let height = size.height.max(1);
let texture_bind_group_layout =
device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
entries: &[
wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Texture {
multisampled: false,
view_dimension: wgpu::TextureViewDimension::D2,
sample_type: wgpu::TextureSampleType::Float { filterable: true },
},
count: None,
},
wgpu::BindGroupLayoutEntry {
binding: 1,
visibility: wgpu::ShaderStages::FRAGMENT,
// This should match the filterable field of the
// corresponding Texture entry above.
ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
count: None,
},
],
label: Some("texture_bind_group_layout"),
});
let camera_bind_group_layout =
device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
entries: &[wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::VERTEX,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None,
},
count: None,
}],
label: Some("Camera Bind group layout"),
});
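// include_bytes! only guarantees byte alignment, so the SPIR-V blobs below are wrapped in a
// #[repr(align(4))] struct; that lets align_to::<u32>() reinterpret the bytes as SPIR-V words
// without an unaligned prefix.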
#[repr(align(4))]
struct ShaderCode<const N: usize>([u8; N]);
let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some("Pipeline Layout"),
bind_group_layouts: &[&camera_bind_group_layout, &texture_bind_group_layout],
push_constant_ranges: &[],
});
let vert_shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: Some("Vertex Shader"),
source: unsafe {
static SHADER_CODE: &[u8] =
&ShaderCode(*include_bytes!(concat!(env!("OUT_DIR"), "/vert.spv"))).0;
debug_assert!(SHADER_CODE.len() % 4 == 0);
let shader = SHADER_CODE.align_to::<u32>().1;
wgpu::ShaderSource::SpirV(std::borrow::Cow::Borrowed(shader))
},
});
let frag_shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: Some("Fragment Shader"),
source: unsafe {
static SHADER_CODE: &[u8] =
&ShaderCode(*include_bytes!(concat!(env!("OUT_DIR"), "/frag.spv"))).0;
debug_assert!(SHADER_CODE.len() % 4 == 0);
let shader = SHADER_CODE.align_to::<u32>().1;
wgpu::ShaderSource::SpirV(std::borrow::Cow::Borrowed(shader))
},
});
let var_name = [Some(wgpu::ColorTargetState {
format: surface.get_capabilities(&adapter).formats[0],
blend: Some(wgpu::BlendState::ALPHA_BLENDING),
write_mask: wgpu::ColorWrites::ALL,
})];
let pipeline_descriptor = wgpu::RenderPipelineDescriptor {
label: Some("Main pipeline"),
layout: Some(&pipeline_layout),
vertex: wgpu::VertexState {
module: &vert_shader,
entry_point: Some("main"),
buffers: &[model::Vertex::desc()],
compilation_options: Default::default(),
},
primitive: wgpu::PrimitiveState {
topology: wgpu::PrimitiveTopology::TriangleList,
strip_index_format: None,
front_face: wgpu::FrontFace::Ccw,
cull_mode: Some(wgpu::Face::Back),
polygon_mode: wgpu::PolygonMode::Fill,
unclipped_depth: false,
conservative: false,
},
depth_stencil: Some(wgpu::DepthStencilState {
format: wgpu::TextureFormat::Depth32Float,
depth_write_enabled: true,
depth_compare: wgpu::CompareFunction::Less,
stencil: wgpu::StencilState::default(),
bias: wgpu::DepthBiasState::default(),
}),
multisample: wgpu::MultisampleState {
count: 1,
mask: !0,
alpha_to_coverage_enabled: false,
},
fragment: Some(FragmentState {
module: &frag_shader,
entry_point: Some("main"),
compilation_options: PipelineCompilationOptions::default(),
targets: &var_name,
}),
multiview: None,
cache: None,
};
// todo!();
let surface_caps = surface.get_capabilities(&adapter);
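// Prefer low-latency Mailbox presentation when the surface supports it; otherwise fall back
// to Fifo (vsync), which every surface is required to support.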
let present_mode = if surface_caps
.present_modes
.contains(&wgpu::PresentMode::Mailbox)
{
wgpu::PresentMode::Mailbox
} else {
wgpu::PresentMode::Fifo
};
let surface_config = wgpu::SurfaceConfiguration {
width,
height,
format: surface_caps.formats[0],
present_mode,
alpha_mode: wgpu::CompositeAlphaMode::Auto,
view_formats: vec![],
usage: wgpu::TextureUsages::RENDER_ATTACHMENT,
desired_maximum_frame_latency: 3,
};
surface.configure(&device, &surface_config);
let pumpkin = model::Model::load_obj(
&mut BufReader::new(std::fs::File::open("Pumpkin.obj").unwrap()),
&device,
&queue,
&texture_bind_group_layout,
);
let checkerboard_img = crate::texture::create_checkerboard();
let checkerboard_texture =
texture::Texture::from_image(&device, &queue, &checkerboard_img, Some("checkerboard"))
.unwrap();
let checkerboard_sampler = device.create_sampler(&wgpu::SamplerDescriptor {
address_mode_u: wgpu::AddressMode::Repeat,
address_mode_v: wgpu::AddressMode::Repeat,
address_mode_w: wgpu::AddressMode::Repeat,
mag_filter: wgpu::FilterMode::Nearest,
min_filter: wgpu::FilterMode::Nearest,
mipmap_filter: wgpu::FilterMode::Nearest,
..Default::default()
});
let default_texture = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("checkerboard-bind-group"),
layout: &texture_bind_group_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::TextureView(&checkerboard_texture.view),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::Sampler(&checkerboard_sampler),
},
],
});
let (depth_texture, depth_texture_view) =
create_depth_texture(&device, surface_config.width, surface_config.height);
let camera = camera::Camera {
// position the camera 1 unit up and 2 units back
// +z is out of the screen
eye: (0.0, 0.0, 2.0).into(),
// have it look at the origin
target: (0.0, 0.0, 0.0).into(),
// which way is "up"
up: cgmath::Vector3::unit_y(),
aspect: surface_config.width as f32 / surface_config.height as f32,
fovy: 45.0,
znear: 0.1,
zfar: 1000.0,
};
let mut camera_uniform = camera::CameraUniform::default();
camera_uniform.update_view_proj(&camera);
let camera_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("Camera Buffer"),
contents: bytemuck::cast_slice(&[camera_uniform]),
usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST,
});
let camera_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
layout: &camera_bind_group_layout,
entries: &[wgpu::BindGroupEntry {
binding: 0,
resource: camera_buffer.as_entire_binding(),
}],
label: Some("Camera Bind Group"),
});
let render_pipeline = device.create_render_pipeline(&pipeline_descriptor);
WgpuRenderer {
surface,
surface_config,
depth_texture,
render_pipeline,
device,
camera,
queue,
camera_uniform,
camera_buffer,
camera_bind_group,
depth_texture_view,
pumpkin,
default_texture,
delta: 0f32,
last_frame_time: Instant::now(),
}
}
}
fn create_depth_texture(
device: &wgpu::Device,
width: u32,
height: u32,
) -> (wgpu::Texture, wgpu::TextureView) {
let size = wgpu::Extent3d {
width,
height,
depth_or_array_layers: 1,
};
let desc = wgpu::TextureDescriptor {
label: Some("Depth Texture"),
size,
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::Depth32Float,
usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::TEXTURE_BINDING,
view_formats: &[],
};
let texture = device.create_texture(&desc);
let view = texture.create_view(&wgpu::TextureViewDescriptor::default());
(texture, view)
}
impl ApplicationHandler for App<'_> {
fn window_event(
&mut self,
event_loop: &winit::event_loop::ActiveEventLoop,
window_id: WindowId,
event: winit::event::WindowEvent,
) {
match event {
winit::event::WindowEvent::RedrawRequested => {
let window_ctx = self.windows.get_mut(&window_id).unwrap();
window_ctx.renderer.draw()
}
winit::event::WindowEvent::CloseRequested => {
let _ = self.windows.remove(&window_id);
if self.windows.is_empty() {
event_loop.exit();
}
}
winit::event::WindowEvent::MouseInput { state, button, .. } => {
if button == MouseButton::Left && state == ElementState::Pressed {
self.spawn_window(event_loop);
}
}
winit::event::WindowEvent::Resized(size) => {
if let Some(window_ctx) = self.windows.get_mut(&window_id) {
if size.width == 0 || size.height == 0 {
return;
}
window_ctx
.renderer
.camera
.update_aspect(size.width as f32 / size.height as f32);
window_ctx
.renderer
.camera_uniform
.update_view_proj(&window_ctx.renderer.camera);
window_ctx.renderer.queue.write_buffer(
&window_ctx.renderer.camera_buffer,
0,
bytes_of(&window_ctx.renderer.camera_uniform),
);
let mut new_config = window_ctx.renderer.surface_config.clone();
new_config.width = size.width;
new_config.height = size.height;
let (depth_texture, depth_view) =
create_depth_texture(&window_ctx.renderer.device, size.width, size.height);
window_ctx.renderer.depth_texture = depth_texture;
window_ctx.renderer.depth_texture_view = depth_view;
window_ctx
.renderer
.surface
.configure(&window_ctx.renderer.device, &new_config)
}
}
_ => (),
}
}
fn resumed(&mut self, event_loop: &winit::event_loop::ActiveEventLoop) {
let attr = WindowAttributes::default()
.with_title("Zenyx")
.with_min_inner_size(winit::dpi::LogicalSize::new(1, 1));
if self.windows.is_empty() {
let window = event_loop.create_window(attr).unwrap();
let window = Arc::new(window);
let renderer = self.state.create_renderer(window.clone());
let window_ctx = WindowContext {
renderer: smol::block_on(renderer),
window: window.clone(),
};
let window_id = window.id();
self.windows.insert(window_id, window_ctx);
}
}
fn suspended(&mut self, _event_loop: &ActiveEventLoop) {
self.windows.clear();
}
}
pub fn main() -> Result<(), terminator::Terminator> {
let config = LoggerConfig::default()
.colored_stdout(true)
.log_to_stdout(true)
.file_include_time(true)
.log_to_file(true)
.level(LogLevel::Info)
.log_path("zenyx.log");
let _logger = zlog::Logger::new(config);
#[cfg(not(target_os = "android"))]
{
_main()
}
#[cfg(target_os = "android")]
{
Ok(())
}
}
pub fn run_app(event_loop: winit::event_loop::EventLoop<()>) -> Result<(), terminator::Terminator> {
let mut app = App::new();
event_loop.set_control_flow(winit::event_loop::ControlFlow::Poll);
event_loop.run_app(&mut app)?;
Ok(())
}
#[unsafe(no_mangle)]
#[cfg(target_os = "android")]
extern "C" fn android_main(app: AndroidApp) {
use winit::event_loop::EventLoopBuilder;
use winit::platform::android::EventLoopBuilderExtAndroid;
let event_loop = EventLoopBuilder::default()
.with_android_app(app)
.build()
.unwrap();
run_app(event_loop).unwrap()
}
fn _main() -> Result<(), terminator::Terminator> {
let event_loop = EventLoop::new()?;
run_app(event_loop)?;
info!("Exiting...");
Ok(())
}

226
src/model/mod.rs Normal file
View file

@ -0,0 +1,226 @@
use std::io::{BufRead, BufReader};
use crate::texture::Texture;
use cgmath::{Vector2, Vector3, Zero};
use tobj::Model as tModel;
use wgpu::util::DeviceExt;
pub struct Model {
pub meshes: Vec<Mesh>,
pub materials: Vec<Material>,
}
pub struct Material {
pub name: String,
pub diffuse_texture: Texture,
pub bind_group: wgpu::BindGroup,
}
pub struct Mesh {
pub name: String,
pub vertex_buffer: wgpu::Buffer,
pub index_buffer: wgpu::Buffer,
pub num_elements: u32,
pub material: Option<usize>,
}
impl Model {
pub fn load_obj<R: BufRead>(
read: &mut R,
device: &wgpu::Device,
queue: &wgpu::Queue,
layout: &wgpu::BindGroupLayout,
) -> Self {
let (models, obj_materials) = tobj::load_obj_buf(
read,
&tobj::LoadOptions {
triangulate: true,
single_index: true,
..Default::default()
},
|p| tobj::load_mtl_buf(&mut BufReader::new(std::fs::File::open(p).unwrap())),
)
.unwrap();
let mut materials = Vec::new();
for m in obj_materials.unwrap() {
let Some(texture) = &m.diffuse_texture else {
continue;
};
let diffuse_texture = load_texture(&m.name, texture, device, queue);
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("model-texture-bind"),
layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::TextureView(&diffuse_texture.view),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::Sampler(&diffuse_texture.sampler),
},
],
});
materials.push(Material {
name: m.name,
diffuse_texture,
bind_group,
});
}
let meshes = models
.into_iter()
.map(|m| {
let vertices = (0..m.mesh.positions.len() / 3)
.map(|i| {
let position = cgmath::vec3(
m.mesh.positions[i * 3],
m.mesh.positions[i * 3 + 1],
m.mesh.positions[i * 3 + 2],
);
let tex_coords = cgmath::vec2(
m.mesh.texcoords[i * 2],
1.0 - m.mesh.texcoords[i * 2 + 1],
);
if m.mesh.normals.is_empty() {
Vertex {
position,
tex_coords,
normal: cgmath::Vector3::zero(),
color: cgmath::Vector3::zero(),
}
} else {
Vertex {
position,
tex_coords,
normal: cgmath::vec3(
m.mesh.normals[i * 3],
m.mesh.normals[i * 3 + 1],
m.mesh.normals[i * 3 + 2],
),
color: cgmath::Vector3::zero(),
}
}
})
.collect::<Vec<_>>();
let vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: None,
contents: bytemuck::cast_slice(&vertices),
usage: wgpu::BufferUsages::VERTEX,
});
let index_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: None,
contents: bytemuck::cast_slice(&m.mesh.indices),
usage: wgpu::BufferUsages::INDEX,
});
Mesh {
name: "TOBJ_MESH".to_string(),
vertex_buffer,
index_buffer,
num_elements: m.mesh.indices.len() as u32,
material: m.mesh.material_id,
}
})
.collect::<Vec<_>>();
Self { meshes, materials }
}
}
pub fn load_texture<S: AsRef<std::path::Path>>(
label: &str,
path: S,
device: &wgpu::Device,
queue: &wgpu::Queue,
) -> Texture {
let file = std::fs::read(path).unwrap();
Texture::from_bytes(device, queue, &file, label).unwrap()
}
pub fn load(obj: &[tModel]) -> (Vec<Vertex>, Vec<u32>) {
let mut combined_vertices = Vec::new();
let mut combined_indices = Vec::new();
let mut vertex_offset = 0;
for object in obj {
let mesh: &_ = &object.mesh;
let vertices: Vec<Vertex> = (0..mesh.positions.len() / 3)
.map(|i| Vertex {
position: Vector3::from([
mesh.positions[i * 3],
mesh.positions[i * 3 + 1],
mesh.positions[i * 3 + 2],
]),
color: cgmath::Vector3::from([1.0, 1.0, 1.0]),
normal: if !mesh.normals.is_empty() {
Vector3::from([
mesh.normals[i * 3],
mesh.normals[i * 3 + 1],
mesh.normals[i * 3 + 2],
])
} else {
Vector3::zero()
},
tex_coords: if !mesh.texcoords.is_empty() {
Vector2::from([mesh.texcoords[i * 2], mesh.texcoords[i * 2 + 1]])
} else {
Vector2::zero()
},
})
.collect();
combined_vertices.extend(vertices);
combined_indices.extend(mesh.indices.iter().map(|&index| index + vertex_offset));
vertex_offset += (mesh.positions.len() as u32) / 3;
}
(combined_vertices, combined_indices)
}
#[derive(Copy, Clone, Debug)]
#[repr(C)]
pub struct Vertex {
pub position: cgmath::Vector3<f32>,
pub color: cgmath::Vector3<f32>,
pub normal: cgmath::Vector3<f32>,
pub tex_coords: cgmath::Vector2<f32>,
}
unsafe impl bytemuck::Pod for Vertex {}
unsafe impl bytemuck::Zeroable for Vertex {}
impl Vertex {
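// shader_location values must match the vertex shader inputs in shaders/shader.vert:
// 0 = position, 1 = color, 2 = normal (declared here but not read by the shader), 3 = tex_coords.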
pub const ATTRIBS: [wgpu::VertexAttribute; 4] = [
wgpu::VertexAttribute {
offset: 0,
shader_location: 0,
format: wgpu::VertexFormat::Float32x3,
},
wgpu::VertexAttribute {
offset: std::mem::offset_of!(Vertex, color) as u64,
shader_location: 1,
format: wgpu::VertexFormat::Float32x3,
},
wgpu::VertexAttribute {
offset: std::mem::offset_of!(Vertex, normal) as u64,
shader_location: 2,
format: wgpu::VertexFormat::Float32x3,
},
wgpu::VertexAttribute {
offset: std::mem::offset_of!(Vertex, tex_coords) as u64,
shader_location: 3,
format: wgpu::VertexFormat::Float32x2,
},
];
pub const fn desc<'a>() -> wgpu::VertexBufferLayout<'a> {
wgpu::VertexBufferLayout {
array_stride: std::mem::size_of::<Vertex>() as wgpu::BufferAddress,
step_mode: wgpu::VertexStepMode::Vertex,
attributes: &Self::ATTRIBS,
}
}
}

108
src/texture.rs Normal file
View file

@ -0,0 +1,108 @@
use image::GenericImageView;
use thiserror::Error;
#[derive(Debug, Error)]
pub enum TextureError {
#[error("Failed to load image")]
ImageError(
#[from]
#[source]
image::ImageError,
),
}
pub struct Texture {
#[allow(unused)]
pub texture: wgpu::Texture,
pub view: wgpu::TextureView,
pub sampler: wgpu::Sampler,
}
impl Texture {
pub fn from_bytes(
device: &wgpu::Device,
queue: &wgpu::Queue,
bytes: &[u8],
label: &str,
) -> Result<Self, TextureError> {
let img = image::load_from_memory(bytes)?;
Self::from_image(device, queue, &img, Some(label))
}
pub fn from_image(
device: &wgpu::Device,
queue: &wgpu::Queue,
img: &image::DynamicImage,
label: Option<&str>,
) -> Result<Self, TextureError> {
let rgba = img.to_rgba8();
let dimensions = img.dimensions();
let size = wgpu::Extent3d {
width: dimensions.0,
height: dimensions.1,
depth_or_array_layers: 1,
};
let texture = device.create_texture(&wgpu::TextureDescriptor {
label,
size,
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::Rgba8UnormSrgb,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
queue.write_texture(
wgpu::TexelCopyTextureInfo {
aspect: wgpu::TextureAspect::All,
texture: &texture,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
},
&rgba,
wgpu::TexelCopyBufferLayout {
offset: 0,
bytes_per_row: Some(4 * dimensions.0),
rows_per_image: Some(dimensions.1),
},
size,
);
let view = texture.create_view(&wgpu::TextureViewDescriptor::default());
let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
address_mode_u: wgpu::AddressMode::ClampToEdge,
address_mode_v: wgpu::AddressMode::ClampToEdge,
address_mode_w: wgpu::AddressMode::ClampToEdge,
mag_filter: wgpu::FilterMode::Linear,
min_filter: wgpu::FilterMode::Nearest,
mipmap_filter: wgpu::FilterMode::Nearest,
..Default::default()
});
Ok(Self {
texture,
view,
sampler,
})
}
}
pub fn create_checkerboard() -> image::DynamicImage {
let size = 512;
let tile_size = 24;
let mut img = image::RgbaImage::new(size, size);
for y in 0..size {
for x in 0..size {
let tile_x = x / tile_size;
let tile_y = y / tile_size;
let color = if (tile_x + tile_y) % 2 == 0 {
[0, 0, 0, 255]
} else {
[255, 5, 255, 255]
};
img.put_pixel(x, y, image::Rgba(color));
}
}
image::DynamicImage::ImageRgba8(img)
}

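A short sketch of the intended call pattern for the texture helpers above (not part of the original source; it assumes a wgpu device and queue are already set up as in src/main.rs):
let img = create_checkerboard();                                  // 512x512 black/magenta debug pattern
let tex = Texture::from_image(&device, &queue, &img, Some("debug-checkerboard"))
    .expect("generated RGBA image should always convert");
// tex.view and tex.sampler can then be bound in a texture bind group (see src/main.rs).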
View file

@ -1,7 +1,6 @@
[package]
name = "editor"
name = "renderer"
version = "0.1.0"
edition = "2021"
edition = "2024"
[dependencies]

View file

@ -0,0 +1 @@

19
subcrates/zlog/Cargo.toml Normal file
View file

@ -0,0 +1,19 @@
[package]
name = "zlog"
version = "0.1.0"
edition = "2024"
[dependencies]
tracing = "0.1.41"
tracing-subscriber = "0.3.19"
serde = { version = "1.0.219", optional = true }
serde_json = { version = "1.0.140", optional = true }
chrono = { version = "0.4.40", optional = true }
[dev-dependencies]
pretty_assertions = "1.4.1"
[features]
default = ["json"]
json = ["dep:serde_json", "dep:chrono", "serde"]
serde = ["dep:serde"]

View file

@ -0,0 +1,102 @@
use std::path::{Path, PathBuf};
use crate::LogLevel;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct LoggerConfig {
pub(crate) log_level: Option<LogLevel>,
pub(crate) log_to_file: bool,
pub(crate) log_file_path: PathBuf,
pub(crate) log_to_stdout: bool,
pub(crate) stdout_color: bool,
pub(crate) stdout_include_time: bool,
pub(crate) file_include_time: bool,
pub(crate) crate_max_level: Option<LogLevel>,
pub(crate) log_use_json: bool,
pub(crate) log_json_show_timestamp: bool,
pub(crate) log_json_show_level: bool,
pub(crate) log_json_show_message: bool,
pub(crate) log_json_show_additional_fields: bool,
}
impl LoggerConfig {
pub fn level(mut self, level: LogLevel) -> Self {
self.log_level = Some(level);
self
}
pub fn log_to_file(mut self, f: bool) -> Self {
self.log_to_file = f;
self
}
pub fn colored_stdout(mut self, c: bool) -> Self {
self.stdout_color = c;
self
}
pub fn log_to_stdout(mut self, s: bool) -> Self {
self.log_to_stdout = s;
self
}
pub fn log_path<P: AsRef<Path>>(mut self, p: P) -> Self {
self.log_file_path = p.as_ref().to_path_buf();
self
}
pub fn stdout_include_time(mut self, i: bool) -> Self {
self.stdout_include_time = i;
self
}
pub fn file_include_time(mut self, i: bool) -> Self {
self.file_include_time = i;
self
}
pub fn log_use_json(mut self, i: bool) -> Self {
self.log_use_json = i;
self
}
pub fn log_json_show_timestamp(mut self, i: bool) -> Self {
self.log_json_show_timestamp = i;
self
}
pub fn log_json_show_level(mut self, i: bool) -> Self {
self.log_json_show_level = i;
self
}
pub fn log_json_show_message(mut self, i: bool) -> Self {
self.log_json_show_message = i;
self
}
pub fn log_json_show_additional_fields(mut self, i: bool) -> Self {
self.log_json_show_additional_fields = i;
self
}
}
impl Default for LoggerConfig {
fn default() -> Self {
Self {
log_level: None,
crate_max_level: None,
log_to_file: true,
log_file_path: "app.log".into(),
log_to_stdout: true,
stdout_color: true,
stdout_include_time: false,
file_include_time: false,
log_use_json: false,
log_json_show_timestamp: false,
log_json_show_level: false,
log_json_show_message: false,
log_json_show_additional_fields: false
}
}
}

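A small builder-style sketch of the configuration above (values are illustrative and mirror how src/main.rs configures the logger):
use zlog::{LogLevel, config::LoggerConfig};

let config = LoggerConfig::default()
    .level(LogLevel::Debug)
    .log_to_stdout(true)
    .colored_stdout(true)
    .log_to_file(true)
    .log_path("zenyx.log")
    .log_use_json(false);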
346
subcrates/zlog/src/lib.rs Normal file
View file

@ -0,0 +1,346 @@
pub mod config;
pub mod query;
#[cfg(test)]
mod tests;
use std::{
fmt,
fs::OpenOptions,
io::{BufWriter, Write},
str::FromStr,
sync::{
Arc, RwLock,
mpsc::{self, Sender},
},
thread,
time::SystemTime,
};
use config::LoggerConfig;
use query::LogQuery;
use tracing::{Event, Level, Subscriber, level_filters::LevelFilter, subscriber::DefaultGuard};
use tracing_subscriber::{
layer::{Context, Layer, SubscriberExt},
registry::LookupSpan,
util::SubscriberInitExt,
};
#[cfg(feature = "json")]
use serde::{Deserialize, Serialize};
#[cfg(feature = "json")]
use serde_json::Value;
#[cfg(feature = "json")]
use chrono::{DateTime, Utc};
#[derive(Debug, Clone, PartialEq, Eq)]
enum LogEvent {
Log(LogEntry),
Shutdown,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct LogEntry {
timestamp: SystemTime,
level: Level,
message: String,
#[cfg(feature = "json")]
additional_fields: serde_json::Map<String, Value>,
}
impl PartialOrd for LogEntry {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl Ord for LogEntry {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.timestamp
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.cmp(
&other
.timestamp
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap(),
)
}
}
struct BufferLayer {
log_entries: Arc<RwLock<Vec<LogEntry>>>,
senders: Vec<Sender<LogEvent>>,
}
impl BufferLayer {
pub fn new(log_entries: Arc<RwLock<Vec<LogEntry>>>, senders: Vec<Sender<LogEvent>>) -> Self {
Self {
log_entries,
senders,
}
}
}
impl Drop for BufferLayer {
fn drop(&mut self) {
for tx in &self.senders {
if let Err(e) = tx.send(LogEvent::Shutdown) {
panic!("{e}")
}
}
self.senders.clear();
}
}
impl<S> Layer<S> for BufferLayer
where
S: Subscriber + for<'a> LookupSpan<'a>,
{
fn on_event(&self, event: &Event<'_>, _ctx: Context<'_, S>) {
let metadata = event.metadata();
let level = *metadata.level();
let timestamp = SystemTime::now();
#[cfg(feature = "json")]
let additional_fields = serde_json::Map::new();
let mut message = String::new();
let mut visitor = LogVisitor::new(&mut message);
event.record(&mut visitor);
let log_entry = LogEvent::Log(LogEntry {
timestamp,
level,
message,
#[cfg(feature = "json")]
additional_fields
});
if let LogEvent::Log(ref entry) = log_entry {
if let Ok(mut buf) = self.log_entries.write() {
buf.push(entry.clone());
}
}
for tx in &self.senders {
let _ = tx.send(log_entry.clone());
}
}
}
struct LogVisitor<'msg> {
message: &'msg mut String,
}
impl<'msg> LogVisitor<'msg> {
fn new(message: &'msg mut String) -> Self {
Self { message }
}
}
impl tracing::field::Visit for LogVisitor<'_> {
fn record_debug(&mut self, field: &tracing::field::Field, value: &dyn fmt::Debug) {
use std::fmt::Write;
if field.name() == "message" {
write!(self.message, "{:?}", value).unwrap();
} else {
write!(self.message, "{}={:?} ", field.name(), value).unwrap();
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum LogLevel {
Error,
#[default]
Info,
Warn,
Debug,
Trace,
}
impl From<LogLevel> for LevelFilter {
fn from(level: LogLevel) -> Self {
match level {
LogLevel::Error => LevelFilter::ERROR,
LogLevel::Info => LevelFilter::INFO,
LogLevel::Debug => LevelFilter::DEBUG,
LogLevel::Trace => LevelFilter::TRACE,
LogLevel::Warn => LevelFilter::WARN,
}
}
}
impl fmt::Display for LogLevel {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl FromStr for LogLevel {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"trace" => Ok(Self::Trace),
"debug" => Ok(Self::Debug),
"info" => Ok(Self::Info),
"error" => Ok(Self::Error),
"warn" => Ok(Self::Warn),
_ => Err(()),
}
}
}
pub struct Logger {
subscriber_guard: Option<tracing::subscriber::DefaultGuard>,
log_entries: Arc<RwLock<Vec<LogEntry>>>,
_handles: Vec<thread::JoinHandle<()>>,
}
impl Logger {
pub fn new(config: LoggerConfig) -> Self {
let log_entries = Arc::new(RwLock::new(Vec::new()));
let mut senders = Vec::new();
let mut handles = Vec::new();
if config.log_to_stdout || config.log_use_json {
let (tx, rx) = mpsc::channel();
senders.push(tx);
let config_clone = config.clone();
let handle = thread::spawn(move || {
for msg in rx {
match msg {
LogEvent::Log(mut entry) => {
println!(
"{}",
format_entry(
&mut entry,
&config_clone
)
);
}
LogEvent::Shutdown => break,
}
}
});
handles.push(handle);
}
if config.log_to_file {
let (tx, rx) = mpsc::channel();
senders.push(tx);
let config_clone = config.clone();
let path = config.log_file_path.clone();
let handle = thread::spawn(move || {
let file = OpenOptions::new()
.append(true)
.create(true)
.open(&path)
.expect("Failed to open log file");
let mut writer = BufWriter::new(file);
for msg in rx {
match msg {
LogEvent::Log(mut entry) => {
let line = format_entry(&mut entry, &config_clone);
writeln!(writer, "{}", line).expect("Failed to write to log file");
writer.flush().expect("Failed to flush log file");
}
LogEvent::Shutdown => break,
}
}
});
handles.push(handle);
}
let buffer_layer = BufferLayer::new(log_entries.clone(), senders);
let mk_logger = |guard: DefaultGuard| Logger {
subscriber_guard: Some(guard),
log_entries,
_handles: handles,
};
if let Some(level) = config.log_level {
let subscriber = tracing_subscriber::registry()
.with(buffer_layer)
.with(LevelFilter::from(level));
let guard = subscriber.set_default();
mk_logger(guard)
} else {
let subscriber = tracing_subscriber::registry().with(buffer_layer);
let guard = subscriber.set_default();
mk_logger(guard)
}
}
pub fn get_logs(&self, query: LogQuery) -> Vec<LogEntry> {
let guard = self.log_entries.read().unwrap();
match query {
LogQuery::All => guard.clone(),
LogQuery::From(t) => guard.iter().filter(|e| e.timestamp >= t).cloned().collect(),
}
}
}
impl Drop for Logger {
fn drop(&mut self) {
let _ = self.subscriber_guard.take();
let handles = std::mem::take(&mut self._handles);
for handle in handles {
handle.join().unwrap_or_else(|e| {
eprintln!("Logger thread panicked: {:?}", e);
});
}
}
}
fn format_entry(entry: &mut LogEntry, log_config: &LoggerConfig) -> String {
#[cfg(feature = "json")]
{
if log_config.log_use_json {
return format_entry_json(entry, log_config);
}
}
if log_config.log_to_stdout || log_config.log_to_file {
format_entry_string(entry, log_config)
} else {
String::new()
}
}
fn format_entry_string(entry: &LogEntry, log_config: &LoggerConfig) -> String {
let lvl = if log_config.stdout_color {
match entry.level {
Level::ERROR => "\x1b[31mERROR\x1b[0m",
Level::WARN => "\x1b[33mWARN\x1b[0m",
Level::INFO => "\x1b[32mINFO\x1b[0m",
Level::DEBUG => "\x1b[36mDEBUG\x1b[0m",
Level::TRACE => "\x1b[34mTRACE\x1b[0m",
}
} else {
entry.level.as_str()
};
format!("{} {}", lvl, entry.message)
}
/// Formats the log entry as a json object ([`serde_json`]) and returns it as a [`String`]
#[cfg(feature = "json")]
fn format_entry_json(entry: &mut LogEntry, log_config: &LoggerConfig) -> String {
let mut json_object = serde_json::Map::new();
if log_config.log_json_show_timestamp {
json_object.insert("timestamp".to_string(), Value::String(DateTime::<Utc>::from(entry.timestamp).to_rfc3339()));
}
if log_config.log_json_show_level {
json_object.insert("level".to_string(), Value::String(entry.level.to_string()));
}
if log_config.log_json_show_message {
json_object.insert("message".to_string(), Value::String(entry.message.to_string()));
}
if log_config.log_json_show_additional_fields {
json_object.append(&mut entry.additional_fields);
}
serde_json::to_string(&json_object).unwrap()
}

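A minimal end-to-end sketch of the logger above (not from the original source; it assumes the default "json" feature set and a single thread, since Logger::new installs a thread-local default subscriber):
use zlog::{Logger, LogLevel, config::LoggerConfig, query::LogQuery};

let config = LoggerConfig::default()
    .level(LogLevel::Info)
    .log_to_stdout(true)
    .log_to_file(false);
let logger = Logger::new(config);            // keeps the subscriber guard alive
tracing::info!("engine started");
let entries = logger.get_logs(LogQuery::All);
assert_eq!(entries.len(), 1);                // the single info! call above
// Dropping `logger` sends LogEvent::Shutdown to the writer threads and joins them.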
View file

@ -0,0 +1,14 @@
use std::time::SystemTime;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Default)]
pub enum LogQuery {
#[default]
All,
From(SystemTime),
}
impl LogQuery {
pub fn since(time: SystemTime) -> Self {
LogQuery::From(time)
}
}

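A short sketch of the time-based query (a fragment; it assumes a zlog::Logger named `logger` is in scope):
use std::time::{Duration, SystemTime};

let cutoff = SystemTime::now() - Duration::from_secs(5);
let recent = logger.get_logs(zlog::query::LogQuery::since(cutoff));   // entries from the last five seconds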
139
subcrates/zlog/src/tests.rs Normal file
View file

@ -0,0 +1,139 @@
use pretty_assertions::assert_eq;
use tracing::Level;
use serde_json::Map;
use serde::{Serialize, Deserialize};
use super::*;
#[test]
fn test_logger_sequential_consistency() {
use std::sync::atomic::{AtomicUsize, Ordering};
use tracing::{debug, error, info, trace, warn};
let config = LoggerConfig::default()
.log_to_stdout(true)
.log_to_file(true);
let logger = Logger::new(config);
static COUNTER: AtomicUsize = AtomicUsize::new(0);
for i in 0..4096 * 128 {
let count = COUNTER.fetch_add(1, Ordering::SeqCst);
match i % 5 {
0 => error!("Error message {}", count),
1 => warn!("Warning message {}", count),
2 => info!("Info message {}", count),
3 => debug!("Debug message {}", count),
_ => trace!("Trace message {}", count),
}
}
let logs = logger.get_logs(LogQuery::All);
assert_eq!(
logs.len(),
4096 * 128,
"Should have exactly 5000 log entries"
);
for (i, log) in logs.iter().enumerate() {
let expected_count = i;
let expected_level = match i % 5 {
0 => Level::ERROR,
1 => Level::WARN,
2 => Level::INFO,
3 => Level::DEBUG,
_ => Level::TRACE,
};
assert_eq!(
log.level, expected_level,
"Log {} has incorrect level: {:?}",
i, log.level
);
let expected_msg = match expected_level {
Level::ERROR => format!("Error message {}", expected_count),
Level::WARN => format!("Warning message {}", expected_count),
Level::INFO => format!("Info message {}", expected_count),
Level::DEBUG => format!("Debug message {}", expected_count),
Level::TRACE => format!("Trace message {}", expected_count),
};
assert_eq!(
log.message, expected_msg,
"Log {} has incorrect message. Expected: '{}', Got: '{}'",
i, expected_msg, log.message
);
if i > 0 {
assert!(
log.timestamp >= logs[i - 1].timestamp,
"Log {} has timestamp out of order. Current: {:?}, Previous: {:?}",
i,
log.timestamp,
logs[i - 1].timestamp
);
}
}
let mut counts: Vec<usize> = logs
.iter()
.map(|log| {
log.message
.split_whitespace()
.last()
.unwrap()
.parse::<usize>()
.unwrap()
})
.collect();
counts.sort();
counts.dedup();
assert_eq!(counts.len(), 4096 * 128, "Found duplicate log entries");
}
#[test]
fn test_logger_sequential_consistency_json() {
use std::sync::atomic::{AtomicUsize, Ordering};
use tracing::{debug, error, info, trace, warn};
let config = LoggerConfig::default()
.log_to_stdout(false)
.log_to_file(false)
.log_use_json(true)
.log_json_show_timestamp(true)
.log_json_show_level(true)
.log_json_show_message(true)
.log_json_show_additional_fields(false); // Not implemented yet
let logger = Logger::new(config);
static COUNTER: AtomicUsize = AtomicUsize::new(0);
for i in 0..4096 * 128 {
let count = COUNTER.fetch_add(1, Ordering::SeqCst);
match i % 5 {
0 => error!("Error message {}", count),
1 => warn!("Warning message {}", count),
2 => info!("Info message {}", count),
3 => debug!("Debug message {}", count),
_ => trace!("Trace message {}", count),
}
}
let mut log_json: Vec<Map<String, Value>> = vec![];
for log in logger.get_logs(LogQuery::All) {
let mut json_object = serde_json::Map::new();
json_object.insert("timestamp".to_string(), Value::String(DateTime::<Utc>::from(log.timestamp).to_rfc3339()));
json_object.insert("level".to_string(), Value::String(log.level.to_string()));
json_object.insert("message".to_string(), Value::String(log.message.to_string()));
log_json.push(json_object);
}
for log in log_json {
serde_json::to_string(&log).unwrap();
}
}

View file

@ -1,32 +0,0 @@
[package]
name = "xtask"
version = "0.1.0"
edition = "2021"
[dependencies]
clap = { version = "4.5.20", features = ["derive"] }
[profile.dev]
rpath = false
panic = "abort"
lto = "off"
opt-level = 0
debug = false
overflow-checks = false
incremental = true
codegen-units = 256
strip = "symbols"
debug-assertions = false
[profile.dev.package."*"]
opt-level = 0
debug = false
overflow-checks = false
incremental = true
codegen-units = 256
strip = "symbols"
debug-assertions = false

View file

@ -1 +0,0 @@
pub fn build_editor() {}

View file

@ -1,22 +0,0 @@
use std::process::Stdio;
pub fn build_engine() {
}
pub fn build_core() {
let threads = format!("-j{}",std::thread::available_parallelism().unwrap().get());
let mut run = std::process::Command::new("cargo")
.arg("run")
.arg(threads)
.arg("--bin")
.arg("zenyx")
.stdin(Stdio::inherit())
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
.spawn()
.unwrap();
run.wait().unwrap();
}

View file

@ -1,70 +0,0 @@
use clap::{CommandFactory, Parser, Subcommand, ValueEnum};
pub mod engine;
pub mod editor;
#[derive(Parser)]
#[command(version, about, long_about = None,disable_version_flag = true,disable_help_flag = true)]
struct Cli {
#[arg(short,long)]
release: bool,
#[command(subcommand)]
command: Option<Commands>,
}
#[derive(Subcommand)]
enum Commands {
Run {
#[arg()]
task: Task
},
Config,
}
#[derive(Clone,ValueEnum)]
enum Task {
Engine, // Builds both editor and core
Editor, // Builds editor only
Core, // Builds engine core only
Help,
}
fn main() {
let cli = Cli::parse();
if cli.release {
println!("Running in release mode")
}
match &cli.command {
None => {
Cli::command().print_help().map_err(|e| {
println!("Could not run Xtask: {e}");
}).unwrap();
}
Some(Commands::Run { task }) => {
match task {
Task::Engine => engine::build_engine(),
Task::Editor => todo!("Editor is not being actively worked on"),
Task::Core => {
engine::build_core();
},
Task::Help => {
println!("The following options are avalible to run");
todo!()
},
}
}
Some(Commands::Config) => {
todo!()
}
}
}