Saving progress

parent 26a2badb94
commit ff3edd6426

.gitignore (vendored): 1 line changed
@@ -17,3 +17,4 @@ GRAPH*.png
SICK_JOKE*
*.hi
*_stub.h
+copying_to_cpp.sh
@@ -16,7 +16,7 @@ void generate_util_temp_very_base_headers() {
        (util_templates_instantiation_options){
            .T = T_codegen_VecAndSpan[i],
            .t_integer = true, .t_primitive = true, .vec = true, .vec_extended = true,
-           .vec_equal = true, .span = true, .span_extended = true, .mut_span = true,
+           .vec_equal = true, .span = true, .mut_span = true, .span_extended = true,
            .collab_vec_span = true, .collab_vec_span_extended = true,
        });
    }
@@ -32,7 +32,7 @@ void generate_util_temp_very_base_headers() {
        VecU8_to_span(&dependency),
        (util_templates_instantiation_options){
            .T = VecU8_to_span(&VecT), .t_clonable = true, .vec = true, .vec_extended = true,
-           .span = true, .collab_vec_span = true, .vec_equal = true, .vec_new_of_size = true
+           .vec_equal = true, .vec_new_of_size = true, .span = true, .collab_vec_span = true,
        });
    VecU8_drop(VecT);
    VecU8_drop(dependency);
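Both hunks above only reorder fields inside designated initializers; for side-effect-free initializers like these, the listed order does not change the resulting value, so behavior is unaffected. A quick illustration (the struct here is made up, not from the repo):

struct Example { int x, y; };
struct Example a = {.x = 1, .y = 2};
struct Example b = {.y = 2, .x = 1}; /* same value as a */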
@@ -441,7 +441,6 @@ vec2 height_map_cb_that_uses_bublazhuzhka(void* ug, vec2 v) {
    return Bublazhuzhka_get_derivative(bzh, v);
}

// todo: rewrite this crap and merge it with other one-fourth-of-a-cylinder generating functions
void r4_asset_gen_generic_mesh_one_fourth_of_a_cylinder(float s_resol, float w, float r, U32 k,
    VecU8 path_to_mesh, VecU8 path_to_template_tex, VecU8 path_to_normal_tex
) {
@@ -455,7 +454,6 @@ void r4_asset_gen_generic_mesh_one_fourth_of_a_cylinder(float s_resol, float w,

    U64 texture_width = (U64)ceilf(2 * r_mag + w_mag);
    U64 texture_height = (size_t)ceilf(2 * r_mag + (float)k * l_mag);
    // todo: aaaa i am gonna go fucking insane who the fuck wrote this shit. AAA, I hate this code so much I hate myself so fucking much
    const vec2 v0tex = {r_mag / (float)texture_width, r_mag / (float)texture_height};
    const vec2 v1tex = {(r_mag + w_mag) / (float)texture_width, r_mag / (float)texture_height};
    const vec2 v2tex = {r_mag / (float)texture_width, 2 * r_mag / (float)texture_height};
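For context: each component of v0tex/v1tex/v2tex above is a texel offset divided by the corresponding texture dimension, i.e. a UV coordinate in [0, 1]. With hypothetical magnitudes r_mag = 8 and w_mag = 16, texture_width = ceilf(2 * 8 + 16) = 32, so v0tex.x would be 8 / 32 = 0.25.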
src/l2/core/glb_file.h (new file)

#ifndef prototype1_src_l2_alice_glb_file_h
#define prototype1_src_l2_alice_glb_file_h

#include "json_encoded.h"

/* todo: add big endian support */

/* GLB container layout: a 12-byte header (magic "glTF", version, total length),
   followed by chunks that each start with a U32 length and a U32 type. */

/* Points to some string (BIN segment) + contains decoded json object */
typedef struct {
    Json gltf;
    /* If length is 0, BIN segment is absent */
    SpanU8 bin_segment;
} GLBFileSegments;

void GLBFileSegments_drop(GLBFileSegments self){
    Json_drop(self.gltf);
}

/* Returns positive on error, 0 on ok */
int glb_file_get_segments(SpanU8 file, GLBFileSegments* ret){
    if (file.len < 12) {
        return 1;
    }
    SpanU8 json_segment = {0, 0}; // length of 0 means segment is absent
    SpanU8 bin_segment = {0, 0}; // length of 0 means segment is absent
    if (*(const U32*)file.data != 0x46546C67) { /* "glTF" magic, little-endian */
        return 2;
    }
    /* Nobody cares about version */
    if (*(const U32*)(file.data + 8) != file.len) {
        return 3;
    }
    U64 cur = 12;
    while (cur < file.len) {
        if (cur + 8 > file.len) {
            return 4;
        }
        U32 chunk_length = *(const U32*)(file.data + cur);
        U32 chunk_type = *(const U32*)(file.data + cur + 4);
        if (cur + 8 + chunk_length > file.len) {
            return 5;
        }
        SpanU8 cur_segment = SpanU8_span(file, cur + 8, chunk_length);
        if (chunk_type == 0x4E4F534A) { /* "JSON" chunk */
            if (json_segment.len > 0) {
                /* Illegal! Two json segments */
                return 6;
            }
            json_segment = cur_segment;
        } else if (chunk_type == 0x004E4942) { /* "BIN" chunk */
            if (bin_segment.len > 0) {
                /* Illegal! Two bin segments */
                return 7;
            }
            bin_segment = cur_segment;
        }
        cur += 8 + chunk_length; /* advance past this chunk's header and payload */
    }
    // if (json_segment.len == 0) {
    //     /* Illegal, no json segment */
    //     return 8;
    // }
    OptionJson parsed_json = json_decode(json_segment, 15);
    if (parsed_json.variant == Option_None) {
        return 9;
    }
    /* Everything is correct */
    *ret = (GLBFileSegments){.gltf = parsed_json.some, .bin_segment = bin_segment};
    return 0;
}

#endif
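A minimal usage sketch for the new header (editorial, not part of the commit): it assumes some way of loading a .glb file into a VecU8; read_whole_file below is a hypothetical helper, everything else comes from this diff.

VecU8 raw = read_whole_file("model.glb"); /* hypothetical helper returning the file bytes */
GLBFileSegments segments;
int err = glb_file_get_segments(VecU8_to_span(&raw), &segments);
if (err == 0) {
    /* segments.gltf is the decoded JSON tree; segments.bin_segment points into raw,
       so raw must outlive any use of the BIN data */
    GLBFileSegments_drop(segments);
} /* a positive err identifies which validation step failed */
VecU8_drop(raw);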
@@ -23,12 +23,14 @@ typedef enum {
    Json_none,
} Json_variant;

+typedef RBTree_MapVecU8ToJson json_dictionary_t;
+
struct Json{
    Json_variant variant;
    union {
        S64 integer;
        float float_num;
-       RBTree_MapVecU8ToJson dict;
+       json_dictionary_t dict;
        VecJson arr;
        VecU8 str;
    };
@@ -42,7 +44,7 @@ typedef struct RBTreeNode_KVPVecU8ToJson {

/* Pulling declarations of methods, defined below */
void VecJson_drop(VecJson self);
-void RBTree_MapVecU8ToJson_drop(RBTree_MapVecU8ToJson self);
+void RBTree_MapVecU8ToJson_drop(json_dictionary_t self);

void Json_drop(Json self) {
    if (self.variant == Json_str) {
@@ -62,7 +64,7 @@ Json Json_from_float(float x){
}

Json Json_from_int(S64 x){
-   return (Json){.variant = Json_float, .integer = x};
+   return (Json){.variant = Json_integer, .integer = x};
}

Json Json_from_VecU8(VecU8 x){
@@ -85,7 +87,7 @@ Json Json_from_VecJson(VecJson arr){
    return (Json){.variant = Json_arr, .arr = arr};
}

-Json Json_from_MapVecU8ToJson(RBTree_MapVecU8ToJson dict){
+Json Json_from_MapVecU8ToJson(json_dictionary_t dict){
    return (Json){.variant = Json_dict, .dict = dict};
}

@@ -86,7 +86,6 @@ VecU8 json_encode(const Json* obj){
/* Kids had their fun with json encoding. Now it's time for adults to enjoy some parsing */



OptionJson json_decoding_h_no_spaces(SpanU8* rem, U32 depth_rem);

OptionJson json_decoding_h(SpanU8* rem, U32 depth_rem){
@@ -224,7 +223,7 @@ OptionJson json_decoding_h_no_spaces(SpanU8* rem, U32 depth_rem){
    }
    if (SpanU8_parsing_try_read_char(rem, '{')) {
        SpanU8_parsing_skip_spaces(rem);
-       RBTree_MapVecU8ToJson dict = RBTree_MapVecU8ToJson_new();
+       json_dictionary_t dict = RBTree_MapVecU8ToJson_new();
        while (true) {
            if (SpanU8_parsing_try_read_char(rem, '}')) {
                return Some_Json(Json_from_MapVecU8ToJson(dict));
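For reference, json_decode takes a second argument at both new call sites (15 in glb_file.h, 10 in the tests); judging by the depth_rem parameter threaded through the helpers above, it looks like a recursion-depth limit. A small hedged usage sketch using only names that appear in this diff:

OptionJson parsed = json_decode(SpanU8_from_cstr("{\"k1\": 45}"), 10); /* 10 = assumed max nesting depth */
if (parsed.variant != Option_None) {
    /* parsed.some is a Json value; for this input its variant would be Json_dict */
    Json_drop(parsed.some);
}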
@@ -1 +0,0 @@
-
@@ -278,19 +278,84 @@ void tt24(){
    check(vec_matches_cstr(buf, "\"" "\\\"\\n\\t\\r\\\\ AB" "\""));
}

-void tt25(){
-   Json x = Json_from_VecJson(VecJson_new());
+void test_json_encoding(Json x, const char* str_lit){
    VecU8 my_ans = json_encode(&x);
    Json_drop(x);
-   check(vec_matches_cstr(my_ans, "[]"));
+   check(vec_matches_cstr(my_ans, str_lit));
}

void tt25(){
    test_json_encoding(Json_from_VecJson(VecJson_new()), "[]");
    test_json_encoding(Json_None, "none");
    test_json_encoding(Json_False, "false");
    test_json_encoding(Json_True, "true");
    test_json_encoding(Json_from_int(INT64_MIN), "-9223372036854775808");
    test_json_encoding(Json_from_SpanU8(cstr("that's right\nLet's do it")), "\"that's right\\nLet's do it\"");
    test_json_encoding(Json_from_MapVecU8ToJson(RBTree_MapVecU8ToJson_new()), "{}");
}

void tt26(){
    {
        Json x = Json_from_float(123.0f);
        VecU8 s = json_encode(&x);
        check(SpanU8_is_prefix(cstr("123"), VecU8_to_span(&s)));
        Json_drop(x);
        VecU8_drop(s);
    }
    {
        VecJson v = VecJson_new();
        VecJson_append(&v, Json_from_VecJson(VecJson_new()));
        VecJson_append(&v, Json_from_int(1223));
        test_json_encoding(Json_from_VecJson(v), "[[], 1223]");
    }
    {
        json_dictionary_t d = RBTree_MapVecU8ToJson_new();
        RBTree_MapVecU8ToJson_insert(&d, vcstr("k1"), Json_from_int(45));
        RBTree_MapVecU8ToJson_insert(&d, vcstr("k2"), Json_from_int(45555));
        test_json_encoding(Json_from_MapVecU8ToJson(d), "{\"k1\": 45, \"k2\": 45555}");
    }
}

void test_json_equal(const Json* a, const Json* b){
    check(a->variant == b->variant);
    if (a->variant == Json_integer) {
        check(a->integer == b->integer);
    } else if (a->variant == Json_float) {
        check(fabsf(a->float_num - b->float_num) < 0.0001f); /* approximate comparison; tolerance chosen arbitrarily */
    } else if (a->variant == Json_str) {
        check(VecU8_equal_VecU8(&a->str, &b->str));
    }
}

void test_json_decoding_ok(const char* str_lit, Json right_ans){

}

void tt27(){

}

void test_json_decoding_with_ill_formed_inp(const char* str_lit){
    OptionJson res = json_decode(SpanU8_from_cstr(str_lit), 10);
    check(res.variant == Option_None);
}

void tt28(){
    test_json_decoding_with_ill_formed_inp("123e2222222222222222222222222");
    test_json_decoding_with_ill_formed_inp("999999999999999999999999999999999999999");
    test_json_decoding_with_ill_formed_inp("\"666");
    test_json_decoding_with_ill_formed_inp("{{{}}");
    test_json_decoding_with_ill_formed_inp("f");
    test_json_decoding_with_ill_formed_inp("tru");
    test_json_decoding_with_ill_formed_inp("none3");
    test_json_decoding_with_ill_formed_inp("]");
    test_json_decoding_with_ill_formed_inp("}");
}


int main(){
    tt1(); tt2(); tt3(); tt4(); tt5(); tt6(); tt7(); tt8(); tt9(); tt10(); tt11(); tt12(); tt13(); tt14(); tt15();
    tt16(); tt17(); tt18(); tt19();
-   tt20(); tt21(); tt22(); tt23(); tt24(); tt25()
+   tt20(); tt21(); tt22(); tt23(); tt24(); tt25(); tt26();
    return 0;
}
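test_json_decoding_ok and tt27 are left as empty stubs in this commit. One way the helper could eventually be filled in, sketched here with only functions that already appear in this diff (an editorial sketch, not the author's implementation):

void test_json_decoding_ok(const char* str_lit, Json right_ans){
    OptionJson res = json_decode(SpanU8_from_cstr(str_lit), 10);
    check(res.variant != Option_None); /* decoding well-formed input must succeed */
    test_json_equal(&res.some, &right_ans);
    Json_drop(res.some);
    Json_drop(right_ans);
}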