From 04a0cd8ab24b28ca176f2500c0e669def846e3c0 Mon Sep 17 00:00:00 2001
From: Tom <thomas.hodson@ecmwf.int>
Date: Wed, 11 Dec 2024 14:08:18 +0000
Subject: [PATCH] Add local config and language.yaml, local catalog cache support, and a Rust compressed-tree prototype

---
 config/destinE/schema                         |  130 --
 config/local/language.yaml                    | 1253 +++++++++++++++++
 stac_server/main.py                           |   63 +-
 stac_server/run.sh                            |    2 +-
 tree_compresser/README.md                     |    5 +
 .../tree_traverser/CompressedTree.py          |   27 +-
 .../python_src/tree_traverser/__init__.py     |    2 +-
 tree_compresser/rust_src/compressed_tree.rs   |  334 +++++
 web_query_builder/run.sh                      |    2 +-
 web_query_builder/static/app.js               |   98 +-
 10 files changed, 1685 insertions(+), 231 deletions(-)
 delete mode 100644 config/destinE/schema
 create mode 100644 config/local/language.yaml
 create mode 100644 tree_compresser/rust_src/compressed_tree.rs

diff --git a/config/destinE/schema b/config/destinE/schema
deleted file mode 100644
index faa8402..0000000
--- a/config/destinE/schema
+++ /dev/null
@@ -1,130 +0,0 @@
-# * Format of the rules is:
-
-# [a1, a2, a3 ...[b1, b2, b3... [c1, c2, c3...]]]
-
-# - The first level (a) defines which attributes are used to name the top level directory
-# - The second level (b) defines which attributes are used to name the data files
-# - The third level (c) defines which attributes are used as index keys
-
-# * Rules can be grouped
-
-# [a1, a2, a3 ...
-#   [b1, b2, b3... [c1, c2, c3...]]
-#   [B1, B2, B3... [C1, C2, C3...]]
-# ]
-
-# * A list of values can be given for an attribute
-# [ ..., stream=enfo/efov, ... ]
-# This will be used when matching rules.
-
-# * Attributes can be typed
-#   Globally, at the beginning of this file:
-
-#   refdate: Date;
-
-#   or in the context of a rule:
-#   [type=cl, ... [date:ClimateMonth, ...]]
-
-# Typing attributes is done when the user's requests or the GRIB values need to be modified before directories, files and indexes are created. For example, ClimateMonth will transform 2010-04-01 to 'may' internally.
-
-# * Attributes can be optional
-# [ step, levelist?, param ]
-# They will be replaced internally by an empty value. It is also possible to provide a default substitution value: e.g. [domain?g] will consider the domain to be 'g' if missing.
-
-# * Attributes can be removed:
-# [grid-]
-# This is useful to remove attributes present in the GRIB that should be ignored
-
-# * Rules are matched:
-
-# - If the attributes are present in the GRIB/Request, or marked optional or ignored
-# - If a list of possible values is provided, one of them must match, for example
-#   [ class, expver, stream=enfo/efov, date, time, domain ]
-#   will match either stream=enfo or stream=efov; all other attributes will be matched if they exist in the GRIB or user's request
-
-# * On archive:
-# - Attributes are extracted from the GRIB (namespace 'mars'), possibly modified by the attribute type
-# - Only the first rule is used, so order is important
-# - All GRIB attributes must be used by the rules, otherwise an error is raised
-
-# * On retrieve:
-# - Attributes are extracted from the user's request, possibly modified by the attribute type (e.g. for handling of U/V)
-# - All the matching rules are considered
-# - Only attributes listed in the rules are used to extract values from the user's request
-
-
-# Default types
-
-param:      Param;
-step:       Step;
-date:       Date;
-hdate:      Date;
-refdate:    Date;
-latitude:   Double;
-longitude:  Double;
-levelist:   Double;
-grid:       Grid;
-expver:     Expver;
-
-time:       Time;
-fcmonth:    Integer;
-
-number:     Integer;
-frequency:  Integer;
-direction:  Integer;
-channel:    Integer;
-
-instrument: Integer;
-ident:      Integer;
-
-diagnostic: Integer;
-iteration:  Integer;
-system:     Integer;
-method:     Integer;
-
-
-########################################################
-# These are the rules for the Climate DT
-
-# clte/wave
-[ class=d1, dataset=climate-dt, activity, experiment, generation, model, realization, expver, stream=clte/wave, date
-       [ resolution, type, levtype
-               [ time, levelist?, param, frequency?, direction? ]]
-]
-
-# clmn
-[ class=d1, dataset=climate-dt, activity, experiment, generation, model, realization, expver, stream=clmn, year
-       [ month, resolution, type, levtype
-               [ levelist?, param ]]
-]
-
-########################################################
-# These are the rules for the Extremes DT
-# oper/wave/lwda/lwwv
-[ class=d1, dataset=extremes-dt, expver, stream=oper/wave/lwda/lwwv, date, time
-        [ resolution?, type, levtype
-                [ step, levelist?, param, frequency?, direction? ]]
-]
-########################################################
-# These are the rules for the On-Demand Extremes DT
-# oper/wave
-[ class=d1, dataset=on-demand-extremes-dt, expver, stream=oper/wave, date, time
-        [ type, levtype
-                [ step, levelist?, param, frequency?, direction? ]]
-]
-########################################################
-
-
-########################################################
-#
-# These are the rules for rd
-# oper/wave/lwda/lwwv
-[ class=rd, expver, stream=oper/wave/lwda/lwwv/dcda/enfo, date, time, domain?
-        [ type, levtype
-                [ number?, step, levelist?, param, frequency?, direction? ]]
-]
-
-[ class=rd, expver, stream=mnth, domain?
-       [ type, levtype
-               [ date , time, step?, levelist?, param ]]
-]
\ No newline at end of file
diff --git a/config/local/language.yaml b/config/local/language.yaml
new file mode 100644
index 0000000..077b256
--- /dev/null
+++ b/config/local/language.yaml
@@ -0,0 +1,1253 @@
+---
+_field: &_field
+
+  class:
+    description: Class selects the main category of data to be retrieved, such as operational, research or AIFS.
+    category: data
+    default: od
+    flatten: false
+    type: enum
+    values:
+    # - [ai, operational aifs]
+    - [d1, DestinE data]
+    # - [od, operations]
+    # - [rd, research]
+
+
+  type:
+    category: data
+    default: an
+    flatten: false
+    type: enum
+    multiple: true
+    values:
+    # - [3g, 3d variational gradients]
+    # - [3v, 3d variational analysis]
+    # - [4g, 4d variational gradients]
+    # - [4i, 4d variational increments]
+    # - [4v, 4d variational analysis]
+    # - [ab, analysis bias]
+    # - [af, analysis feedback]
+    # - [ai, analysis input]
+    # - [an, analysis]
+    # - [as, adjoint singular vector]
+    # - [bf, bias-corrected forecast]
+    # - [cd, climate distribution]
+    # - [cf, control forecast]
+    # - [ci, clustering information]
+    # - [cl, climatology]
+    # - [cm, cluster means]
+    # - [cr, cluster representative]
+    # - [cs, cluster std deviations]
+    # - [cv, calibration validation forecast]
+    # - [ea, errors in analysis]
+    # - [ed, empirical distribution]
+    # - [ef, errors in first guess]
+    # - [efi, extreme forecast index]
+    # - [efic, extreme forecast index control]
+    # - [em, ensemble mean]
+    # - [eme, ensemble data assimilation model errors]
+    # - [emtm, ensemble mean of temporal mean]
+    # - [ep, event probability]
+    # - [es, ensemble standard deviation]
+    # - [est, ensemble statistics]
+    # - [estdtm, ensemble standard deviation of temporal mean]
+    # - [fa, forecast accumulation]
+    # - [fb, feedback]
+    - [fc, forecast]
+    # - [fcdfb, forecast departures feedback]
+    # - [fcmax, forecast maximum]
+    # - [fcmean, forecast mean]
+    # - [fcmin, forecast minimum]
+    # - [fcstdev, forecast standard deviation]
+    # - [ff, flux forcing realtime]
+    # - [fg, first guess]
+    # - [fp, forecast probability]
+    # - [fsoifb, forecast sensitivity to observations impact feedback]
+    # - [fu, fill-up]
+    # - [fx, flux forcing]
+    # - [ga, gfas analysis]
+    # - [gbf, bias-corrected gridbox]
+    # - [gai, gridded analysis input]
+    # - [go, gridded observations]
+    # - [gsd, gridded satellite data]
+    # - [gwt, weather type gridbox]
+    # - [hcmean, hindcast mean]
+    # - [ia, init. analysis]
+    # - [icp, initial condition perturbation]
+    # - [mpp, model physics perturbation]
+    # - [if, interim forecast]
+    # - [im, images]
+    # - [me, model errors]
+    # - [mfb, mondb feedback]
+    # - [oai, odb analysis input]
+    # - [ob, observations]
+    # - [of, ocean forward]
+    # - [ofb, odb feedback]
+    # - [oi, oi analysis]
+    # - [oldim, old format images]
+    # - [or, ocean reanalysis]
+    # - [pa, perturbed analysis]
+    # - [pb, probability boundary]
+    # - [pd, probability distribution]
+    - [pf, perturbed forecast]
+    # - [pfc, point values]
+    # - [ppm, point value metrics]
+    # - [s3, climate 30 days simulation]
+    # - [ses, scaled ensemble standard deviation]
+    # - [sf, sensitivity forecast]
+    # - [sfb, summary feedback]
+    # - [sfo, simulations with forcing]
+    # - [sg, sensitivity gradient]
+    # - [si, climate simulation]
+    # - [sim, simulated images]
+    # - [sot, shift of tails]
+    # - [ssd, simulated satellite data]
+    # - [sv, singular vector]
+    # - [svar, signal variance]
+    # - [taem, time average ensemble mean]
+    # - [taes, time average ensemble standard deviation]
+    # - [tpa, time processed analysis]
+    # - [tf, trajectory forecast]
+    # - [tu, tube]
+    # - [wem, weighted ensemble mean]
+    # - [wes, weighted ensemble standard deviation]
+    # - [wp, weather parameters]
+
+  stream:
+    description: Stream selects the kind of data to be retrieved, for example the forecast model or the ensemble model.
+    category: data
+    default: oper
+    flatten: false
+    type: enum
+    values:
+    - [clte, climate, Climate run output] # climate-dt
+    - [oper, da, daily archive, atmospheric model] # climate-dt / extremes-dt / on-demand-extremes-dt
+    - [wave, wv, wave model] # climate-dt / extremes-dt
+    - [lwda, long window daily archive] # extremes-dt
+    - [lwwv, long window wave] # extremes-dt
+    - [clmn, climate-monthly, Climate run monthly means output] # climate-dt
+  
+    # - [amap, analysis for multianalysis project]
+    # - [ammc, melbourne]
+    # - [cher, ch, chernobyl]
+
+    # - [cnrm, meteo france climate centre]
+    # - [cwao, montreal]
+    # - [dacl, daily climatology]
+    # - [dacw, daily climatology wave]
+    # - [dahc, daily archive hindcast]
+    # - [dcda, atmospheric model (delayed cutoff)]
+    # - [dcwv, wave model (delayed cutoff)]
+    # - [edmm, ensemble data assimilation monthly means]
+    # - [edmo, ensemble data assimilation monthly means of daily means]
+    # - [edzw, offenbach]
+    # - [eefh, extended ensemble forecast hindcast]
+    # - [eefo, extended ensemble prediction system]
+    # - [eehs, extended ensemble forecast hindcast statistics]
+    # - [efas, european flood awareness system (efas)]
+    # - [efcl, european flood awareness system (efas) climatology]
+    # - [efhc, ensemble forecast hindcasts (obsolete)]
+    # - [efho, ensemble forecast hindcast overlap]
+    # - [efhs, ensemble forecast hindcast statistics]
+    # - [efov, ensemble forecast overlap]
+    # - [efrf, european flood awareness system (efas) reforecasts]
+    # - [efse, european flood awareness system (efas) seasonal forecasts]
+    # - [efsr, european flood awareness system (efas) seasonal reforecasts]
+    # - [egrr, exeter, bracknell]
+    # - [ehmm, combined multi-model hindcast monthly means]
+    # - [elda, ensemble long window data assimilation]
+    # - [enda, ensemble data assimilation]
+    # - [enfh, ensemble forecast hindcasts]
+    # - [enfo, ef, ensemble prediction system]
+    # - [enwh, ensemble forecast wave hindcasts]
+    # - [esmm, combined multi-model monthly means]
+    # - [espd, ensemble supplementary data]
+    # - [ewda, ensemble wave data assimilation]
+    # - [ewhc, wave ensemble forecast hindcast (obsolete)]
+    # - [ewho, ensemble forecast wave hindcast overlap]
+    # - [ewla, ensemble wave long window data assimilation]
+    # - [ewmm, ensemble wave data assimilation monthly means]
+    # - [ewmo, ensemble wave data assimilation monthly means of daily means]
+    # - [fgge, fg]
+    # - [fsob, forecast sensitivity to observations]
+    # - [fsow, forecast sensitivity to observations wave]
+    # - [gfas, global fire assimilation system]
+    # - [gfra, global fire assimilation system reanalysis]
+    # - [kwbc, washington]
+    # - [lfpw, paris, toulouse]
+    # - [ma, means archive]
+    # - [maed, multianalysis ensemble data]
+    # - [mawm, wave anomaly means]
+    # - [mawv, multianalysis wave data]
+    # - [mdfa, monthly means of daily forecast accumulations]
+    # - [mfam, anomaly means]
+    # - [mfaw, wave anomalies]
+    # - [mfhm, hindcast means]
+    # - [mfhw, monthly forecast hindcasts wave]
+    # - [mfwm, wave real-time means]
+    # - [mhwm, wave hindcast means]
+    # - [mmaf, multi-model multi-annual forecast]
+    # - [mmam, multi-model multi-annual forecast means]
+    # - [mmaw, multi-model multi-annual forecast wave]
+    # - [mmsa, multi-model seasonal forecast monthly anomalies]
+    # - [mmsf, multi-model seasonal forecast]
+    # - [mmwm, multi-model multi-annual forecast wave means]
+    # - [mnfa, anomalies]
+    # - [mnfc, real-time]
+    # - [mnfh, hindcasts]
+    # - [mnfm, real-time means]
+    # - [mnfw, wave real-time]
+    # - [mnth, mo, monthly, monthly means]
+    # - [mnvr, monthly variance and covariance data using g. boer's step function]
+    # - [moda, monthly means of daily means]
+    # - [mofc, monthly forecast]
+    # - [mofm, monthly forecast means]
+    # - [monr, monthly means using g. boer's step function]
+    # - [mpic, max plank institute]
+    # - [msda, monthly standard deviation and covariance of daily means]
+    # - [msdc, mv, monthly standard deviation and covariance]
+    # - [msmm, multi-model seasonal forecast atmospheric monthly means]
+    # - [mswm, multi-model seasonal forecast wave monthly means]
+    # - [ocda, ocean data assimilation]
+    # - [ocea, ocean]
+    # - [olda, ocean Long window data assimilation]
+    # - [rjtd, tokyo]
+    # - [scda, atmospheric model (short cutoff)]
+    # - [scwv, wave model (short cutoff)]
+    # - [seap, sensitive area prediction]
+    # - [seas, seasonal forecast]
+    # - [sens, sf, sensitivity forecast]
+    # - [sfmm, seasonal forecast atmospheric monthly means]
+    # - [smma, seasonal monthly means anomalies]
+    # - [supd, sd, deterministic supplementary data]
+    # - [swmm, seasonal forecast wave monthly means]
+    # - [toga, tg]
+    # - [ukmo, ukmo climate centre]
+    # - [waef, we, wave ensemble forecast]
+    # - [wamd, wave monthly means of daily means]
+    # - [wamf, wave monthly forecast]
+    # - [wamo, wave monthly means]
+    # - [wams, multi-model seasonal forecast wave]
+    # - [wasf, wave seasonal forecast]
+    # - [wavm, wave model (standalone)]
+    # - [weef, wave extended ensemble forecast]
+    # - [weeh, wave extended ensemble forecast hindcast]
+    # - [wees, wave extended ensemble forecast hindcast statistics]
+    # - [wehs, wave ensemble forecast hindcast statistics]
+    # - [weov, wave ensemble forecast overlap]
+    # - [wfas, global flood awareness system (glofas)]
+    # - [wfcl, global flood awareness system (glofas) climatology]
+    # - [wfrf, global flood awareness system (glofas) reforecasts]
+    # - [wfse, global flood awareness system (glofas) seasonal forecasts]
+    # - [wfsr, global flood awareness system (glofas) seasonal reforecasts]
+    # - [wmfm, wave monthly forecast means]
+    # - [wvhc, wave hindcast]
+  expver:
+    description: Experiment number; 0001 is operational data.
+    category: data
+    default: '0001'
+    flatten: false
+    type: enum
+    values:
+      - ['0001', 'Operational Data']
+      - ['xxxx', 'Experimental Data']
+      - ['xxxy', 'Experimental Data']
+
+  dataset:
+    description: The dataset; for DestinE this is one of climate-dt, extremes-dt or on-demand-extremes-dt.
+    multiple: true
+    type: enum
+    values:
+    - [climate-dt, Climate Data]
+    - [extremes-dt, Extremes Data]
+    - [on-demand-extremes-dt, On-Demand Extremes Data]
+
+  # model: (see the DestinE ClimateDT definition of model further down)
+  #   category: data
+  #   type: lowercase
+
+  repres:
+    flatten: false
+    multiple: true
+    type: enum
+    values:
+    - gg
+    - sh
+    - ll
+    - np
+    - rl
+
+  obsgroup:
+    category: data
+    multiple: true
+    type: enum
+    values:
+    # - [conventional]
+    - [sat, satellite]
+    - [ers1]
+    - [trmm]
+    - [qscat]
+    - [reo3] # reo3 needs to stay for compatibility
+    # previously in "obsgroups.def"
+    - [hirs, 1,  HIRS ]
+    - [amsua, 2,  AMSUA ]
+    - [amsub, 3,  AMSUB ]
+    - [mhs, 4,  MHS ]
+    - [geos, 5,  GEOS ]
+    - [resat, 6,  RESAT ]
+    - [meris, 7,  MERIS ]
+    - [gpsro, 8,  GPSRO ]
+    - [satob, 9,  SATOB ]
+    - [scatt, 10,  SCATT ]
+    - [ssmi_as, 11,  SSMI ALL-SKY ]
+    - [iasi, 12,  IASI ]
+    - [airs, 13,  AIRS ]
+    - [ssmis_as, 14,  SSMIS ALL-SKY ]
+    - [tmi_as, 15,  TMI ALL-SKY ]
+    - [amsre_as, 16,  AMSRE ALL-SKY ]
+    - [conv, 17,  CONV ]
+    - [smos, 19,  SMOS ]
+    - [windsat_as, 20,  WINDSAT ALL-SKY ]
+    - [ssmi, 21,  SSMI ]
+    - [amsua_as, 22,  AMSUA ALL-SKY ]
+    - [amsre, 23,  AMSRE ]
+    - [tmi, 24,  TMI ]
+    - [ssmis, 25,  SSMIS ]
+    - [gbrad, 26,  GBRAD ]
+    - [mwhs, 27,  MWHS ]
+    - [mwts, 28,  MWTS ]
+    - [mwri_as, 29,  MWRI ALL-SKY ]
+    - [iras, 30,  IRAS ]
+    - [msu, 31,  MSU ]
+    - [ssu, 32,  SSU ]
+    - [vtpr1, 33,  VTPR1 ]
+    - [vtpr2, 34,  VTPR2 ]
+    - [atms, 35,  ATMS ]
+    - [resat_ak, 36,  RESAT AVERAGING KERNELS ]
+    - [cris, 37,  CRIS ]
+    - [wave_ip, 38,  WAVE INTEGRATED PARAMETERS ]
+    - [wave_sp, 39,  WAVE SPECTRA ]
+    - [raingg, 40,  RAINGG ]
+    - [sfc_ms, 41,  SURFACE MULTISENSOR ]
+    - [amsr2_as, 42,  AMSR-2 ALL-SKY ]
+    - [saphir_as, 43,  SAPHIR ALL-SKY ]
+    - [amsub_as, 44,  AMSUB ALL-SKY ]
+    - [mhs_as, 45,  MHS ALL-SKY ]
+    - [dwl, 46,  DOPPLER WIND LIDAR ]
+    - [iris, 47,  IRIS ]
+    - [aatsr, 49,  AATSR ]
+    - [atms_as, 50,  ATMS ALL-SKY ]
+    - [gmi_as, 51,  GMI ALL-SKY ]
+    - [godae_sst, 52,  GODAE SEA SURFACE TEMPERATURES ]
+    - [atovs_ms, 53,  ATOVS MULTISENSOR ]
+    - [atmospheric_composition, 54,  ATMOSPHERIC COMPOSITION ]
+    - [non_sfc_ms, 55,  NON-SURFACE MULTISENSOR ]
+    - [mwts2, 56,  MWTS2 ]
+    - [ssmi_1d, 57,  SSMI 1DVAR TCWV CLOUDY-SKY ]
+    - [mwhs2_as, 58,  MWHS2 ALL-SKY ]
+    - [ssmt2, 59,  SSMT2 ]
+    - [smap, 60,  SMAP ]
+    - [tovs_ms, 61,  TOVS MULTISENSOR ]
+    - [cloud_r, 62,  CLOUD REFLECTIVITY ]
+    - [cloud_l, 63,  CLOUD LIDAR ]
+    - [satellite_lightning, 64,  SATELLITE LIGHTNING ]
+    - [geos_vis, 65,  GEOS VIS ]
+    - [oconv, 66,  OCONV ]
+    - [mwts3_as, 67,  MWTS3 All-sky ]
+    - [giirs, 68,  GIIRS ]
+    - [test, 99,  TEST ]
+
+  reportype:
+    category: data
+    type: any
+    multiple: true
+
+  # rdbprefix
+
+  levtype:
+    description: The level type; can be pressure levels, the surface, model levels, etc.
+    category: data
+    default: pl
+    flatten: false
+    type: enum
+    values:
+    # - [cat, category]
+    # - [dp, depth]
+    # - [layer]
+    # - [ml, model levels]
+    - [pl, pressure levels]
+    # - [hl, height levels]
+    # - [pt, potential temperature]
+    # - [pv, potential vorticity]
+    - [sfc, surface]
+    # - [sol, surface other (multi)levels]
+    # - [wv, ocean wave]
+    # - [o2d, ocean surface]
+    # - [o3d, ocean model levels]
+    never:
+    - type: ssd
+
+  levelist:
+    category: data
+    multiple: true
+    by: 1
+    default:
+    - 1000
+    - 850
+    - 700
+    - 500
+    - 400
+    - 300
+    never:
+    - levtype: [sfc, o2d]
+    - type: ssd
+    type: enum
+    values:
+        - [1, ]
+        - [5, ]
+        - [10, ]
+        - [20, ]
+        - [30, ]
+        - [50, ]
+        - [70, ]
+        - [100, ]
+        - [150, ]
+        - [200, ]
+        - [250, ]
+        - [300, ]
+        - [400, ]
+        - [500, ]
+        - [600, ]
+        - [700, ]
+        - [850, ]
+        - [925, ]
+        - [1000, ]
+
+  param:
+    category: data
+    default: 129
+    multiple: true
+    type: param
+    never:
+    - type: [tf, ob]
+    values:
+        - [60, "Potential vorticity"]
+        - [129, "Geopotential"]
+        - [130, "Temperature"]
+        - [131, "U component of wind"]
+        - [132, "V component of wind"]
+        - [133, "Specific humidity"]
+        - [135, "Vertical velocity"]
+        - [157, "Relative humidity"]
+        - [246, "Specific cloud liquid water content"]
+
+
+#################################################################
+
+  # year
+  # decade
+  # month
+
+  date:
+    category: data
+    default: 0
+    type: enum
+    multiple: true
+    values:
+    - [20211021, ]  
+
+  year:
+    category: data
+    type: to-by-list
+    multiple: true
+    by: 1
+
+  month:
+    category: data
+    flatten: true
+    type: enum
+    multiple: true
+    values:
+    - [1, jan, January]
+    - [2, feb, February]
+    - [3, mar, March]
+    - [4, apr, April]
+    - [5, may, May]
+    - [6, jun, June]
+    - [7, jul, July]
+    - [8, aug, August]
+    - [9, sep, September]
+    - [10, oct, October]
+    - [11, nov, November]
+    - [12, dec, December]
+
+  # verify
+  # refdate
+
+  hdate:
+    category: data
+    multiple: true
+    only:
+    - stream:
+      - enfh
+      - enwh
+      - efho
+      - ehmm
+      - ewho
+      - eefh
+      - weeh
+    type: integer
+
+  offsetdate:
+    category: data
+    multiple: true
+    type: date
+
+  fcmonth:
+    category: data
+    multiple: true
+    by: 1
+    type: to-by-list
+
+  fcperiod:
+    category: data
+    multiple: true
+    type: integer
+
+  time:
+    category: data
+    default: '1200'
+    multiple: true
+    type: enum
+    values:
+      - ["0000", ]
+      - ["0100", ]
+      - ["0200", ]
+      - ["0300", ]
+      - ["0400", ]
+      - ["0500", ]
+      - ["0600", ]
+      - ["0700", ]
+      - ["0800", ]
+      - ["0900", ]
+      - ["1000", ]
+      - ["1100", ]
+      - ["1200", ]
+      - ["1300", ]
+      - ["1400", ]
+      - ["1500", ]
+      - ["1600", ]
+      - ["1700", ]
+      - ["1800", ]
+      - ["1900", ]
+      - ["2000", ]
+      - ["2100", ]
+      - ["2200", ]
+      - ["2300", ]
+
+  offsettime:
+    category: data
+    multiple: true
+    type: time
+
+  # leadtime
+  # opttime
+  # range
+
+  step:
+    description: The forecast step, in hours past the forecast date/time.
+    category: data
+    multiple: true
+    by: 12
+    default: 0
+    type: range
+    never:
+    - dataset:
+      - climate-dt
+    - stream:
+      - msmm
+      - mmsa
+      - swmm
+
+  anoffset:
+    category: data
+    multiple: true
+    type: integer
+
+  reference:
+    category: data
+    multiple: true
+    type: integer
+
+#################################################################
+
+  # cluster
+  # probability
+
+  number:
+    description: Selects a subset of ensemble members
+    category: data
+    multiple: true
+    aliases:
+    - ensemble
+    by: 1
+    only:
+    - type: [pf, cr, cm, fcmean, fcmin, fcmax, fcstdev, sot, fc, wp, 4i, 4v]
+    never:
+    # This is to prevent number with type=fc and stream=oper
+    - stream: [oper, wave]
+    type: to-by-list
+
+  quantile:
+    category: data
+    multiple: true
+    only:
+    - type:
+      - pd
+      - pb
+      - taem
+      - cd
+      # - sot
+    type: to-by-list-quantile
+    denominators: [2,3,4,5,10,100,1000]
+    by: 1
+  domain:
+    description: The large-scale geographic region.
+    category: data
+    default: g
+    flatten: false
+    type: enum
+    never:
+    - dataset:
+      - climate-dt
+    values:
+    # - [a, north west europe]
+    # - [b, north east europe, baltic and black sea]
+    - [c, south west europe]
+    - [d, south east europe]
+    - [e, europe]
+    # - [f, fastex]
+    - [g, globe, general european area]
+    # - [h]
+    # - [i]
+    # - [j]
+    # - [k]
+    # - [l]
+    # - [m, mediterranean]
+    # - ['n', northern hemisphere]
+    # - [o]
+    # - [p]
+    # - [q]
+    # - [r]
+    # - [s, southern hemisphere]
+    # - [t, tropics]
+    # - [u, tropics 2]
+    # - [v]
+    # - [w, western atlantic]
+    # - [x]
+    # - ['y']
+    # - [z]
+
+  frequency:
+    category: data
+    multiple: true
+    by: 1
+    only:
+    - param:
+      - '140251'
+    type: to-by-list
+
+  direction:
+    category: data
+    multiple: true
+    by: 1
+    only:
+    - param:
+      - '140251'
+    type: to-by-list
+
+  diagnostic:
+    category: data
+    type: integer
+    multiple: true
+
+  iteration:
+    category: data
+    type: integer
+    multiple: true
+
+  channel:
+    category: data
+    only:
+    - type: ssd
+    type: integer
+    multiple: true
+
+  ident:
+    category: data
+    only:
+    - type: ssd
+    type: integer
+    multiple: true
+
+  instrument:
+    category: data
+    only:
+    - type: ssd
+    type: integer
+    multiple: true
+
+  method:
+    category: data
+    type: integer
+
+  origin:
+    category: data
+    multiple: true
+    type: enum
+    values:
+    - [ammc, 1, melbourne]
+    - [babj, 38, beijing]
+    - [cmcc]
+    - [cnmc, 80]
+    - [consensus, 255]
+    - [crfc, 239, cerfacs]
+    - [cwao, 54, montreal]
+    - [ecmf, 98, ecmwf]
+    - [edzw, dwd, 78, offenbach]
+    - [egrr, 74, exeter, bracknell]
+    - [enmi, 88, oslo]
+    - [fnmo, fnmoc, 58, fleet numerical]
+    - [hadc, 247, hadley centre]
+    - [ifmk, 246]
+    - [ingv, 235]
+    - [knmi, 245]
+    - [kwbc, 7, washington]
+    - [lemm, 214, madrid]
+    - [lfpw, 84, 85, paris, toulouse]
+    - [rjtd, 34, tokyo]
+    - [rksl, 40, seoul]
+    - [sbsj, 46, cptec]
+    - [vuwien, 244, university of vienna]
+
+  system:
+    category: data
+    type: integer
+
+#######################################################################
+# DestinE ClimateDT related keywords
+
+
+  model:
+    type: enum
+    description: Which climate model to use.
+    values:
+      - [ifs-fesom, Integrated Forecast System - FESOM]
+
+  activity:
+    category: data
+    type: enum
+    values:
+      - [story-nudging, ]
+
+  experiment:
+    category: data
+    type: enum
+    values:
+      - [tplus2.0k, ]
+
+  generation:
+    category: data
+    type: enum
+    values:
+      - [1, ]
+
+  realization:
+    category: data
+    type: integer
+    values:
+      - [1, ]
+
+  resolution:
+    category: data
+    type: enum
+    values:
+      - [standard, ]
+
+#######################################################################
+
+_observation: &_observation
+
+  obstype:
+    category: data
+    type: any
+    multiple: true
+
+  obsgroup:
+    category: data
+    type: any
+    multiple: true
+
+#######################################################################
+
+_postproc: &_postproc
+
+  accuracy:
+    category: postproc
+    flatten: false
+    type: [enum, integer]
+    values:
+    - [av]
+    - ['off', normal, auto]
+
+  bitmap:
+    category: postproc
+    flatten: false
+    type: any
+
+  format:
+    category: postproc
+    flatten: false
+    type: enum
+    values:
+    - - grib
+      - grib1
+      - gb
+    - - grib2
+    - - bufr
+      - bf
+    - - grid
+      - gd
+    - odb
+    - ascii
+
+  frame:
+    category: postproc
+    type: integer
+
+  gaussian:
+    category: postproc
+    type: enum
+    values:
+    - reduced
+    - regular
+
+  area:
+    category: postproc
+    flatten: false
+    multiple: true
+    type: [float, enum]
+    values:
+    - ['off', g, global]
+    - [e, europe]
+
+  grid:
+    category: postproc
+    flatten: false
+    multiple: true
+    type: [enum, float, regex]
+    values:
+    - auto
+    - N16
+    - N24
+    - N32
+    - N48
+    - N64
+    - N80
+    - N96
+    - N128
+    - N160
+    - N200
+    - N256
+    - N320
+    - N400
+    - N512
+    - N576
+    - N640
+    - N800
+    - N1024
+    - N1280
+    - N1600
+    - N2000
+    - N4000
+    - N8000
+    regex:
+    - '^[oOfF][1-9][0-9]+$'
+    uppercase: true
+
+  interpolation:
+    category: postproc
+    flatten: false
+    type: enum
+    values:
+    - - linear
+    - - nearest-lsm
+      - nearest lsm
+    - - 'off'
+      - default
+      - any
+
+  packing:
+    category: postproc
+    flatten: false
+    type: enum
+    values:
+    - - so
+      - second order
+    - ['off', av]
+    - [co, complex]
+    - simple
+    - ccsds
+
+  resol:
+    category: postproc
+    flatten: false
+    aliases:
+        - tra
+    type: [enum, integer]
+    values:
+    - - 'off'
+      - av
+      - reduced gaussian 160
+
+  rotation:
+    category: postproc
+    flatten: false
+    multiple: true
+    type: float
+
+  intgrid:
+    category: postproc
+    flatten: false
+    type: [enum, regex]
+    values:
+    - 'off'
+    - auto
+    - N32
+    - N48
+    - N64
+    - N80
+    - N96
+    - N128
+    - N160
+    - N192
+    - N200
+    - N256
+    - N320
+    - N400
+    - N512
+    - N640
+    - N800
+    - N912
+    - N1024
+    - N1280
+    regex:
+    - '^[oOfF][1-9][0-9]+$'
+
+  truncation:
+    category: postproc
+    flatten: false
+    type: [enum, integer]
+    values:
+    - auto
+    - 'off'
+#######################################################################
+
+_obspproc: &_obspproc
+
+  filter:
+    type: any
+    category: postproc
+
+  ident:
+    type: any
+    category: postproc
+
+
+#######################################################################
+
+disseminate:
+  <<: *_field
+  <<: *_postproc
+
+  requirements:
+    type: any
+
+  use:
+    category: data
+    flatten: false
+    multiple: true
+    type: enum
+    values:
+    - bc
+    - monday
+    - tuesday
+    - wednesday
+    - thursday
+    - friday
+    - saturday
+    - sunday
+
+  option:
+    default: normal
+    flatten: false
+    multiple: true
+    type: enum
+    values:
+    - normal
+    - delay
+    - asap
+    - gts
+    - opendata
+
+  compatibility:
+    category: postproc
+    flatten: false
+    multiple: true
+    type: enum
+    values:
+    - 'off'
+    - 'no-local-extension'
+
+  priority:
+    flatten: false
+    type: integer
+
+  target:
+    flatten: false
+    type: any
+
+##############################################################
+
+archive:
+  <<: *_field
+  <<: *_observation
+
+
+  database:
+    flatten: false
+    multiple: true
+    type: any
+
+  source:
+    flatten: false
+    multiple: true
+    type: any
+
+  expect:
+    flatten: false
+    multiple: false
+    type: integer
+
+##############################################################
+
+retrieve:
+
+  <<: *_field
+  <<: *_observation
+  <<: *_postproc
+  <<: *_obspproc
+
+  target:
+    flatten: false
+    multiple: true
+    type: any
+
+  expect:
+    flatten: false
+    multiple: false
+    type: integer
+
+  fieldset:
+    flatten: false
+    multiple: false
+    type: any
+
+  database:
+    flatten: false
+    multiple: true
+    type: any
+
+  optimise:
+    type: enum
+    values:
+    - true
+    - false
+    default:
+      "off"
+
+  padding:
+    flatten: false
+    type: enum
+    values:
+    - none
+    - auto
+
+##############################################################
+
+read:
+  source:
+    flatten: false
+    multiple: true
+    type: any
+
+  <<: *_field
+  <<: *_observation
+  <<: *_postproc
+  <<: *_obspproc
+
+  target:
+    flatten: false
+    multiple: true
+    type: any
+
+  fieldset:
+    flatten: false
+    multiple: false
+    type: any
+
+  _defaults:
+    class: null
+    date: null
+    domain: null
+    expver: null
+    levelist: null
+    levtype: null
+    param: null
+    step: null
+    stream: null
+    time: null
+    type: null
+
+  _options:
+    param:
+      # expand_with:  # In case no type/stream/levtype is provided
+      #   type: an
+      #   stream: oper
+      #   levtype: pl
+      first_rule: true
+##############################################################
+
+get:
+
+  tape:
+    flatten: false
+    multiple: false
+    type: any
+
+  database:
+    flatten: false
+    multiple: true
+    type: any
+
+  target:
+    flatten: false
+    multiple: true
+    type: any
+
+##############################################################
+
+list:
+
+  <<: *_field
+  <<: *_observation
+
+  database:
+    flatten: false
+    multiple: true
+    type: any
+
+  target:
+    flatten: false
+    multiple: true
+    type: any
+
+  _defaults:
+    # class: null
+    date: null
+    domain: null
+    expver: null
+    levelist: null
+    levtype: null
+    param: null
+    step: null
+    stream: null
+    time: null
+    type: null
+
+##############################################################
+
+compute:
+  formula:
+    flatten: false
+    multiple: false
+    type: any
+
+  fieldset:
+    flatten: false
+    multiple: false
+    type: any
+
+##############################################################
+
+write:
+
+  fieldset:
+    flatten: false
+    multiple: false
+    type: any
+
+  target:
+    flatten: false
+    multiple: true
+    type: any
+
+##############################################################
+
+pointdb:
+  lat:
+    multiple: false
+    type: float
+
+  lon:
+    multiple: false
+    type: float
+
+  <<: *_field
+
+  _defaults:
+    class: null
+    date: null
+    domain: null
+    expver: null
+    levelist: null
+    levtype: null
+    param: null
+    step: null
+    stream: null
+    time: null
+    type: null
+
+  _options:
+    param:
+      # expand_with:  # In case no type/stream/levtype is provided
+      #   type: an
+      #   stream: oper
+      #   levtype: pl
+      first_rule: true
+
+end: {}
diff --git a/stac_server/main.py b/stac_server/main.py
index 6c10595..c9f8e65 100644
--- a/stac_server/main.py
+++ b/stac_server/main.py
@@ -1,24 +1,14 @@
 import json
-import yaml
-from pathlib import Path
 import os
-from datetime import datetime
 from collections import defaultdict
 from typing import Any, Dict
-import yaml
-import os
-
-from fastapi import FastAPI, Request
-from fastapi.middleware.cors import CORSMiddleware
-from fastapi.staticfiles import StaticFiles
-from fastapi.responses import RedirectResponse, FileResponse
-from fastapi.templating import Jinja2Templates
-
-
-from TreeTraverser.fdb_schema import FDBSchemaFile 
-from TreeTraverser.CompressedTree import CompressedTree
 
 import redis
+import yaml
+from fastapi import FastAPI, Request
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import FileResponse
+from tree_traverser import CompressedTree
 
 app = FastAPI()
 app.add_middleware(
@@ -33,20 +23,26 @@ app.add_middleware(
 async def favicon():
     return FileResponse("favicon.ico")
 
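+# Load the service config. If it defines "local_cache", the compressed catalog
+# is read from that JSON file instead of being fetched from redis.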
+with open(os.environ.get("CONFIG_DIR", ".") + "/config.yaml", "r") as f:
+    config = yaml.safe_load(f)
 
-print("Getting cache from redis")
-r = redis.Redis(host=os.environ.get("REDIS_HOST", "localhost"), port=6379, db=0)
-json_data = r.get('compressed_catalog')
-if not json_data:
-    print("Didn't find compressed tree in redis using empty tree")
-    c_tree = CompressedTree({})
+if "local_cache" in config:
+    print("Getting cache from local file")
+    with open(config["local_cache"], "r") as f:
+        json_data = f.read()
+    print("Found compressed catalog in local file")
+else:
+    print("Getting cache from redis")
+    r = redis.Redis(host=os.environ.get("REDIS_HOST", "localhost"), port=6379, db=0)
+    json_data = r.get('compressed_catalog')
+
+print("Loading tree to json")
+if not json_data:
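+    # No cached catalog found; fall back to an empty tree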
+    c_tree = CompressedTree.from_json({})
 else:
-    print("Loading tree to json")
     compressed_tree_json = json.loads(json_data)
     c_tree = CompressedTree.from_json(compressed_tree_json)
 
-
-
 print("Partialy decompressing tree, shoud be able to skip this step in future.")
 tree = c_tree.reconstruct_compressed_ecmwf_style()
 
@@ -61,9 +57,6 @@ config = {
 with open(config["mars_language"], "r") as f:
     mars_language = yaml.safe_load(f)["_field"]
 
-###### Load FDB Schema
-schema = FDBSchemaFile(config["fdb_schema"])
-
 def request_to_dict(request: Request) -> Dict[str, Any]:
     # Convert query parameters to dictionary format
     request_dict = dict(request.query_params)
@@ -167,20 +160,6 @@ async def get_STAC(request: Request):
     request_dict = request_to_dict(request)
     paths = await api_paths(request)
 
-    # # Run the schema matching logic
-    # matches = schema.match_all(dict(v.split("=") for v in path))
-
-    # # Only take the longest matches
-    # max_len = max(len(m) for m in matches)
-    # matches = [m for m in matches if len(m) == max_len]
-
-    # # Take the ends of all partial matches, ignore those that are full matches
-    # # Full matches are indicated by the last key having boolean value True
-    # key_frontier = defaultdict(list)
-    # for match in matches:
-    #     if not match[-1]:
-    #         key_frontier[match[-1].key].append([m for m in match[:-1]])
-
 
     def make_link(key_name, paths, values):
         """Take a MARS Key and information about which paths matched up to this point and use it to make a STAC Link"""
@@ -221,7 +200,7 @@ async def get_STAC(request: Request):
     def value_descriptions(key, values):
         return {
             v[0] : v[-1] for v in mars_language.get(key, {}).get("values", [])
-            if len(v) > 1 and v[0] in values
+            if len(v) > 1 and v[0] in list(values)
         }
 
     descriptions = {
diff --git a/stac_server/run.sh b/stac_server/run.sh
index 74f2741..c3b7060 100755
--- a/stac_server/run.sh
+++ b/stac_server/run.sh
@@ -1,3 +1,3 @@
 parent_path=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P )
 cd "$parent_path"
-REDIS_HOST=localhost CONFIG_DIR=../config/destinE fastapi dev ./main.py --port 8124 --reload
\ No newline at end of file
+CONFIG_DIR=../config/local fastapi dev ./main.py --port 8124 --reload
\ No newline at end of file
diff --git a/tree_compresser/README.md b/tree_compresser/README.md
index e69de29..2a389c5 100644
--- a/tree_compresser/README.md
+++ b/tree_compresser/README.md
@@ -0,0 +1,5 @@
+
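+Build and install the Python bindings in development mode (requires a Rust toolchain):
+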
+```
+pip install maturin
+maturin develop
+```
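+
+A minimal usage sketch (assuming the built package is importable as `tree_traverser`, as it is in `stac_server/main.py`):
+
+```
+from tree_traverser import CompressedTree
+
+# Build a compressed tree from a plain nested dict of "key=value" nodes
+c_tree = CompressedTree({"class=d1": {"expver=0001": {}, "expver=xxxx": {}}})
+
+# Reconstruct an ECMWF-style view in which children sharing an identical
+# subtree are merged into a single "key=value1,value2" node
+print(c_tree.reconstruct_compressed_ecmwf_style())
+```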
\ No newline at end of file
diff --git a/tree_compresser/python_src/tree_traverser/CompressedTree.py b/tree_compresser/python_src/tree_traverser/CompressedTree.py
index 5f8c095..1422445 100644
--- a/tree_compresser/python_src/tree_traverser/CompressedTree.py
+++ b/tree_compresser/python_src/tree_traverser/CompressedTree.py
@@ -1,6 +1,5 @@
 import json
 from collections import defaultdict
-from typing import TypeVar
 from pathlib import Path
 
 Tree = dict[str, "Tree"]
@@ -71,11 +70,13 @@ class CompressedTree():
         k, *rest = path
         return self._add_to_cache(RefcountedDict({k : self._cache_path(rest)}))
     
-    def reconstruct(self) -> dict[str, dict]:
+    def reconstruct(self, max_depth=None) -> dict[str, dict]:
         "Reconstruct the tree as a normal nested dictionary"
-        def reconstruct_node(h : int) -> dict[str, dict]:
-            return {k : reconstruct_node(v) for k, v in self.cache[h].items()}
-        return reconstruct_node(self.root_hash)
+        def reconstruct_node(h : int, depth : int) -> dict[str, dict]:
+            if max_depth is not None and depth > max_depth:
+                return {}
+            return {k : reconstruct_node(v, depth+1) for k, v in self.cache[h].items()}
+        return reconstruct_node(self.root_hash, 0)
     
     def reconstruct_compressed(self) -> dict[str, dict]:
         "Reconstruct the tree as a normal nested dictionary"
@@ -87,18 +88,18 @@ class CompressedTree():
             return {"/".join(keys) : reconstruct_node(h) for h, keys in dedup.items()}
         return reconstruct_node(self.root_hash)
     
-    def reconstruct_compressed_ecmwf_style(self) -> dict[str, dict]:
+    def reconstruct_compressed_ecmwf_style(self, max_depth=None, from_node=None) -> dict[str, dict]:
         "Reconstruct the tree as a normal nested dictionary"
-        def reconstruct_node(h : int) -> dict[str, dict]:
+        def reconstruct_node(h : int, depth : int) -> dict[str, dict]:
+            if max_depth is not None and depth > max_depth: 
+                return {}
             dedup : dict[tuple[int, str], set[str]] = defaultdict(set)
             for k, h2 in self.cache[h].items():
                 key, value = k.split("=")
                 dedup[(h2, key)].add(value)
 
-            
-
-            return {f"{key}={','.join(values)}" : reconstruct_node(h) for (h, key), values in dedup.items()}
-        return reconstruct_node(self.root_hash)
+            return {f"{key}={','.join(values)}" : reconstruct_node(h, depth=depth+1) for (h, key), values in dedup.items()}
+        return reconstruct_node(from_node or self.root_hash, depth=0)
     
     def __init__(self, tree : Tree):
         self.cache = {}
@@ -124,8 +125,8 @@ class CompressedTree():
                 h = loc[key] # get the hash of the subtree
                 loc = self.cache[h] # get the subtree
             else:
-                return False, keys[:i]
-        return True, keys
+                return False, keys[:i], h
+        return True, keys, h
 
     def keys(self, keys : tuple[str, ...] = ()) -> list[str] | None:
         loc = self.tree
diff --git a/tree_compresser/python_src/tree_traverser/__init__.py b/tree_compresser/python_src/tree_traverser/__init__.py
index f8c6565..dcd3b3c 100644
--- a/tree_compresser/python_src/tree_traverser/__init__.py
+++ b/tree_compresser/python_src/tree_traverser/__init__.py
@@ -1,2 +1,2 @@
 from . import rust as backend
-from .CompressedTree import CompressedTree
\ No newline at end of file
+from .CompressedTree import CompressedTree, RefcountedDict
\ No newline at end of file
diff --git a/tree_compresser/rust_src/compressed_tree.rs b/tree_compresser/rust_src/compressed_tree.rs
new file mode 100644
index 0000000..9148d54
--- /dev/null
+++ b/tree_compresser/rust_src/compressed_tree.rs
@@ -0,0 +1,334 @@
+#![allow(dead_code)]
+
+use std::rc::Rc;
+use smallstr::SmallString;
+
+use slotmap::{new_key_type, SlotMap};
+
+new_key_type! {
+    struct NodeId;
+}
+
+type CompactString = SmallString<[u8; 16]>;
+
+#[derive(Clone)]
+enum NodeValueTypes {
+    String(CompactString),
+    Int(i32),
+}
+
+impl From<&str> for NodeValueTypes {
+    fn from(s: &str) -> Self {
+        NodeValueTypes::String(CompactString::from(s))
+    }
+}
+
+impl From<i32> for NodeValueTypes {
+    fn from(i: i32) -> Self {
+        NodeValueTypes::Int(i)
+    }
+}
+
+enum NodeValue {
+    Single(NodeValueTypes),
+    Multiple(Vec<NodeValueTypes>),
+}
+
+struct Node<Payload> {
+    key: Rc<String>,
+    value: NodeValue,
+    parent: Option<NodeId>,
+    prev_sibling: Option<NodeId>,
+    next_sibling: Option<NodeId>,
+    // a vector may be faster for traversal, but a linked list should be faster for insertion
+    children: Option<(NodeId, NodeId)>, // (first_child, last_child)
+    data: Option<Payload>,
+}
+
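+/// Arena-style tree: nodes live in a SlotMap and refer to one another by NodeId;
+/// the children of a node form a doubly linked list via prev/next_sibling.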
+struct QueryTree<Payload> {
+    nodes: SlotMap<NodeId, Node<Payload>>,
+}
+
+impl<Payload> QueryTree<Payload> {
+    fn new() -> Self {
+        QueryTree {
+            nodes: SlotMap::with_key(),
+        }
+    }
+
+    // Adds a node with a key and single value
+    fn add_node<S>(&mut self, key: &Rc<String>, value: S, parent: Option<NodeId>) -> NodeId
+    where
+        S: Into<NodeValueTypes>,
+    {
+        let node_id = self.nodes.insert_with_key(|_| Node {
+            key: Rc::clone(key),
+            value: NodeValue::Single(value.into()),
+            parent,
+            prev_sibling: None,
+            next_sibling: None,
+            children: None,
+            data: None,
+        });
+
+        if let Some(parent_id) = parent {
+            // Determine if parent has existing children
+            if let Some((first_child_id, last_child_id)) = self.nodes[parent_id].children {
+                // Update the last child's `next_sibling`
+                {
+                    let last_child = &mut self.nodes[last_child_id];
+                    last_child.next_sibling = Some(node_id);
+                }
+
+                // Update the new node's `prev_sibling`
+                {
+                    let new_node = &mut self.nodes[node_id];
+                    new_node.prev_sibling = Some(last_child_id);
+                }
+
+                // Update parent's last child
+                let parent_node = &mut self.nodes[parent_id];
+                parent_node.children = Some((first_child_id, node_id));
+            } else {
+                // No existing children
+                let parent_node = &mut self.nodes[parent_id];
+                parent_node.children = Some((node_id, node_id));
+            }
+        }
+
+        node_id
+    }
+
+    // Add a single value to a node
+    fn add_value<S>(&mut self, node_id: NodeId, value: S)
+    where
+        S: Into<NodeValueTypes>,
+    {
+        if let Some(node) = self.nodes.get_mut(node_id) {
+            match &mut node.value {
+                NodeValue::Single(v) => {
+                    let values = vec![v.clone(), value.into()];
+                    node.value = NodeValue::Multiple(values);
+                }
+                NodeValue::Multiple(values) => {
+                    values.push(value.into());
+                }
+            }
+        }
+    }
+
+    // Add multiple values to a node
+    fn add_values<S>(&mut self, node_id: NodeId, values: Vec<S>)
+    where
+        S: Into<NodeValueTypes>,
+    {
+        if let Some(node) = self.nodes.get_mut(node_id) {
+            match &mut node.value {
+                NodeValue::Single(v) => {
+                    let mut new_values = vec![v.clone()];
+                    new_values.extend(values.into_iter().map(|v| v.into()));
+                    node.value = NodeValue::Multiple(new_values);
+                }
+                NodeValue::Multiple(existing_values) => {
+                    existing_values.extend(values.into_iter().map(|v| v.into()));
+                }
+            }
+        }
+    }
+
+    fn get_node(&self, node_id: NodeId) -> Option<&Node<Payload>> {
+        self.nodes.get(node_id)
+    }
+
+    // TODO: better if this returns an iterator?
+    fn get_children(&self, node_id: NodeId) -> Vec<NodeId> {
+        let mut children = Vec::new();
+
+        if let Some(node) = self.get_node(node_id) {
+            if let Some((first_child_id, _)) = node.children {
+                let mut current_id = Some(first_child_id);
+                while let Some(cid) = current_id {
+                    children.push(cid);
+                    current_id = self.nodes[cid].next_sibling;
+                }
+            }
+        }
+
+        children
+    }
+
+    fn remove_node(&mut self, node_id: NodeId) {
+        // Collect the children first: once the node is removed from the slotmap,
+        // get_children(node_id) would return an empty list and the subtree would leak.
+        let children_ids = self.get_children(node_id);
+
+        // Remove the node and update parent and siblings
+        if let Some(node) = self.nodes.remove(node_id) {
+            // Update the parent's children; the parent (and the siblings below) may
+            // already have been removed while tearing down a whole subtree.
+            if let Some(parent_node) = node.parent.and_then(|id| self.nodes.get_mut(id)) {
+                if let Some((first_child_id, last_child_id)) = parent_node.children {
+                    if first_child_id == node_id && last_child_id == node_id {
+                        // Node was the only child
+                        parent_node.children = None;
+                    } else if first_child_id == node_id {
+                        // Node was the first child
+                        parent_node.children = Some((node.next_sibling.unwrap(), last_child_id));
+                    } else if last_child_id == node_id {
+                        // Node was the last child
+                        parent_node.children = Some((first_child_id, node.prev_sibling.unwrap()));
+                    }
+                }
+            }
+
+            // Update siblings, if they still exist
+            if let Some(prev) = node.prev_sibling.and_then(|id| self.nodes.get_mut(id)) {
+                prev.next_sibling = node.next_sibling;
+            }
+            if let Some(next) = node.next_sibling.and_then(|id| self.nodes.get_mut(id)) {
+                next.prev_sibling = node.prev_sibling;
+            }
+
+            // Recursively remove the children collected above
+            for child_id in children_ids {
+                self.remove_node(child_id);
+            }
+        }
+    }
+
+    fn is_root(&self, node_id: NodeId) -> bool {
+        self.nodes[node_id].parent.is_none()
+    }
+
+    fn is_leaf(&self, node_id: NodeId) -> bool {
+        self.nodes[node_id].children.is_none()
+    }
+
+    fn add_payload(&mut self, node_id: NodeId, payload: Payload) {
+        if let Some(node) = self.nodes.get_mut(node_id) {
+            node.data = Some(payload);
+        }
+    }
+
+    fn print_tree(&self) {
+        // Find all root nodes (nodes without a parent)
+        let roots: Vec<NodeId> = self
+            .nodes
+            .iter()
+            .filter_map(|(id, node)| {
+                if node.parent.is_none() {
+                    Some(id)
+                } else {
+                    None
+                }
+            })
+            .collect();
+
+        // Iterate through each root node and print its subtree
+        for (i, root_id) in roots.iter().enumerate() {
+            let is_last = i == roots.len() - 1;
+            self.print_node(*root_id, String::new(), is_last);
+        }
+    }
+
+    /// Recursively prints a node and its children.
+    ///
+    /// - `node_id`: The current node's ID.
+    /// - `prefix`: The string prefix for indentation and branch lines.
+    /// - `is_last`: Boolean indicating if the node is the last child of its parent.
+    fn print_node(&self, node_id: NodeId, prefix: String, is_last: bool) {
+        // Retrieve the current node
+        let node = match self.nodes.get(node_id) {
+            Some(n) => n,
+            None => return, // Node not found; skip
+        };
+
+        // Determine the branch character
+        let branch = if prefix.is_empty() {
+            "" // Root node doesn't have a branch
+        } else if is_last {
+            "└── " // Last child
+        } else {
+            "├── " // Middle child
+        };
+
+        // Print the current node's key and values
+        print!("{}{}{}", prefix, branch, node.key);
+        match &node.value {
+            NodeValue::Single(v) => match v {
+                NodeValueTypes::String(s) => println!(": ({})", s),
+                NodeValueTypes::Int(i) => println!(": ({})", i),
+            },
+            NodeValue::Multiple(vs) => {
+                let values: Vec<String> = vs
+                    .iter()
+                    .map(|v| match v {
+                        NodeValueTypes::String(s) => s.to_string(),
+                        NodeValueTypes::Int(i) => i.to_string(),
+                    })
+                    .collect();
+                println!(": ({})", values.join(", "));
+            }
+        }
+
+        // Prepare the prefix for child nodes
+        let new_prefix = if prefix.is_empty() {
+            if is_last {
+                "    ".to_string()
+            } else {
+                "│   ".to_string()
+            }
+        } else {
+            if is_last {
+                format!("{}    ", prefix)
+            } else {
+                format!("{}│   ", prefix)
+            }
+        };
+
+        // Retrieve and iterate through child nodes
+        if let Some((_first_child_id, _last_child_id)) = node.children {
+            let children = self.get_children(node_id);
+            let total = children.len();
+            for (i, child_id) in children.iter().enumerate() {
+                let child_is_last = i == total - 1;
+                self.print_node(*child_id, new_prefix.clone(), child_is_last);
+            }
+        }
+    }
+}
+
+fn main() {
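+    // Quick benchmark: build a tree of ~10 million identical nodes
+    // (100 x 100 x 1000 fan-out) and time the insertions.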
+    let mut tree: QueryTree<i16> = QueryTree::new();
+
+    let value = "hello";
+    let axis = Rc::new("foo".to_string());
+
+    let root_id = tree.add_node(&axis, value, None);
+
+    use std::time::Instant;
+    let now = Instant::now();
+
+    for _ in 0..100 {
+        // let child_value = format!("child_val{}", i);
+        let child_id = tree.add_node(&axis, value, Some(root_id));
+        // tree.add_value(child_id, value);
+
+        for _ in 0..100 {
+            // let gchild_value = format!("gchild_val{}", j);
+            let gchild_id = tree.add_node(&axis, value, Some(child_id));
+            // tree.add_values(gchild_id, vec![1, 2]);
+
+            for _ in 0..1000 {
+                // let ggchild_value = format!("ggchild_val{}", k);
+                let _ggchild_id = tree.add_node(&axis, value, Some(gchild_id));
+                // tree.add_value(_ggchild_id, value);
+                // tree.add_values(_ggchild_id, vec![1, 2, 3, 4]);
+            }
+        }
+    }
+
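+    // 1 root + 100 children + 100*100 grandchildren + 100*100*1000 great-grandchildren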
+    assert_eq!(tree.nodes.len(), 10_010_101);
+
+    let elapsed = now.elapsed();
+    println!("Elapsed: {:.2?}", elapsed);
+
+    // tree.print_tree();
+}
diff --git a/web_query_builder/run.sh b/web_query_builder/run.sh
index d1a12b8..dd5bd21 100755
--- a/web_query_builder/run.sh
+++ b/web_query_builder/run.sh
@@ -1 +1 @@
-flask run --debug --port=5005
\ No newline at end of file
+flask run --debug --port=5006
\ No newline at end of file
diff --git a/web_query_builder/static/app.js b/web_query_builder/static/app.js
index a89853b..a9321d8 100644
--- a/web_query_builder/static/app.js
+++ b/web_query_builder/static/app.js
@@ -73,9 +73,13 @@ function goToNextUrl() {
       values.push(timePicker.value.replace(":", ""));
     }
 
-    const enum_checkboxes = item.querySelectorAll("input[type='checkbox']:checked");
+    const enum_checkboxes = item.querySelectorAll(
+      "input[type='checkbox']:checked"
+    );
     if (enum_checkboxes.length > 0) {
-      values.push(...Array.from(enum_checkboxes).map((checkbox) => checkbox.value));
+      values.push(
+        ...Array.from(enum_checkboxes).map((checkbox) => checkbox.value)
+      );
     }
 
     const any = item.querySelector("input[type='text']");
@@ -104,7 +108,7 @@ function goToNextUrl() {
     );
 
     if (existingIndex !== -1) {
-      // If the key already exists, 
+      // If the key already exists,
       // and the values aren't already in there,
       // append the values
       request[existingIndex][1] = [...request[existingIndex][1], ...values];
@@ -125,7 +129,6 @@ async function createCatalogItem(link, itemsContainer) {
   itemsContainer.appendChild(itemDiv);
 
   try {
-
     // Update the item div with real content
     itemDiv.classList.remove("loading");
 
@@ -134,16 +137,19 @@ async function createCatalogItem(link, itemsContainer) {
     // add data-key attribute to the itemDiv
     itemDiv.dataset.key = link.title;
     itemDiv.dataset.keyType = dimension.type;
-    
+
     itemDiv.innerHTML = `
       <h3 class="item-title">${link.title || "No title available"}</h3>
       <p class="item-type">Key Type: ${itemDiv.dataset.keyType || "Unknown"}</p>
       <!-- <p class="item-type">Paths: ${dimension.paths}</p> -->
       <p class="item-type">Optional: ${dimension.optional ? "Yes" : "No"}</p>
-      <p class="item-description">${dimension.description ? dimension.description.slice(0, 100) : "No description available"}...</p>
+      <p class="item-description">${
+        dimension.description
+          ? dimension.description.slice(0, 100)
+          : "No description available"
+      }...</p>
     `;
 
-
     // if (dimension.type === "date" || dimension.type === "time") {
     //   // Render a date picker for the "date" key
     //   const picker = `<input type="${link.title}" name="${link.title}">`;
@@ -155,7 +161,7 @@ async function createCatalogItem(link, itemsContainer) {
     // }
     // Otherwise create a scrollable list with checkboxes for values if available
     if (
-    //   dimension.type === "enum" &&
+      //   dimension.type === "enum" &&
       dimension.values &&
       dimension.values.length > 0
     ) {
@@ -173,20 +179,24 @@ async function createCatalogItem(link, itemsContainer) {
 }
 
 function renderCheckboxList(link) {
-    const dimension = link["generalized_datacube:dimension"];
-    const value_descriptions = dimension.value_descriptions || [];
-  
-    const listContainerHTML = `
+  const dimension = link["generalized_datacube:dimension"];
+  const value_descriptions = dimension.value_descriptions || [];
+
+  const listContainerHTML = `
       <div class="item-list-container">
         <label class="list-label">Select one or more values:</label>
         <div class="scrollable-list">
           ${dimension.values
             .map((value, index) => {
-              const labelText = value_descriptions[index] ? `${value} - ${value_descriptions[index]}` : value;
+              const labelText = value_descriptions[index]
+                ? `${value} - ${value_descriptions[index]}`
+                : value;
               return `
                 <div class="checkbox-container">
                   <label class="checkbox-label">
-                  <input type="checkbox" class="item-checkbox" value="${value}" ${dimension.values.length === 1? 'checked' : ''}>
+                  <input type="checkbox" class="item-checkbox" value="${value}" ${
+                dimension.values.length === 1 ? "checked" : ""
+              }>
                   ${labelText}
                   </label>
                 </div>
@@ -196,9 +206,10 @@ function renderCheckboxList(link) {
         </div>
       </div>
     `;
-  
-    return document.createRange().createContextualFragment(listContainerHTML).firstElementChild;
-  }
+
+  return document.createRange().createContextualFragment(listContainerHTML)
+    .firstElementChild;
+}
 
 // Render catalog items in the sidebar
 function renderCatalogItems(links) {
@@ -217,36 +228,37 @@ function renderCatalogItems(links) {
 }
 
 function renderRequestBreakdown(request, descriptions) {
-    const container = document.getElementById("request-breakdown");
-    const format_value = (key, value) => {
-      return `<span class="value" title="${descriptions[key]['value_descriptions'][value]}">"${value}"</span>`;
-    };
-    
-    const format_values = (key, values) => {
-      if (values.length === 1) {
-        return format_value(key, values[0]);
-      }
-      return `[${values.map((v) => 
-        format_value(key, v)
-    ).join(", ")}]`;
-    };
-  
-    let html = `{\n` +
-      request
-        .map(
-          ([key, values]) =>
-            `    <span class="key" title="${descriptions[key]['description']}">"${key}"</span>: ${format_values(key, values)},`
-        )
-        .join("\n") +
-      `\n}`;
-    container.innerHTML = html;
-  }
+  const container = document.getElementById("request-breakdown");
+  const format_value = (key, value) => {
+    return `<span class="value" title="${descriptions[key]["value_descriptions"][value]}">"${value}"</span>`;
+  };
+
+  const format_values = (key, values) => {
+    if (values.length === 1) {
+      return format_value(key, values[0]);
+    }
+    return `[${values.map((v) => format_value(key, v)).join(", ")}]`;
+  };
+
+  let html =
+    `{\n` +
+    request
+      .map(
+        ([key, values]) =>
+          `    <span class="key" title="${
+            descriptions[key]["description"]
+          }">"${key}"</span>: ${format_values(key, values)},`
+      )
+      .join("\n") +
+    `\n}`;
+  container.innerHTML = html;
+}
 
 function renderRawSTACResponse(catalog) {
   const itemDetails = document.getElementById("raw-stac");
   // create new object without debug key
-    let just_stac = Object.assign({}, catalog);
-    delete just_stac.debug;
+  let just_stac = Object.assign({}, catalog);
+  delete just_stac.debug;
   itemDetails.textContent = JSON.stringify(just_stac, null, 2);
 
   const debug_container = document.getElementById("debug");