Commit b3d79de4 authored by Marcel Köpke

src/data/raw/json_to_numpy documentation

parent 9e6158aa
@@ -22,6 +22,25 @@ numpy_label_layout = config["numpy_label_layout"]
def create_dataobject(filename, run, expand_depth):
"""Create a dictionary from a json file and convert data to numpy.
Parameters
----------
filename : str
Filename (without path) of the json file that should be converted.
run : str
Subfolder of data/raw frow which json files should be read.
expand_depth : bool
Flag to indicate if different size longitudinal profiles (due to
different zeniths) should be expanded (True) to maximum size with
additional nan values or if all profiles should be cut (False) to
minimum size.
Returns
-------
dataobject : dict
Dictionary with metadata and converted data in numpy format.
"""
# read json data
filepath = os.path.join(rawpath, run, filename)
with open(filepath, "r") as fd:
......
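A minimal usage sketch of the documented function follows; the example filename, run folder, and printed output are assumptions for illustration, not taken from the repository (only the conex_data_<timestamp>.json naming pattern is implied by the code further below).

# hypothetical call to create_dataobject; filename and run are made-up examples
from src.data.raw.json_to_numpy import create_dataobject

dataobject = create_dataobject(
    filename="conex_data_2020-01-01_120000.json",  # assumed name following conex_data_<timestamp>.json
    run="run01",                                   # assumed subfolder of data/raw
    expand_depth=True,                             # pad shorter profiles with nan instead of cutting
)
print(dataobject.keys())  # metadata plus the numpy-converted arrays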
@@ -10,6 +10,25 @@ interimpath = get_path()
def store_dataobject(dataobject, overwrite=False):
"""Save dataobject to numpy files and metadata file.
This functions intends to write the (converted) contents of a json file
to their corresponding numpy and metadata file format in data/interim.
The following files will be created:
particle_distribution_timestamp.py
energy_deposit_timestamp.py
label_timestamp.py
metadata_timestamp.json
Parameters
----------
dataobject : dict
Dictionary that contains all json file information. (See also
src.data.raw.json_to_numpy.create_dataobject)
overwrite : bool, optional
Flag to indicate if files in data/interim should be overwritten.
Raises exception if overwrite fails.
"""
json_file : str = dataobject["json_file"]
run : str = dataobject["run"]
timestamp = json_file.split("conex_data_")[-1].split(".json")[0]
......
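An end-to-end sketch combining both documented functions is shown below. The diff does not reveal which module store_dataobject lives in, so its import path here is an assumption, as are the example filename and run folder.

# hypothetical pipeline: convert a raw json file and persist it to data/interim
from src.data.raw.json_to_numpy import create_dataobject
from src.data.interim.numpy_store import store_dataobject  # assumed module path, not shown in this diff

dataobject = create_dataobject("conex_data_2020-01-01_120000.json", "run01", expand_depth=False)

# creates the particle_distribution, energy_deposit, label and metadata files
# (suffixed with the timestamp parsed from the json filename) in data/interim
store_dataobject(dataobject, overwrite=True)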