Skip to content

Commit

Permalink
Add unit tests (#11)
Browse files Browse the repository at this point in the history
* Update .gitignore

* Create requirements.txt

* Add GitHub Action

* format with isort and black

* set up basic test

* fix small GHA issue

* added pytest as a requirement

* fix file path

* Update conftest.py

* fix file paths

* add tests

* fix tests
  • Loading branch information
nwlandry authored Aug 15, 2024
1 parent 7814d21 commit d377d78
Show file tree
Hide file tree
Showing 11 changed files with 132 additions and 27 deletions.
31 changes: 31 additions & 0 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
# CI workflow: run the pytest suite that validates the HIF JSON schema.
# Runs on every push to main and on every pull request targeting main.
name: Test schema

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  pytest:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"

      - name: Install packages
        run: |
          python -m pip install --upgrade pip wheel setuptools
          python -m pip install -r requirements.txt
          python -m pip list

      - name: Test schema
        run: |
          # run test suite
          pytest --color=yes
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -160,3 +160,5 @@ cython_debug/
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

test.*
3 changes: 3 additions & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
fastjsonschema
pytest
requests
20 changes: 11 additions & 9 deletions schemas/HIF_schemas.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@
"metadata": {},
"outputs": [],
"source": [
"schema = json.load(open(\"hif_schema_v0.1.0.json\",\"r\"))\n",
"schema = json.load(open(\"hif_schema_v0.1.0.json\", \"r\"))\n",
"schema"
]
},
Expand Down Expand Up @@ -182,7 +182,7 @@
"source": [
"# %%timeit #5.55 ms ± 101 µs per loop (mean ± std. dev. of 7 runs, 100 loops each)\n",
"hif = to_hif(lm)\n",
"json.dump(hif,open(\"../examples/lesmis_hif.json\",\"w\"),allow_nan=False)"
"json.dump(hif, open(\"../examples/lesmis_hif.json\", \"w\"), allow_nan=False)"
]
},
{
Expand All @@ -194,7 +194,7 @@
"source": [
"# %%timeit #742 µs ± 2.52 µs per loop (mean ± std. dev. of 7 runs, 1,000 loops each)\n",
"try:\n",
" validator(hif);\n",
" validator(hif)\n",
"except Exception as e:\n",
" print(e)"
]
Expand All @@ -214,12 +214,12 @@
"metadata": {},
"outputs": [],
"source": [
"\n",
"from copy import deepcopy\n",
"\n",
"hiftest = deepcopy(hif)\n",
"hiftest['network-type'] = \"ordered\"\n",
"hiftest[\"network-type\"] = \"ordered\"\n",
"try:\n",
" validator(hiftest);\n",
" validator(hiftest)\n",
"except Exception as e:\n",
" print(e)\n",
"validator(hiftest);"
Expand Down Expand Up @@ -381,7 +381,7 @@
"outputs": [],
"source": [
"try:\n",
" validator(hshif);\n",
" validator(hshif)\n",
"except Exception as e:\n",
" print(e)"
]
Expand Down Expand Up @@ -414,7 +414,7 @@
"metadata": {},
"outputs": [],
"source": [
"assert hshyp == newhshyp ## this checks that the structure is the same"
"assert hshyp == newhshyp ## this checks that the structure is the same"
]
},
{
Expand All @@ -424,7 +424,9 @@
"metadata": {},
"outputs": [],
"source": [
"json.dump(hshif,open(\"../examples/contacts_high_school_hif.json\",\"w\"),allow_nan=False)"
"json.dump(\n",
" hshif, open(\"../examples/contacts_high_school_hif.json\", \"w\"), allow_nan=False\n",
")"
]
},
{
Expand Down
39 changes: 21 additions & 18 deletions schemas/performance_testing.py
Original file line number Diff line number Diff line change
@@ -1,32 +1,33 @@
'''
"""
Datasets to use:
1. http://bigg.ucsd.edu/models/e_coli_core (xgi.load_big_data ecoli dataset from this website)]
2. https://github.com/HGX-Team/data/tree/main/contacts/high-school
'''
"""

import timeit
import sys
import hypernetx as hnx
import xgi
import pandas as pd
import fastjsonschema
import datetime as dtm
import json
import sys
import timeit
import warnings
import datetime as dtm
from time import perf_counter


import fastjsonschema
import hypernetx as hnx
import pandas as pd
import xgi

warnings.simplefilter("ignore")
sys.stdout = open('performance_testing_output.txt','a')
sys.stdout = open("performance_testing_output.txt", "a")


def marktime(msg=None):
temp = dtm.datetime.now()
print(temp.strftime("%d/%m/%y %H:%M:%S"),": ",msg,flush=True)
print(temp.strftime("%d/%m/%y %H:%M:%S"), ": ", msg, flush=True)
return temp

schema = json.load(open("hif_schema_v0.1.0.json","r"))

schema = json.load(open("hif_schema_v0.1.0.json", "r"))
validator = fastjsonschema.compile(schema)

### high_school data as dataframes for hnx;
Expand All @@ -39,8 +40,9 @@ def marktime(msg=None):
hs_nodedf = pd.DataFrame(hs["nodes"])
hs_nodedf = hs_nodedf.set_index("id").reset_index().fillna("")


### HNX constructors
def hnx_hypergraph(df,nodedf=None,edgedf=None):
def hnx_hypergraph(df, nodedf=None, edgedf=None):
return hnx.Hypergraph(df, node_properties=nodedf)


Expand All @@ -66,21 +68,22 @@ def hnx_from_hif(hif):
incidences = pd.DataFrame(hif["incidences"])
return hnx.Hypergraph(incidences, node_properties=nodes, edge_properties=edges)


marktime("Begin Run")

start = perf_counter()
h = hnx_hypergraph(hs_df,nodedf=hs_nodedf)
h = hnx_hypergraph(hs_df, nodedf=hs_nodedf)
finish = perf_counter()
print('hnx_high_school ',f'{finish - start:.5f} ns',flush=True)
print("hnx_high_school ", f"{finish - start:.5f} ns", flush=True)

start = perf_counter()
hif = hnx_to_hif(h)
finish = perf_counter()
print('hnx_to_hif high_school ',f'{finish - start:.5f} ns',flush=True)
print("hnx_to_hif high_school ", f"{finish - start:.5f} ns", flush=True)

start = perf_counter()
newh = hnx_from_hif(hif)
finish = perf_counter()
print('hnx_from_hif high_school ',f'{finish - start:.5f} ns',flush=True)
print("hnx_from_hif high_school ", f"{finish - start:.5f} ns", flush=True)

marktime("Run Complete \n")
35 changes: 35 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import json

import pytest
import requests

# Canonical location of the published HIF schema; fetched live so the tests
# always validate against the schema version on the main branch.
schema_location = "https://raw.githubusercontent.com/pszufe/HIF_validators/main/schemas/hif_schema_v0.1.0.json"
# Directory holding the JSON documents used as test inputs. The path is
# relative to the repository root, where pytest is invoked (see the CI
# workflow) — running pytest from another directory will not find the files.
json_dir = "tests/test_files"


@pytest.fixture
def schema():
    """Return the HIF JSON schema fetched from the main branch.

    Raises:
        requests.HTTPError: if the schema download fails. The original
            version silently returned ``None`` on a non-OK response, which
            produced a confusing failure later inside
            ``fastjsonschema.compile(None)`` rather than at the real cause.
    """
    r = requests.get(schema_location)
    # Fail fast with a clear HTTP error instead of returning None.
    r.raise_for_status()
    return r.json()


@pytest.fixture
def empty():
    """Return an empty JSON object ``{}`` — missing every required HIF field."""
    # Context manager closes the file promptly; the original
    # json.load(open(...)) left the handle open until garbage collection.
    with open(f"{json_dir}/empty.json", "r") as f:
        return json.load(f)


@pytest.fixture
def bad_top_level_field():
    """Return a document with an unrecognized top-level field (``"test"``)."""
    # Context manager closes the file promptly; the original
    # json.load(open(...)) left the handle open until garbage collection.
    with open(f"{json_dir}/bad_top_level_field.json", "r") as f:
        return json.load(f)


@pytest.fixture
def metadata_as_list():
    """Return a document whose ``"metadata"`` is a list instead of an object."""
    # Context manager closes the file promptly; the original
    # json.load(open(...)) left the handle open until garbage collection.
    with open(f"{json_dir}/metadata_as_list.json", "r") as f:
        return json.load(f)


@pytest.fixture
def empty_hypergraph():
    """Return a minimal valid document: just an empty ``"incidences"`` list."""
    # Context manager closes the file promptly; the original
    # json.load(open(...)) left the handle open until garbage collection.
    with open(f"{json_dir}/empty_hypergraph.json", "r") as f:
        return json.load(f)
1 change: 1 addition & 0 deletions tests/test_files/bad_top_level_field.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"test": {"prop1": "test"}, "incidences": []}
1 change: 1 addition & 0 deletions tests/test_files/empty.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{}
1 change: 1 addition & 0 deletions tests/test_files/empty_hypergraph.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"incidences": []}
1 change: 1 addition & 0 deletions tests/test_files/metadata_as_list.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"metadata": [0, 1, 2], "incidences": []}
25 changes: 25 additions & 0 deletions tests/test_schema.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import fastjsonschema
import pytest


def test_empty(schema, empty):
    """An empty document ``{}`` must be rejected by the schema."""
    validator = fastjsonschema.compile(schema)
    # JsonSchemaException subclasses ValueError, so this is a strictly more
    # precise check than the original pytest.raises(ValueError): an unrelated
    # ValueError can no longer make the test pass spuriously.
    with pytest.raises(fastjsonschema.JsonSchemaException):
        validator(empty)


def test_bad_top_level_field(schema, bad_top_level_field):
    """A document with an unknown top-level field must be rejected."""
    validator = fastjsonschema.compile(schema)
    # JsonSchemaException subclasses ValueError, so this is a strictly more
    # precise check than the original pytest.raises(ValueError): an unrelated
    # ValueError can no longer make the test pass spuriously.
    with pytest.raises(fastjsonschema.JsonSchemaException):
        validator(bad_top_level_field)


def test_metadata_as_list(schema, metadata_as_list):
    """A document whose "metadata" is a list (not an object) must be rejected."""
    validator = fastjsonschema.compile(schema)
    # JsonSchemaException subclasses ValueError, so this is a strictly more
    # precise check than the original pytest.raises(ValueError): an unrelated
    # ValueError can no longer make the test pass spuriously.
    with pytest.raises(fastjsonschema.JsonSchemaException):
        validator(metadata_as_list)


def test_empty_hypergraph(schema, empty_hypergraph):
    """A document containing only an empty "incidences" list is valid HIF."""
    # Compile and validate in a single expression: validation raises on
    # failure, so reaching the end of the test means the document conforms.
    fastjsonschema.compile(schema)(empty_hypergraph)

0 comments on commit d377d78

Please sign in to comment.