From 67771a6ad1ebd7c47d9e78f2627ffdc3c7f92bb4 Mon Sep 17 00:00:00 2001 From: mayomatsuda Date: Tue, 30 Jul 2024 01:23:11 -0400 Subject: [PATCH 01/30] dev-markdown-link-check: add github workflow to check markdown links. Resolves #464 --- .github/workflows/twa-md-push.yml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 .github/workflows/twa-md-push.yml diff --git a/.github/workflows/twa-md-push.yml b/.github/workflows/twa-md-push.yml new file mode 100644 index 00000000000..40e19c0002d --- /dev/null +++ b/.github/workflows/twa-md-push.yml @@ -0,0 +1,23 @@ +# +# This workflow contains a job to check for broken links within Markdown files in the repository. +# +name: TWA Markdown Push + +# Trigger this workflow during pushes to the 'main' branch if changes to Markdown files +on: + push: + branches: + - main + paths: + - '**.md' + +jobs: + # Check for broken links within Markdown files + markdown-link-check: + name: Check markdown files for broken links + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Markdown links check + uses: ruzickap/action-my-markdown-link-checker@v1 \ No newline at end of file From 460d477ae20970476dd2e93c4fa24830a9db25ed Mon Sep 17 00:00:00 2001 From: Myles Date: Sun, 16 Feb 2025 16:07:48 -0500 Subject: [PATCH 02/30] fix markdown links --- .github/workflows/twa-md-push.yml | 5 +++- .mlc_config.json | 23 +++++++++++++++++++ Agents/SeaLevelImpactAgent/README.md | 2 +- Agents/utils/python-utils/README.md | 2 +- Apps/SampleApp/feature/todo/README.md | 2 +- .../vendor/willdurand/negotiation/README.md | 4 ++-- EntityRDFizer/README.md | 2 +- JPS_ARBITRAGE/README.md | 8 +++---- .../python_wrapper/docs/examples/dif.md | 2 +- .../ac/cam/cares/jps/base/converter/README.md | 2 +- .../cam/cares/jps/base/timeseries/README.md | 2 +- JPS_BLAZEGRAPH/Readme.md | 2 +- JPS_ESS/README.MD | 2 +- .../ontology/ontoassetmanagement/README.MD | 4 ++-- JPS_Ontology/ontology/ontopoi/README.md | 2 +- 
JPS_Ontology/ontology/ontosealevel/README.md | 4 ++-- JPS_Ontology/ontology/ontoubemmp/README.md | 8 +++---- JPS_Ontology/ontology/ontozoning/README.MD | 4 ++-- JPS_VIRTUALSENSOR/README.md | 4 +++- QuestionAnswering/JPS_Chatbot/README.md | 2 +- .../JPS_LDF/dependencies/README.md | 2 +- .../Training/EntityLinking/readme.md | 4 ++-- .../MARIE_AND_BERT/Training/readme.md | 2 +- QuestionAnswering/MARIE_AND_BERT/readme.md | 8 +++---- .../MARIE_SEQ2SEQ/training/README.md | 4 ++-- .../QA_ICL/data_generation/README.md | 7 +++--- .../next_app_marie/resources/history.md | 2 +- .../resources/tbox-info/ontocompchem.md | 2 +- .../resources/tbox-info/ontokin.md | 4 ++-- .../resources/tbox-info/ontomops.md | 4 ++-- .../resources/tbox-info/ontospecies.md | 2 +- .../resources/tbox-info/ontozeolite.md | 4 ++-- .../ifcto3Dtilesnext/README.md | 4 +++- obsolete/JPS_DES/README.md | 3 ++- obsolete/JPS_Version_0/BMS/BMSMap/LICENSE.md | 2 +- obsolete/JPS_Version_0/BMS/BMSMap/README.md | 2 +- ontology-tools/CMCLOntoChemExp/README.md | 10 ++++---- thermo/README.md | 9 ++++---- thermo/obda-thermochemistry/README.md | 12 +++++----- thermo/spin-thermochemistry/README.md | 4 ++-- web/augmented-uk/docs/data.md | 16 ++++++------- .../data/street_light/king's_lynn/README.md | 1 + .../vector/132kv-overhead-lines/README.md | 2 +- .../ukpn/vector/132kv-poles-towers/README.md | 2 +- .../ukpn/vector/33kv-overhead-lines/README.md | 2 +- .../README.md | 2 +- web/docs/README.md | 1 + web/pylon-visualisation/README.md | 2 +- web/twa-vis-framework/docs/cesium.md | 2 +- web/twa-vis-framework/docs/mapbox.md | 2 +- web/twa-vis-framework/docs/overview.md | 6 ++--- web/twa-vis-framework/docs/troubleshooting.md | 2 +- web/twa-vis-framework/docs/tutorial-mapbox.md | 4 ++-- web/twa-vis-framework/docs/tutorials.md | 2 +- web/twa-vis-framework/library/README.md | 6 ++--- 55 files changed, 130 insertions(+), 99 deletions(-) create mode 100644 .mlc_config.json diff --git a/.github/workflows/twa-md-push.yml 
b/.github/workflows/twa-md-push.yml index 40e19c0002d..3c18b0fa1a4 100644 --- a/.github/workflows/twa-md-push.yml +++ b/.github/workflows/twa-md-push.yml @@ -20,4 +20,7 @@ jobs: - uses: actions/checkout@v2 - name: Markdown links check - uses: ruzickap/action-my-markdown-link-checker@v1 \ No newline at end of file + uses: ruzickap/action-my-markdown-link-checker@v1 + with: + config_file: .mlc_config.json + debug: true \ No newline at end of file diff --git a/.mlc_config.json new file mode 100644 index 00000000000..48a354a7b80 --- /dev/null +++ b/.mlc_config.json @@ -0,0 +1,23 @@ +{ + "ignorePatterns": [ + { + "pattern": "http://localhost(:\\d+)?(/[^ ]*)?" + }, + { + "pattern": "http://HOST:PORT(/[^ ]*)?" + }, + { + "pattern": "https://www.cmegroup.com(/[^ ]*)?" + }, + { + "pattern": "https?://(www\\.)?theworldavatar.com(/[^ ]*)?" + }, + { + "pattern": "https://abc.xyz(/[^ ]*)?" + }, + { + "pattern": "https://maven.pkg.github.com/cambridge-cares/TheWorldAvatar/?" + } + ], + "aliveStatusCodes": [200, 403, 0] +} \ No newline at end of file diff --git a/Agents/SeaLevelImpactAgent/README.md b/Agents/SeaLevelImpactAgent/README.md index 57af926ee0e..1d0cda40fe4 100644 --- a/Agents/SeaLevelImpactAgent/README.md +++ b/Agents/SeaLevelImpactAgent/README.md @@ -21,7 +21,7 @@ The SeaLevelImpactAgent is an agent that ## 2. Prerequisites This agent is developed as part of the [Singapore-sea-level-rise stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-sea-level-rise-singapore/Deploy/stacks/Singapore-sea-level-rise). -Data in the [Singapore-sea-level-rise stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-sea-level-rise-singapore/Deploy/stacks/Singapore-sea-level-rise) needs to be uploaded by stack-data-uploader before running this agent.
+Data in the [Singapore-sea-level-rise stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/Singapore-sea-level-rise) needs to be uploaded by stack-data-uploader before running this agent. ### 2.1. Stack Set Up The agent has been implemented to work in the stack. Follow the instructions in the [stack-manager]'s README to set up the stack. diff --git a/Agents/utils/python-utils/README.md b/Agents/utils/python-utils/README.md index 67896623e5b..7ec904a1e20 100644 --- a/Agents/utils/python-utils/README.md +++ b/Agents/utils/python-utils/README.md @@ -1,6 +1,6 @@ # TheWorldAvatar - Python Utils -This Python package contains a number of logging utilities that may be useful to any Python-based project within The World Avatar (TWA) ecosystem. At the time of writing, this project builds an isolated package named `agentlogging` that users can import in their own code. In the future, this package may be bundled with the Python wrapper for the JPS Base Library so that only one dependency is required. **Deprecation Warning: `agentlogging` is packaged with `py4jps` as of version [1.0.29](https://pypi.org/project/py4jps/1.0.29/). Please do NOT use or develop this isolated package further. Instead, please use and continue develop [`TheWorldAvatar/JPS_BASE_LIB/python_wrapper/py4jps/agentlogging`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_wrapper/py4jps/agentlogging).** +This Python package contains a number of logging utilities that may be useful to any Python-based project within The World Avatar (TWA) ecosystem. At the time of writing, this project builds an isolated package named `agentlogging` that users can import in their own code. In the future, this package may be bundled with the Python wrapper for the JPS Base Library so that only one dependency is required. **Deprecation Warning: `agentlogging` is packaged with `py4jps` as of version [1.0.29](https://pypi.org/project/py4jps/1.0.29/). 
Please do NOT use or develop this isolated package further. Instead, please use and continue develop [`TheWorldAvatar/JPS_BASE_LIB/python_wrapper/twa/agentlogging`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_wrapper/twa/agentlogging).** ## Functions diff --git a/Apps/SampleApp/feature/todo/README.md b/Apps/SampleApp/feature/todo/README.md index 7fcdccdd7dc..82592f24c6b 100644 --- a/Apps/SampleApp/feature/todo/README.md +++ b/Apps/SampleApp/feature/todo/README.md @@ -3,7 +3,7 @@ A module demonstrates in module navigation with action, data binding and the complete workflow of retrieving data from internet and displaying in the app. ## 1. Workflow -The workflow of data transfer has been discussed in [SampleApp/README.md](https://github.com/cambridge-cares/TheWorldAvatar/blob/1786-android-documentation/Apps/SampleApp/README.md#22-data-transfer). Please refer to it for more details. +The workflow of data transfer has been discussed in [SampleApp/README.md](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Apps/SampleApp/README.md#22-data-transfer). Please refer to it for more details. ## 2. In Module Navigation In module navigation has been discussed in [SampleApp/README.md](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Apps/SampleApp/README.md#213-action). Please refer to it for more details. 
diff --git a/Deploy/stacks/web/website/site/vendor/willdurand/negotiation/README.md b/Deploy/stacks/web/website/site/vendor/willdurand/negotiation/README.md index 1d85c123485..0c0cc59fa0c 100644 --- a/Deploy/stacks/web/website/site/vendor/willdurand/negotiation/README.md +++ b/Deploy/stacks/web/website/site/vendor/willdurand/negotiation/README.md @@ -2,7 +2,7 @@ Negotiation =========== [![Build -Status](https://travis-ci.org/willdurand/Negotiation.svg?branch=master)](http://travis-ci.org/willdurand/Negotiation) +Status](https://travis-ci.org/willdurand/Negotiation.svg?branch=master)](http://travis-ci.org/willdurand/Negotiation) [![Build status](https://ci.appveyor.com/api/projects/status/6tbe8j3gofdlfm4v?svg=true)](https://ci.appveyor.com/project/willdurand/negotiation) [![Total @@ -170,7 +170,7 @@ Run it using PHPUnit: Contributing ------------ -See [CONTRIBUTING](CONTRIBUTING.md) file. +See [CONTRIBUTING](CONTRIBUTING.md) file. Credits diff --git a/EntityRDFizer/README.md b/EntityRDFizer/README.md index 5081ebd9f4f..8ae3abbf2b9 100644 --- a/EntityRDFizer/README.md +++ b/EntityRDFizer/README.md @@ -3,7 +3,7 @@ The `entityrdfizer` project is designed to convert entities of any domain and their data and metadata into RDF. It requires the entities and their data to be provided as inputs in an ABox CSV template, that is filled in with data. A group of ABox CSV template files are provided under the following URL: -https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/KBTemplates/ABox +https://github.com/TheWorldAvatar/ontology/blob/main/KBTemplates/ABox # Installation # These instructions will get you a copy of the project up and running on your local machine for development and testing purposes. diff --git a/JPS_ARBITRAGE/README.md b/JPS_ARBITRAGE/README.md index 98ee042f80d..b72379360ff 100644 --- a/JPS_ARBITRAGE/README.md +++ b/JPS_ARBITRAGE/README.md @@ -33,18 +33,18 @@ How do we know if it's not working? 
If the python logger logging to tomcat serve - http://www.usinflationcalculator.com/ - https://www.icis.com/resources/news/2013/11/08/9723077/se-asia-to-china-palm-oil-freight-rates-may-fall-on-weak-demand/ - https://www.icis.com/resources/news/2013/11/08/9723077/se-asia-to-china-palm-oil-freight-rates-may-fall-on-weak-demand/ - - https://www.ema.gov.sg/Non_Residential_Programmes_Electricity_Tariffs.aspx + - Non_Residential_Programmes_Electricity_Tariffs.aspx - missing 2. exchange_rates.pyw - http://apilayer.net/api/live?access_key=402d77f0850c35adfa5a797e325262dd¤cies=CNY,SGD&source=USD&format=1 3. FAME_download.pyw - - http://www.cmegroup.com/trading/energy/refined-products/fame-0-argus-biodiesel-fob-rdam-red-compliant-swap-futures.html + - https://www.cmegroup.com/markets/energy/biofuels/fame-0-argus-biodiesel-fob-rdam-red-compliant-swap-futures.html 4. HNG_download.pyw - - http://www.cmegroup.com/trading/energy/natural-gas/natural-gas_quotes_globex.html + - https://www.cmegroup.com/markets/energy/natural-gas/natural-gas.quotes.html 5. NG_to_MeOH_MoDS.py - https://business.directenergy.com/understanding-energy/energy-tools/conversion-factors. - 6. ZCE_download.pyw + 6. ZCE_download.pyw - http://english.czce.com.cn/enportal/DFSStaticFiles/Future/EnglishFutureQuotesMA.htm ### TODO: diff --git a/JPS_BASE_LIB/python_wrapper/docs/examples/dif.md b/JPS_BASE_LIB/python_wrapper/docs/examples/dif.md index 1568f960a6e..0f8706a1e87 100644 --- a/JPS_BASE_LIB/python_wrapper/docs/examples/dif.md +++ b/JPS_BASE_LIB/python_wrapper/docs/examples/dif.md @@ -13,7 +13,7 @@ To read the academic paper describing the DIF: To read the academic papers using the DIF: - Jiaru Bai, Sebastian Mosbach, Connor J. Taylor, Dogancan Karan, Kok Foong Lee, Simon D. Rihm, Jethro Akroyd, Alexei A. Lapkin, and Markus Kraft. (2024). A dynamic knowledge graph approach to distributed self-driving laboratories. Nature Communications 15, 462. 
[doi:10.1038/s41467-023-44599-9](https://doi.org/10.1038/s41467-023-44599-9) -- Wanni Xie, Feroz Farazi, John Atherton, Jiaru Bai, Sebastian Mosbach, Jethro Akroyd, and Markus Kraft. (2024). Dynamic knowledge graph approach for modelling the decarbonisation of power systems. Energy and AI 17, 100359. [doi:10.1016/j.egyai.2024.10035](https://doi.org/10.1016/j.egyai.2024.10035) +- Wanni Xie, Feroz Farazi, John Atherton, Jiaru Bai, Sebastian Mosbach, Jethro Akroyd, and Markus Kraft. (2024). Dynamic knowledge graph approach for modelling the decarbonisation of power systems. Energy and AI 17, 100359. [doi:10.1016/j.egyai.2024.100359](https://www.sciencedirect.com/science/article/pii/S2666546824000259) - Markus Hofmeister, Jiaru Bai, George Brownbridge, Sebastian Mosbach, Kok Foong Lee, Simon D. Rihm, Jethro Akroyd, Alexei A. Lapkin, and Markus Kraft. (2024). Semantic agent framework for automated flood assessment using dynamic knowledge graphs. Data-Centric Engineering 5, 14. [doi:10.1017/dce.2024.11](https://doi.org/10.1017/dce.2024.11) - Markus Hofmeister, George Brownbridge, Michael Hillman, Sebastian Mosbach, Jethro Akroyd, Kok Foong Lee, and Markus Kraft. (2024). Cross-domain flood risk assessment for smart cities using dynamic knowledge graphs. Sustainable Cities and Society 101, 105113. [doi:10.1016/j.scs.2023.105113](https://doi.org/10.1016/j.scs.2023.105113) - Markus Hofmeister, Kok Foong Lee, Yi-Kai Tsai, Magnus Müller, Karthik Nagarajan, Sebastian Mosbach, Jethro Akroyd, and Markus Kraft. (2024). Dynamic control of district heating networks with integrated emission modelling: A dynamic knowledge graph approach. Energy and AI 17, 100376.
[doi:10.1016/j.egyai.2024.100376](https://doi.org/10.1016/j.egyai.2024.100376) diff --git a/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/converter/README.md b/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/converter/README.md index 29710ae1b82..8a6abed49c2 100644 --- a/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/converter/README.md +++ b/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/converter/README.md @@ -167,7 +167,7 @@ Suppose you followed the instructions for filling out the template in the TBox M |OntoKin,TBox,http://www.theworldavatar.com/kg/ontokin, https://www.w3.org/2007/05/powder-s#hasIRI, , , , , , | |OntoKin,TBox,1,http://www.w3.org/2002/07/owl#versionInfo, , , , , , | |OntoKin,TBox,OntoKin is an ontology developed for representing chemical kinetic reaction mechanisms,http://www.w3.org/2000/01/rdf-schema#comment, , , , , , | -|OntoKin,TBox,http://theworldavatar.com/ontology/ontocape/OntoCAPE.owl,http://www.w3.org/2002/07/owl#imports, , , , , , | +|OntoKin,TBox,http://theworldavatar.com/ontology/ontocape/OntoCAPE.owl, http://www.w3.org/2002/07/owl#imports, , , , , , | |ReactionMechanism,Class, , , , , ,A reaction mechanism refers to a set of elementary reactions., http://www.theworldavatar.com/ontology/ontokin/OntoKin.owl, Reaction Mechanism| |Phase,Class, , , , , ,A phase of a substance is a form of matter., http://www.theworldavatar.com/ontology/ontokin/OntoKin.owl, Phase| |GasPhase,Class,Phase,IS-A, , , ,A continuous gaseous phase.,http://www.theworldavatar.com/ontology/ontokin/OntoKin.owl, Gas Phase| diff --git a/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries/README.md b/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries/README.md index 2985300acb9..6574b84c883 100644 --- a/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries/README.md +++ b/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries/README.md @@ -250,5 +250,5 @@ You can request login details by emailing 
`supportcmclinnovations.com` with [test repository]: [AQMeshInputAgent]: [FloodAgent]: - [GasGridAgent]: + [GasGridAgent]: [TimeSeriesExample]: diff --git a/JPS_BLAZEGRAPH/Readme.md b/JPS_BLAZEGRAPH/Readme.md index 261dafa5cdb..6d748d7381b 100644 --- a/JPS_BLAZEGRAPH/Readme.md +++ b/JPS_BLAZEGRAPH/Readme.md @@ -1,6 +1,6 @@ # Blazegraph Project ### Authors -* [Feroz Farazi](msff2@cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) ### Ontology Upload diff --git a/JPS_ESS/README.MD b/JPS_ESS/README.MD index 0b6708073b8..cbd2ef4af99 100644 --- a/JPS_ESS/README.MD +++ b/JPS_ESS/README.MD @@ -57,7 +57,7 @@ - Second, check if the test `testCreateScenarioAndCallESSCoordinate` is working. This calls C as a whole, thus errors can come from there. - Third, if both of the above conditions run without errors, then it's most likely due to the visualization being broken. It could be that you are reading from the wrong location (at which case, look at \web\CO2Web\public\javascripts\pwBaseFile.js, Line 2. Enable it to Line 3, and it should be reading from Claudius rather than local deployment. - Possible errors on the backend could include: - 1. Check if the domain name is available on Claudius. If it fails at BatteryCreator or OptimizationAgent, than it's most likely that. So access [J-ParkSimulator](www.jparksimulator.com). + 1. Check if the domain name is available on Claudius. If it fails at BatteryCreator or OptimizationAgent, than it's most likely that. So access [J-ParkSimulator](http://www.jparksimulator.com). 2. JPS-POWSYS components. This program depends on POWSYS war file being available, and utilizes ENAgent and retrofitAgents from the retrofit package. 3. GAMS not being installed and hidden under GAMS DIR. If you don't want to change your directory name, then Line 105 of EnergyStorageSystem should be changed to where your GAMS is installed. 4. 
Python not being installed diff --git a/JPS_Ontology/ontology/ontoassetmanagement/README.MD b/JPS_Ontology/ontology/ontoassetmanagement/README.MD index 147783d2caa..c784b7a753a 100644 --- a/JPS_Ontology/ontology/ontoassetmanagement/README.MD +++ b/JPS_Ontology/ontology/ontoassetmanagement/README.MD @@ -18,10 +18,10 @@ OntoApplication is developed to represent assets and its related information (e. | [fibo-organizations](https://spec.edmcouncil.org/fibo/ontology/FND/Organizations/Organizations/) | `https://spec.edmcouncil.org/fibo/ontology/FND/Organizations/Organizations/` | | [fibo-formalorganizations](https://spec.edmcouncil.org/fibo/ontology/FND/Organizations/FormalOrganizations/) | `https://spec.edmcouncil.org/fibo/ontology/FND/Organizations/FormalOrganizations/` | | [fibo-people](https://spec.edmcouncil.org/fibo/ontology/FND/AgentsAndPeople/People/) | `https://spec.edmcouncil.org/fibo/ontology/FND/AgentsAndPeople/People/` | -| [p2p-doc](https://purl.org/p2p-o/document) | `https://purl.org/p2p-o/document#` | +| [p2p-doc](https://purl.org/p2p-o/document) | `https://purl.org/p2p-o/document#` | | [p2p-docline](https://purl.org/p2p-o/documentline) | `https://purl.org/p2p-o/documentline#` | | [p2p-invoice](https://purl.org/p2p-o/invoice) | `https://purl.org/p2p-o/invoice#` | -| [p2p-item](https://purl.org/p2p-o/item) | `https://purl.org/p2p-o/item#` | +| [p2p-item](https://purl.org/p2p-o/item) | `https://purl.org/p2p-o/item#` | | [ontobim](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobim) | `https://www.theworldavatar.com/kg/ontobim/` | | [ontocape_technical_system](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontocape/upper_level/technical_system.owl) | `http://www.theworldavatar.com/ontology/ontocape/upper_level/technical_system.owl#` | diff --git a/JPS_Ontology/ontology/ontopoi/README.md b/JPS_Ontology/ontology/ontopoi/README.md index 57935598ae7..6c387f6e28f 100644 --- 
a/JPS_Ontology/ontology/ontopoi/README.md +++ b/JPS_Ontology/ontology/ontopoi/README.md @@ -5,7 +5,7 @@ This documentation provides an overview of the development process for the OntoP ## Development ## - **ontopoi.owl**: The ontology in the `ontopoi.owl` file contains all 9 top-level groups, 52 mid-level categories, and 600 lowest-level classes available in the [Points of Interest Classification Scheme](https://www.dropbox.com/scl/fi/krzpch9kkobpo2vek7np1/points-of-interest-classification-schemes-v3.4.pdf?rlkey=etc51hicq2ys19jh8nd45fk8i&st=4cyp2b3v&dl=0) published by Ordnance Survey. - - To create the ontology, the [class extractor](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-AI-for-Healthcare/Deploy/stacks/AI4Healthcare/AI4Healthcare_Common-Script) was executed to extract the groups, categories, and classes, representing them in a [TBox CSV file-based template](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/converter) as ontological classes. + - To create the ontology, the [class extractor](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/AI4PublicHealth/Common_Script) was executed to extract the groups, categories, and classes, representing them in a [TBox CSV file-based template](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/converter) as ontological classes. - The extractor also defined subclass relationships, linking each category to its group and each class to its category. The resulting data was saved to `ontopoi.csv`, then converted into an OWL format using the [TBox Generator](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/converter), which generated the `ontopoi.owl` file. - **ontopoi-with-properties.owl**: This OWL file extends the base ontology (`ontopoi.owl`) by incorporating object and datatype properties. 
These additions are essential for producing a semantic description of the UK Points of Interest dataset as published by Ordnance Survey. diff --git a/JPS_Ontology/ontology/ontosealevel/README.md b/JPS_Ontology/ontology/ontosealevel/README.md index 6131b95e2cb..0687c18c0a2 100644 --- a/JPS_Ontology/ontology/ontosealevel/README.md +++ b/JPS_Ontology/ontology/ontosealevel/README.md @@ -7,10 +7,10 @@ As part of TheWorldAvatar(TWA), this ontology has a modular design that can be e | Ontology | Incorporated data | |---------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------| -| [OntoBuildingEnvironment](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuiltenv) | Building properties, uses, values, and location | +| [OntoBuildingEnvironment](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontobuiltenv) | Building properties, uses, values, and location | | [OntoPlot](https://www.theworldavatar.com/kg/ontoplot/) | Attributes of land plots | | [Routable tiles ontology](https://w3id.org/openstreetmap/terms#) | OpenStreetMap road tags and their semantics | ## 1.2 Related Agents This ontology is used in the following agent: -1) [SeaLevelImpactAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/1808-dev-sealevelimpactagent) \ No newline at end of file +1) [SeaLevelImpactAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/SeaLevelImpactAgent) \ No newline at end of file diff --git a/JPS_Ontology/ontology/ontoubemmp/README.md b/JPS_Ontology/ontology/ontoubemmp/README.md index b7caf6a6bd2..1fd290d4c74 100644 --- a/JPS_Ontology/ontology/ontoubemmp/README.md +++ b/JPS_Ontology/ontology/ontoubemmp/README.md @@ -7,10 +7,10 @@ As part of TheWorldAvatar(TWA), this ontology has a modular design that can be e | Ontology | Incorporated data | 
|---------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------| -| [OntoBuildingEnvironment](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuiltenv) | Building properties, uses, values, and location | -| [OntoBIM](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobim) | Topological relationships between a building and its elements | -| [OntoBuildingStructure](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuildingstructure) | Wall and roof facades to receive solar irradiation | -| [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice) | Device properties for solar devices | +| [OntoBuildingEnvironment](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontobuiltenv) | Building properties, uses, values, and location | +| [OntoBIM](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontobim) | Topological relationships between a building and its elements | +| [OntoBuildingStructure](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontobuildingstructure) | Wall and roof facades to receive solar irradiation | +| [OntoDevice](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontodevice) | Device properties for solar devices | ### 1.2 Related Agents This ontology is used in the following agent: diff --git a/JPS_Ontology/ontology/ontozoning/README.MD b/JPS_Ontology/ontology/ontozoning/README.MD index 406d78fcb8d..bcd39921f5f 100644 --- a/JPS_Ontology/ontology/ontozoning/README.MD +++ b/JPS_Ontology/ontology/ontozoning/README.MD @@ -27,8 +27,8 @@ The namespace for this ontology is: ## 2. 
Legend Prefix | Namespace --- | --- -[ontoplot](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoplot) | `https://www.theworldavatar.com/kg/ontoplot/` -[opr](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoplanningregulation) | `https://www.theworldavatar.com/kg/ontoplanningregulation/` +[ontoplot](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontoplot) | `https://www.theworldavatar.com/kg/ontoplot/` +[opr](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontoplanningregulation) | `https://www.theworldavatar.com/kg/ontoplanningregulation/` ## 3. Modelling Decisions >LandUseType diff --git a/JPS_VIRTUALSENSOR/README.md b/JPS_VIRTUALSENSOR/README.md index 34fef2ef196..2237130c44c 100644 --- a/JPS_VIRTUALSENSOR/README.md +++ b/JPS_VIRTUALSENSOR/README.md @@ -5,7 +5,7 @@ - mapbox_api_key - mapbox_username 2) Set openweather API key in stack-manager/inputs/config/services/weather-agent.json, the API key needs to have OneCall enabled (credit card required, you can set the call limit below the limit before it starts charging). -3) If running AERMOD for static point sources, it is necessary to instantiate the input data required for AERMOD Agent according to OntoDispersion (https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodispersion). See the JurongIslandInputAgent folder for an example of an agent that does this. +3) If running AERMOD for static point sources, it is necessary to instantiate the input data required for AERMOD Agent according to OntoDispersion (https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontodispersion). See the JurongIslandInputAgent folder for an example of an agent that does this. 4) Elevation data (optional): AERMOD agent will try to query elevation data from a table named `elevation` in the default database. 
AERMOD agent can query the data stored in any SRID, but the table needs to contain data in one SRID only, hence it's recommended to convert any elevation data to a uniform SRID, e.g. 4326. An example is provided in [elevation.json]. Note that this config file is written for data in SRID=32632 and it needs to be changed according to your source data. The raw data files should be stored in `./stack-data-uploader/inputs/data/elevation`, any format supported by gdal should work, see https://gdal.org/drivers/raster/index.html for more info. 5) Buildings data (optional for ships, compulsory for static point source use cases): @@ -51,6 +51,7 @@ disp:tempMeasure rdf:type om:Measure ; om:hasUnit om:kelvin . ``` + An instance can emit multiple pollutants, the class of pollutant ID needs to be one of the following: - - @@ -59,6 +60,7 @@ An instance can emit multiple pollutants, the class of pollutant ID needs to be - - - + ## Important for visualisation if not deployed locally Modify instances of `http://localhost:4242` in [data.json] to the external URL of where the stack is going to be deployed. diff --git a/QuestionAnswering/JPS_Chatbot/README.md b/QuestionAnswering/JPS_Chatbot/README.md index 622b4b7fae3..7989d95ca77 100644 --- a/QuestionAnswering/JPS_Chatbot/README.md +++ b/QuestionAnswering/JPS_Chatbot/README.md @@ -5,7 +5,7 @@ Before making any changes to it, please consult the application's developer (Xia ### In a Docker stack -The JPS Chatbot (and it's associated LDF server) have been added to the 'agent' Docker stack (see the [deployment readme](../Deploy/README.md) for more info). +The JPS Chatbot (and it's associated LDF server) have been added to the 'agent' Docker stack (see the [deployment readme](../../Deploy/README.md) for more info). ### In isolation (for development and local testing) The instructions below are intended for isolated development and testing only. 
diff --git a/QuestionAnswering/JPS_LDF/dependencies/README.md b/QuestionAnswering/JPS_LDF/dependencies/README.md index de0e2652d9a..cc7ea5a35a8 100644 --- a/QuestionAnswering/JPS_LDF/dependencies/README.md +++ b/QuestionAnswering/JPS_LDF/dependencies/README.md @@ -4,7 +4,7 @@ Each of the three dependencies in this directory are managed using separate Mave The descriptor.xml specifies the content and format (zip) of each dependency, while the pom.xml contains the artifact's metadata and describes how to package and deploy it. To upload a new version of one of the artifacts: -1. Ensure you have [Maven](https://maven.apache.org) installed and configured with appropriate settings to allow upload to the World Avatar Maven repo. See [this readme](../../Deploy/examples/maven_dependency/deploy/README.md) for instructions. +1. Ensure you have [Maven](https://maven.apache.org) installed and configured with appropriate settings to allow upload to the World Avatar Maven repo. See [this readme](../../../Deploy/examples/maven_dependency/deploy/README.md) for instructions. 2. Place the new files/directories in the dependency sub-directory. The required content is: ``` ./custom_node_modules diff --git a/QuestionAnswering/MARIE_AND_BERT/Training/EntityLinking/readme.md b/QuestionAnswering/MARIE_AND_BERT/Training/EntityLinking/readme.md index cc325dfa96a..cfbe513e228 100644 --- a/QuestionAnswering/MARIE_AND_BERT/Training/EntityLinking/readme.md +++ b/QuestionAnswering/MARIE_AND_BERT/Training/EntityLinking/readme.md @@ -35,7 +35,7 @@ Shown below is an example of the expected folder structure after set-up: ## Train SMILES NER ###Training on Windows -Follow the [setup guide](setup) to configure the environment. Move the `/data` folder created in [Data Preparation](#data-preparation) under `MARIE_AND_BERT/Training/EntityLinking`. +Follow the [setup guide](#setup) to configure the environment. 
Move the `/data` folder created in [Data Preparation](#data-preparation) under `MARIE_AND_BERT/Training/EntityLinking`. Use `MARIE_AND_BERT/Training/EntityLinking` as root folder to run the following command to train the SMILES NER model: ``` @@ -125,7 +125,7 @@ workdir="/home/[your_CRSid]/[your_training_folder]/NEL_Training_Marie_and_Bert" ``` ###Training on Windows -Follow the [setup guide](setup) to configure the environment. Move the `/data` folder created in [Data Preparation](data-preparation) under `MARIE_AND_BERT/Training/EntityLinking`. Also use `MARIE_AND_BERT/Training/EntityLinking` as the root folder to run the following commands: +Follow the [setup guide](#setup) to configure the environment. Move the `/data` folder created in [Data Preparation](#data-preparation) under `MARIE_AND_BERT/Training/EntityLinking`. Also use `MARIE_AND_BERT/Training/EntityLinking` as the root folder to run the following commands: First step: diff --git a/QuestionAnswering/MARIE_AND_BERT/Training/readme.md b/QuestionAnswering/MARIE_AND_BERT/Training/readme.md index 6bf5ee9896b..5a95a570bd0 100644 --- a/QuestionAnswering/MARIE_AND_BERT/Training/readme.md +++ b/QuestionAnswering/MARIE_AND_BERT/Training/readme.md @@ -122,7 +122,7 @@ under the ontology folders or sub-ontology folders. ### File requirement 1. All the files required for embedding and the trained embedding files. -2. `score_model_training.tsv`, see [readme.md for dataset creation](./KGToolbox/readme.md) to create the file +2. 
`score_model_training.tsv`, see [readme.md for dataset creation](../KGToolbox/readme.md) to create the file The files need to be placed in `MARIE_AND_BERT/DATA/CrossGraph/[ontology_name]/[sub-ontology_name]` if there is a sub ontology folder, otherwise, the files need to be placed in `CrossGraph/[ontology_name]` diff --git a/QuestionAnswering/MARIE_AND_BERT/readme.md b/QuestionAnswering/MARIE_AND_BERT/readme.md index eb14970ba9d..d71a0803e62 100644 --- a/QuestionAnswering/MARIE_AND_BERT/readme.md +++ b/QuestionAnswering/MARIE_AND_BERT/readme.md @@ -1,8 +1,6 @@ # Marie and BERT (Marie 3.0) -The ``Marie and Bert`` a.k.a `Marie 3.0` project is developed by [Xiaochi Zhou](xz378@cam.ac.uk) and [Shaocong Zhang](sz375@cam.ac.uk) and [Mehal Agarwal](ma988@cam.ac.uk). - -A demonstration webpage is deployed at [Marie Website](http://159.223.42.53:5003/) +The ``Marie and Bert`` a.k.a `Marie 3.0` project is developed by [Xiaochi Zhou](mailto:xz378@cam.ac.uk) and [Shaocong Zhang](mailto:sz375@cam.ac.uk) and [Mehal Agarwal](mailto:ma988@cam.ac.uk). ## Architecture @@ -97,7 +95,7 @@ The user will need to change `/tmp/directory/for/models` to their folder of choi To run the full functions of the Marie system, three other systems are required: 1. The LDF server. See [LDF server readme](../JPS_LDF/README.md) to run it. -2. The semantic agents. See [PCE Agent readme](../Agents/PCEAgent/README.md) and [STDC Agent readme](../Agents/STDCThermoAgent/README.md) to create docker containers running them. +2. The semantic agents. See [PCE Agent readme](../../Agents/PCEAgent/README.md) and [STDC Agent readme](../../Agents/STDCThermoAgent/README.md) to create docker containers running them. ## Docker Deployment @@ -122,6 +120,7 @@ The deployment requires at least 16 GB of memory allocated to docker. The buildi To deploy the local LDF server (For reaction queries) and the Agents system (For agent queries) 1. Created a folder `/home/user1/Marie/TheWorldAvatar/MARIE_AND_BERT/DATA/KG` . 
Create `ontospecies.nt` and `ontocompchem.nt` using + ``` python KGToolbox/SPARQLEndpoint/export_triples.py --endpoint http://www.theworldavatar.com/blazegraph/namespace/copy_ontospecies_marie @@ -133,6 +132,7 @@ python KGToolbox/SPARQLEndpoint/export_triples.py --endpoint http://www.theworldavatar.com/blazegraph/namespace/ontocompchem --output_filename ontocompchem.nt ``` + The script needs to be run under `/home/user1/Marie/TheWorldAvatar/MARIE_AND_BERT` and the files will be created under `MARIE_AND_BERT/DATA/KG`. diff --git a/QuestionAnswering/MARIE_SEQ2SEQ/training/README.md b/QuestionAnswering/MARIE_SEQ2SEQ/training/README.md index 09d417f247d..5efc00b2a1f 100644 --- a/QuestionAnswering/MARIE_SEQ2SEQ/training/README.md +++ b/QuestionAnswering/MARIE_SEQ2SEQ/training/README.md @@ -2,7 +2,7 @@ Prerequisites -- Linux OS (recommended) . It is not advisable to run this project on Windows as [`bitsandbytes` is not supported on Windows]((https://github.com/TimDettmers/bitsandbytes/issues/30)). +- Linux OS (recommended) . It is not advisable to run this project on Windows as [`bitsandbytes` is not supported on Windows](https://github.com/TimDettmers/bitsandbytes/issues/30). - [conda](https://conda.io/projects/conda/en/latest/index.html) (recommended). - `python==3.10`. - CUDA @@ -69,7 +69,7 @@ Additional dependencies: - ONNX Runtime for GPU: `pip install optimum==1.12.0 && pip install optimum[onnxruntime-gpu]` - TensorRT: - [CUDA toolkit](https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html) - - [cuDNN](https://docs.nvidia.com/deeplearning/cudnn/install-guide/index.html) + - [cuDNN](https://docs.nvidia.com/deeplearning/cudnn/archives/cudnn-860/install-guide/index.html) - [TensorRT](https://docs.nvidia.com/deeplearning/tensorrt/install-guide/index.html) (installation from tar ball recommended) - To enable memory profiling with the command line argument `--do_profile`, run `pip install memory-profiler==0.61.0`. 
diff --git a/QuestionAnswering/QA_ICL/data_generation/README.md b/QuestionAnswering/QA_ICL/data_generation/README.md index 530ae1749b7..73191d025f1 100644 --- a/QuestionAnswering/QA_ICL/data_generation/README.md +++ b/QuestionAnswering/QA_ICL/data_generation/README.md @@ -406,13 +406,12 @@ Most scripts in this directory are in Python. The command line arguments support - Generate lexicons specific to an entity type: - Entities of `purl:Element` type: [lexicon/Element_lexicon.py](lexicon/Element_lexicon.py). - - Entities of `disp:Ship` type: [lexicon/Ship_lexicon.py](lexicon/Ship_lexicon.py). ### KG Schema Extraction -- Extract edge type info from an ABox exposed via a SPARQL endpoint: [simplified_schema/extract_edgetypes_from_tbox.py](simplified_schema/extract_edgetypes_from_abox.py). -- Extract relation type info from OWL files: [simplified_schema/extract_relations_from_tbox.py](simplified_schema/extract_schema_from_tbox.py). -- Extract relation type info from an ABox exposed via a SPARQL endpoint: [simplified_schema/extract_relations_from_abox.py](simplified_schema/extract_schema_from_abox.py) +- Extract edge type info from an ABox exposed via a SPARQL endpoint: [simplified_schema/extract_edgetypes_from_tbox.py](simplified_schema/extract_edgetypes_from_tbox.py). +- Extract relation type info from OWL files: [simplified_schema/extract_relations_from_tbox.py](simplified_schema/extract_relations_from_tbox.py). 
+- Extract relation type info from an ABox exposed via a SPARQL endpoint: [simplified_schema/extract_relations_from_abox.py](simplified_schema/extract_relations_from_abox.py) ### CSV-to-JSON Conversion of Data Request Generation Examples diff --git a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/history.md b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/history.md index 1f99983200a..0f92f6eff1c 100644 --- a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/history.md +++ b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/history.md @@ -22,7 +22,7 @@ [5] L. Pascazio, D. Tran, S. D. Rihm, Jiaru Bai, J. Akroyd, S. Mosbach, and M. Kraft, "Question-answering system for combustion kinetics", Technical Report 315, c4e-Preprint Series, Cambridge, 2023 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e-preprint-315.pdf)). -[6] D. Tran, S. D. Rihm, A. Kondniski, L. Pascazio, F. Saluz, S. Mosbach, J. Akroyd, and M. Kraft, "Natural Language Access Point to Digital Metal-Organic Polyhedra Chemistry in The World Avatar", Technical Report 327, c4e-Preprint Series, Cambridge, 2024 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e_327_nmdt2_MarieICL__preprint_.pdf)). +[6] D. Tran, S. D. Rihm, A. Kondniski, L. Pascazio, F. Saluz, S. Mosbach, J. Akroyd, and M. Kraft, "Natural Language Access Point to Digital Metal-Organic Polyhedra Chemistry in The World Avatar", Technical Report 327, c4e-Preprint Series, Cambridge, 2024 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e_327_nmdt2_MarieICL__preprint__dih1fxm.pdf)). 
## Previous versions diff --git a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontocompchem.md b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontocompchem.md index f1552e96366..69b3854f12a 100644 --- a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontocompchem.md +++ b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontocompchem.md @@ -8,7 +8,7 @@ OntoCompChem is an ontology designed to represent the input and output processes ### Download -- [OntoCompChem.owl](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontocompchem/OntoCompChem.owl) +- [OntoCompChem.owl](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontocompchem/ontocompchem.owl) ### Access diff --git a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontokin.md b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontokin.md index 76469718bf4..fabc5efee5c 100644 --- a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontokin.md +++ b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontokin.md @@ -8,7 +8,7 @@ OntoKin is an ontology designed to represent reaction mechanisms. It details set ### Download -- [OntoKin.owl](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontokin/OntoKin.owl) +- [OntoKin.owl](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontokin/OntoKin.owl) ### Access @@ -26,7 +26,7 @@ OntoKin is an ontology designed to represent reaction mechanisms. It details set ### Preprints -[1] F. Farazi, J. Akroyd, S. Mosbach, P. Buerger, D. Nurkowski, and M. Kraft, "OntoKin: An ontology for chemical kinetic reaction mechanisms", Technical Report 218, c4e-Preprint Series, Cambridge, 2019 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e-preprint-218.pdf)) +[1] F. Farazi, J. Akroyd, S. Mosbach, P. Buerger, D. Nurkowski, and M. 
Kraft, "OntoKin: An ontology for chemical kinetic reaction mechanisms", Technical Report 218, c4e-Preprint Series, Cambridge, 2019 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e-Preprint-218.pdf)) [2] F. Farazi, N. Krdzavac, J. Akroyd, S. Mosbach, A. Menon, D. Nurkowski, and M. Kraft, "Linking reaction mechanisms and quantum chemistry: An ontological approach", Technical Report 236, c4e-Preprint Series, Cambridge, 2019 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e-preprint-236.pdf)) diff --git a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontomops.md b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontomops.md index c2ba9897ae4..69b253e96c7 100644 --- a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontomops.md +++ b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontomops.md @@ -8,7 +8,7 @@ The OntoMOPs ontology is designed to provide and enrich semantic relationships b ### Download -- [OntoMOPs.owl](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontomops/OntoMOPs.owl) +- [ontomops-ogm.ttl](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontomops/ontomops-ogm.ttl) ### Publications @@ -18,4 +18,4 @@ The OntoMOPs ontology is designed to provide and enrich semantic relationships b [1] A. Kondinski, A. Menon, D. Nurkowski, F. Farazi, S. Mosbach, J. Akroyd, and M. Kraft, "Automated Rational Design of Metal-Organic Polyhedra", Technical Report 292, c4e-Preprint Series, Cambridge, 2022 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/OntoMOPs_preprint_4AP6w6C.pdf)). -[2] D. Tran, S. D. Rihm, A. Kondniski, L. Pascazio, F. Saluz, S. Mosbach, J. Akroyd, and M. Kraft, "Natural Language Access Point to Digital Metal-Organic Polyhedra Chemistry in The World Avatar", Technical Report 327, c4e-Preprint Series, Cambridge, 2024 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e_327_nmdt2_MarieICL__preprint_.pdf)). 
\ No newline at end of file +[2] D. Tran, S. D. Rihm, A. Kondniski, L. Pascazio, F. Saluz, S. Mosbach, J. Akroyd, and M. Kraft, "Natural Language Access Point to Digital Metal-Organic Polyhedra Chemistry in The World Avatar", Technical Report 327, c4e-Preprint Series, Cambridge, 2024 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e_327_nmdt2_MarieICL__preprint__dih1fxm.pdf)). \ No newline at end of file diff --git a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontospecies.md b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontospecies.md index 9bc517966c0..cc7576e5182 100644 --- a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontospecies.md +++ b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontospecies.md @@ -8,7 +8,7 @@ OntoSpecies serves as core ontology within TWA chemistry domain. It is an ontolo ### Download -- [OntoSpecies_v2.owl](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontospecies/OntoSpecies_v2.owl) +- [OntoSpecies_v2.owl](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontospecies/OntoSpecies_v2.owl) ### Access diff --git a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontozeolite.md b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontozeolite.md index 228ea8977b0..003e0806d55 100644 --- a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontozeolite.md +++ b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontozeolite.md @@ -12,8 +12,8 @@ The OntoCrystal ontology provides a semantic representation of crystallographic ### Download -- [OntoZeolite.owl](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontozeolite/ontozeolite.owl) -- [OntoCrystal.owl](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontozeolite/ontocrystal.owl) +- 
[OntoZeolite.owl](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontozeolite/ontozeolite.owl) +- [OntoCrystal.owl](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontozeolite/ontocrystal.owl) ### Access diff --git a/obsolete/BIMCesiumVisualisation/ifcto3Dtilesnext/README.md b/obsolete/BIMCesiumVisualisation/ifcto3Dtilesnext/README.md index a91491e8bfa..3fe0fbf7919 100644 --- a/obsolete/BIMCesiumVisualisation/ifcto3Dtilesnext/README.md +++ b/obsolete/BIMCesiumVisualisation/ifcto3Dtilesnext/README.md @@ -1,3 +1,4 @@ + # IFC2Tileset Agent ## Description @@ -128,4 +129,5 @@ As Git does not allow empty directories, `.gitignore` files have been added to t - Fridge - Ensure that the assets are classified as Furniture or Generic Models for the converter to recognise them - `Furniture` are exported as IfcFurnishingElement while `Generic Models` are exported as IfcBuildingElementProxy - - For new asset types, please include their name (or part of) into line 60 of `agent/ifc2gltf.py` \ No newline at end of file + - For new asset types, please include their name (or part of) into line 60 of `agent/ifc2gltf.py` + \ No newline at end of file diff --git a/obsolete/JPS_DES/README.md b/obsolete/JPS_DES/README.md index e041684115e..ca822490c11 100644 --- a/obsolete/JPS_DES/README.md +++ b/obsolete/JPS_DES/README.md @@ -1,3 +1,4 @@ + # README for Distributed Energy System ## Python Dependencies @@ -134,4 +135,4 @@ Then, it calls upon the BlockchainWrapper agent that communicates with the block ## TODO: - [ ] TesseractOCR isn't the best for weather forecast, and its error rate has been increasing. Find another way of getting real weather data, but the current method works for now. (i.e. 
incomplete data comprehension due to using OCR as a scraping method) - [ ] virtual environment for python - + diff --git a/obsolete/JPS_Version_0/BMS/BMSMap/LICENSE.md b/obsolete/JPS_Version_0/BMS/BMSMap/LICENSE.md index 722f2f10529..3e5f5b619c7 100644 --- a/obsolete/JPS_Version_0/BMS/BMSMap/LICENSE.md +++ b/obsolete/JPS_Version_0/BMS/BMSMap/LICENSE.md @@ -8,7 +8,7 @@ https://github.com/kekscom/Color.js Copyright (c) 2017, Jan Marsch Triangulate.js -https://github.com/OSMBuildings/Triangulation +https://github.com/OSMBuildings/Triangulate Copyright (c) 2016, Jan Marsch, OSM Buildings diff --git a/obsolete/JPS_Version_0/BMS/BMSMap/README.md b/obsolete/JPS_Version_0/BMS/BMSMap/README.md index fea47e880b6..2354e128910 100644 --- a/obsolete/JPS_Version_0/BMS/BMSMap/README.md +++ b/obsolete/JPS_Version_0/BMS/BMSMap/README.md @@ -13,7 +13,7 @@ OSM Buildings is a JavaScript library for visualizing OpenStreetMap building geo The library version in this repository is a WebGL only variant of OSM Buildings. At some point it will fully integrate the Classic 2.5D version. -For the latest information about the project [follow us on Twitter](https://twitter.com/osmbuildings), read [our blog](http://blog.osmbuildings.org), or just mail us at mail@osmbuildings.org. +For the latest information about the project [follow us on Twitter](https://twitter.com/osmbuildings), read [our blog](https://medium.com/@osmbuildings), or just mail us at mail@osmbuildings.org. ### Not sure which version to use? diff --git a/ontology-tools/CMCLOntoChemExp/README.md b/ontology-tools/CMCLOntoChemExp/README.md index 790d04466f0..1cc866a3ee7 100644 --- a/ontology-tools/CMCLOntoChemExp/README.md +++ b/ontology-tools/CMCLOntoChemExp/README.md @@ -45,7 +45,7 @@ Under construction... 
- For the purpose of linking to OntoSpecies - ontospecies.uniquespeciesiri.kb.server.url - the server address where triple-store for OntoSpecies ABox files located - ontospecies.uniquespeciesiri.kb.repository.id - the namespace of the triple-store that contains OntoSpecies ABox files - - ontospecies.uniquespeciesiri.kb.abox.iri - the base URL of the OntoSpecies ABox files, should be http://www.theworldavatar.com/kb/ontospecies/ by default + - ontospecies.uniquespeciesiri.kb.abox.iri - the base URL of the OntoSpecies ABox files, should be http://www.theworldavatar.com/kb/ontospecies/ by default - For provenance information of the experiment data - Under construction... will be updated in v1.3... - For controlling if generated ABox files are to be uploaded to triple-store automatically @@ -61,7 +61,7 @@ Under construction... ## Useful links -Automated Calibration of a Poly(oxymethylene) Dimethyl Ether Oxidation Mechanism Using the Knowledge Graph Technology [[paper](https://doi.org/10.1021/acs.jcim.0c01322)] [[preprint](https://como.ceb.cam.ac.uk/preprints/262/)] +Automated Calibration of a Poly(oxymethylene) Dimethyl Ether Oxidation Mechanism Using the Knowledge Graph Technology [[paper](https://pubs.acs.org/doi/10.1021/acs.jcim.0c01322)] [[preprint](https://como.ceb.cam.ac.uk/preprints/262/)] @@ -76,7 +76,7 @@ Automated Calibration of a Poly(oxymethylene) Dimethyl Ether Oxidation Mechanism - Made class `BibliographyLink` `EQUIVALENT-TO` class `OntoKin:Reference` - Added class `Velocity`, `SootYield`, `MassBurningRate`, `Mass`, `SpecificSurfaceArea`, `Material`, `Fraction`, `Voltage`, `Temperature`, `Length`, `Pressure`, `Density`, `Volume`, `FlowRate`, `Time`, `VolumetricFlowRate`, `ResidenceTime`, `LaminarBurningVelocity`, `Distance`, `InitialComposition`, `IgnitionDelay`, `Composition`, `Concentration`, `EquivalenceRatio`, `TemperatureInReferenceState`, `PressureInReferenceState`, `VolumetricFlowRateInReferenceState`, `ReactorLength`, `Diameter`, 
`JunctionArchitecture`, `DonorConstructionType`, `Acceptor`, `Donor`, `HomoEnergy`, `LumoEnergy`, `HomoLumoEnergyGap`, `OpticalEnergyGap`, `OpenCircuitPotential`, `ShortCircuitCurrentDensity`, `PowerConversionEfficiency`, `FillFactor` as subclass of `DimensionalQuantity` - Added class `http://xmlns.com/foaf/0.1/Agent`, `http://xmlns.com/foaf/0.1/Person`, `http://xmlns.com/foaf/0.1/Organization`, `http://purl.org/ontology/bibo/Journal` - - Inherited class `OntoKin:PublicationSpecification`, `OntoKin:JournalSpecification`, `OntoKin:ProceedingsSpecification`, `OntoKin:PreprintSpecification` from [`OntoKin`](http://theworldavatar.com/ontology/ontokin/OntoKin.owl) ontology + - Inherited class `OntoKin:PublicationSpecification`, `OntoKin:JournalSpecification`, `OntoKin:ProceedingsSpecification`, `OntoKin:PreprintSpecification` from [`OntoKin`](https://raw.githubusercontent.com/TheWorldAvatar/ontology/refs/heads/main/ontology/ontokin/OntoKin.owl) ontology - Relationships @@ -89,7 +89,7 @@ Automated Calibration of a Poly(oxymethylene) Dimethyl Ether Oxidation Mechanism - `` to make the direct connection between measured data point `X` with the physical `DimensionalQuantity` it represents - ``, `` - Redundant `hasDataPointX` and `hasUncertainty` related to `X1`-`X11` - - Inherited publication-related object property from [`OntoKin`](http://theworldavatar.com/ontology/ontokin/OntoKin.owl) ontology + - Inherited publication-related object property from [`OntoKin`](https://raw.githubusercontent.com/TheWorldAvatar/ontology/refs/heads/main/ontology/ontokin/OntoKin.owl) ontology - `` - `` - `` @@ -108,7 +108,7 @@ Automated Calibration of a Poly(oxymethylene) Dimethyl Ether Oxidation Mechanism - `` - `` - Redundant `hasValue` related to `X1`-`X11` - - Inherited publication-related data property from [`OntoKin`](http://theworldavatar.com/ontology/ontokin/OntoKin.owl) ontology + - Inherited publication-related data property from 
[`OntoKin`](https://raw.githubusercontent.com/TheWorldAvatar/ontology/refs/heads/main/ontology/ontokin/OntoKin.owl) ontology - `` - `` - `` diff --git a/thermo/README.md b/thermo/README.md index 857c26eb6c4..916d5e21d7e 100644 --- a/thermo/README.md +++ b/thermo/README.md @@ -32,19 +32,18 @@ Repository Name: thermochemistry * OpenBabel: This is a headache. The project JOpenBabel (v2.3.1 or v2.4.1) does not seem to exist in any maven repository any more. - The maven repository at the Chemistry Department (https://maven.ch.cam.ac.uk/m2repo) also does not exist any more. + The maven repository at the Chemistry Department also does not exist any more. Therefore, both have been commented out in all pom.xml files. For the CoMoThermodynamics project, this seems to be without consequence, as there does not appear to be a genuine dependency. The CoMoOpenBabel project still builds, but in order to run the tests, one needs the babel command-line executable as well as the DLL/shared object library (but no jar apparently). NB The name of the DLL/shared object library is hard-coded in CoMoOpenBabel/src/main/.../openbabel/util/OpenBabelUtil.java, currently as openbabel_java (works for Windows only). The CoMoTools project is the only project with a genuine OpenBabel dependency, through the source file CoMoTools/src/main/.../tools/structure/util/OpenBabelCompoundConverter.java, but it does not appear to be used by anything in the repository so it has been excluded from the build (by renaming the file). References: - http://openbabel.org/wiki/Main_Page + https://openbabel.org/docs/index.html https://sourceforge.net/projects/openbabel/ (NB This does install a .jar file.) sudo yum install {openbabel,openbabel-devel} (NB This installs babel command-line executable and shared library libopenbabel.so, but no jars.) 
- Note perhaps also: http://dev.cyfronet.pl/mvnrepo/openbabel/openbabel/ - http://openbabel.org/docs/current/UseTheLibrary/Java.html#macosx-and-linux - https://openbabel.org/docs/dev/Installation/install.html + Note perhaps also: https://openbabel.org/docs/UseTheLibrary/Java.html#macosx-and-linux + https://openbabel.org/docs/Installation/install.html * Jmol/JSmol: https://sourceforge.net/projects/jmol/files/ (https://sourceforge.net/projects/jsmol/ is deprecated) diff --git a/thermo/obda-thermochemistry/README.md b/thermo/obda-thermochemistry/README.md index fccc5a8019f..5c9c773340d 100644 --- a/thermo/obda-thermochemistry/README.md +++ b/thermo/obda-thermochemistry/README.md @@ -1,7 +1,7 @@ # Developing an Ontology Based Data Access (OBDA) Project ### Authors -* [Nenad Krdzavac](caresssd@hermes.cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Nenad Krdzavac](mailto:caresssd@hermes.cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) OBDA is a means to access and query data stored in databases using SPARQL. This short document aims to describe the steps required to develop an OBDA project using Java and PostgreSQL relational database management system. @@ -84,7 +84,7 @@ For adding these data to the *tb_books* table, you can use the following INSERT You can either copy the *exampleBooks.owl* ontology from [here](https://www.dropbox.com/home/IRP3%20CAPRICORN%20shared%20folder/_JPS%20Development/data) to the *resources* (src/main/resources) folder in the Maven project you created or develop the same ontology by following the steps below and put this under the same folder. If you already have copied the ontology into the resourced folder, you can go to the next section. -* Create an ontology with the OntologyIRI *http://theworldavatar.com/ontology/obda/exampleBooks.owl*. Include the classes from the following hierarchy in this ontology. 
Classes which have the same indentation are siblings, and classes which have different indentations are connected with subclass of relations. Classes indented to the right are subclasses of the class which is indented to the left and above. For example, Author and Book are siblings, AudioBook and E-Book are siblings, AudioBook is a subclass of Book and E-Book is a subclass of Book. +* Create an ontology with the OntologyIRI *https://raw.githubusercontent.com/cambridge-cares/TheWorldAvatar/main/thermo/obda-thermochemistry/resources/books/exampleBooks.owl*. Include the classes from the following hierarchy in this ontology. Classes which have the same indentation are siblings, and classes which have different indentations are connected with subclass of relations. Classes indented to the right are subclasses of the class which is indented to the left and above. For example, Author and Book are siblings, AudioBook and E-Book are siblings, AudioBook is a subclass of Book and E-Book is a subclass of Book. Author EmergingWriter @@ -125,7 +125,7 @@ It is important to remember that the expressivity of the ontology used in OBDA s The following SPARQL query extracts the code and title of books. Save this query in a file called *book_code_title.rq* and put this file under the resources folder of the Maven project. - PREFIX books: + PREFIX books: SELECT DISTINCT ?book ?title WHERE { ?book a books:Book . @@ -138,7 +138,7 @@ The following SPARQL query extracts the code and title of books. Save this query Create a mapping file called *books_all.obda* in the resources folder of the Maven project and put the following three blocks of code into this file by maintaining the order of their appearance. This file establishes mapping(s) between a SPARQL query and the database via the ontology. 
[PrefixDeclaration] - : http://theworldavatar.com/ontology/obda/exampleBooks.owl# + : https://raw.githubusercontent.com/cambridge-cares/TheWorldAvatar/main/thermo/obda-thermochemistry/resources/books/exampleBooks.owl# owl: http://www.w3.org/2002/07/owl# rdf: http://www.w3.org/1999/02/22-rdf-syntax-ns# rdfs: http://www.w3.org/2000/01/rdf-schema# @@ -166,7 +166,7 @@ Do not forget to provide your user name and password for PostgreSQL in the Sourc The mapping file contains three blocks. -* It can be understood from the name *PrefixDeclaration* that the *first block* includes prefix declarations including *owl*, *rdf*, and *:*, which represents the prefix of the default namespace (the Ontology IRI followed by #) and in this particular example project it is *http://theworldavatar.com/ontology/obda/exampleBooks.owl#*. +* It can be understood from the name *PrefixDeclaration* that the *first block* includes prefix declarations including *owl*, *rdf*, and *:*, which represents the prefix of the default namespace (the Ontology IRI followed by #) and in this particular example project it is *https://raw.githubusercontent.com/cambridge-cares/TheWorldAvatar/main/thermo/obda-thermochemistry/resources/books/exampleBooks.owl#*. * The *second block* is called *SourceDeclaration*, which includes information about *sourceUri* that is the name of database. The feature *connectionUrl* represents the Java Database Connectivity (*JDBC*) for the target database. *username* and *password* are credentials for accessing the database. The *driverClass* is the driver class for the database. 
diff --git a/thermo/spin-thermochemistry/README.md b/thermo/spin-thermochemistry/README.md index d23e30a7e48..9e41b42348a 100644 --- a/thermo/spin-thermochemistry/README.md +++ b/thermo/spin-thermochemistry/README.md @@ -1,7 +1,7 @@ # Developing a SPARQL Inferencing Notation (SPIN) Project ### Authors -* [Nenad Krdzavac](caresssd@hermes.cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Nenad Krdzavac](mailto:caresssd@hermes.cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) SPIN is an industry-standard to represent SPARQL rules and constraints on Semantic Web models [[1](https://spinrdf.org)]. This short document aims to describe the steps required to develop a SPIN project using Java. diff --git a/web/augmented-uk/docs/data.md b/web/augmented-uk/docs/data.md index 2bb233c6862..5bcbf4d5b39 100644 --- a/web/augmented-uk/docs/data.md +++ b/web/augmented-uk/docs/data.md @@ -38,14 +38,14 @@ Data directories: Once a year, the UK government publishes a Digest of UK Energy Statistics (DUKES); note this was formally published by the Department for Business, Energy and Industrial Strategy (BEIS) before it was dissolved, subsequent publications should be from the new Department for Energy Security and Net Zero (DESNZ). -Read the associated [DUKES Data](./docs/data-dukes.md) page for details on how the DUKES data was acquired and processed. +Read the associated [DUKES Data](./data-dukes.md) page for details on how the DUKES data was acquired and processed. #### Associated files -- [Uploader config](./augmented-uk/config/uploader/config/dukes_2023.json) -- [Ontop mapping](./augmented-uk/config/uploader/data/dukes_2023/dukes_2023.obda) +- [Uploader config](../inputs/uploader/config/dukes2023.json) +- [Ontop mapping](../inputs/uploader/data/dukes_2023/dukes_2023.obda) - Note that at the time of writing, this mapping utilises TBoxes that do not appear within the OntoEIP ontology. 
Nothing in the mapping contradicts the ontology, but the existing ontology does not contain enough concepts to cover all of the concepts provided by DUKES. -- [OntoEIP ontology](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoeip) +- [OntoEIP ontology](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoeip) - Note that when uploading the ontology files, you may need to rename any Turtle files with the `.ttl` extension. The stack data uploader assumes that `.owl` files are using the XML format, if an `.owl` file is using Turtle then this will cause errors during upload. Open the OntoEIP ontology link and find the resource_network folder. Put 'resource_network.ttl' into 'inputs/data/ontoeip' folder. Contact CMCL if you need the newest dukes data. @@ -65,10 +65,10 @@ No pre-processing is needed on this data set, we're using it as is. #### Associated files -- [Uploader config](./augmented-uk/config/uploader/config/population.json) -- [Geospatial SQL Query](./augmented-uk/config/uploader/config/sql/dukes_2023_pop.sql) +- [Uploader config](../inputs/uploader/config/population.json) +- [Geospatial SQL Query](../inputs/uploader/config/sql/dukes_2023_pop.sql) - An SQL query to determine the number of people within a 1KM radius of each power plant. -- [Raster style](./augmented-uk/config/uploader/config/sld/uk-population-style.sld) +- [Raster style](../inputs/uploader/config/sld/uk-population-style.sld) - SLD file to style the population raster data in GeoServer. ### Digest of UK Energy Statistics (DUKES) @@ -87,5 +87,5 @@ Shapefiles are obtained from [national forestry inventory 2020](https://data-for ### Streetlamps, traffic signals and England highways -Links to sources (mostly local council data portals) are in each relevant [data folder](inputs/config/uploader/data). +Links to sources (mostly local council data portals) are in each relevant [data folder](../inputs/uploader/data). 
diff --git a/web/augmented-uk/inputs/uploader/data/street_light/king's_lynn/README.md b/web/augmented-uk/inputs/uploader/data/street_light/king's_lynn/README.md index cc922797bd9..01081e2447e 100644 --- a/web/augmented-uk/inputs/uploader/data/street_light/king's_lynn/README.md +++ b/web/augmented-uk/inputs/uploader/data/street_light/king's_lynn/README.md @@ -1,3 +1,4 @@ + Add Norfolk street light data here from https://www.whatdotheyknow.com/request/street_light_locations_and_refer. Convert the Excel Spreadsheet to CSV form. Merge the two CSVs into one. diff --git a/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-overhead-lines/README.md b/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-overhead-lines/README.md index da2c8a11918..eae78c31937 100644 --- a/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-overhead-lines/README.md +++ b/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-overhead-lines/README.md @@ -1 +1 @@ -Add 132kV overhead line ShapeFile or geojsonfiles here from https://ukpowernetworks.opendatasoft.com/explore/dataset/132kv-overhead-lines. \ No newline at end of file +Add 132kV overhead line ShapeFile or geojsonfiles here from https://ukpowernetworks.opendatasoft.com/explore/dataset/ukpn-132kv-overhead-lines. \ No newline at end of file diff --git a/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-poles-towers/README.md b/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-poles-towers/README.md index a81a1edfdb9..e515712173d 100644 --- a/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-poles-towers/README.md +++ b/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-poles-towers/README.md @@ -1 +1 @@ -Add 132kV poles and towers ShapeFile or geojson files here from https://ukpowernetworks.opendatasoft.com/explore/dataset/132kv-poles-towers. 
\ No newline at end of file +Add 132kV poles and towers ShapeFile or geojson files here from https://ukpowernetworks.opendatasoft.com/explore/dataset/ukpn-132kv-poles-towers/. \ No newline at end of file diff --git a/web/augmented-uk/inputs/uploader/data/ukpn/vector/33kv-overhead-lines/README.md b/web/augmented-uk/inputs/uploader/data/ukpn/vector/33kv-overhead-lines/README.md index ce5663ac712..93284ff7e7b 100644 --- a/web/augmented-uk/inputs/uploader/data/ukpn/vector/33kv-overhead-lines/README.md +++ b/web/augmented-uk/inputs/uploader/data/ukpn/vector/33kv-overhead-lines/README.md @@ -1 +1 @@ -Add 33kV overhead line ShapeFile or geojson files here from https://ukpowernetworks.opendatasoft.com/explore/dataset/33kv-overhead-lines. \ No newline at end of file +Add 33kV overhead line ShapeFile or geojson files here from https://ukpowernetworks.opendatasoft.com/explore/dataset/ukpn-33kv-overhead-lines. \ No newline at end of file diff --git a/web/augmented-uk/inputs/uploader/data/ukpn/vector/ukpn-66kv-overhead-lines-shapefile/README.md b/web/augmented-uk/inputs/uploader/data/ukpn/vector/ukpn-66kv-overhead-lines-shapefile/README.md index 5df36b07839..6af685d8668 100644 --- a/web/augmented-uk/inputs/uploader/data/ukpn/vector/ukpn-66kv-overhead-lines-shapefile/README.md +++ b/web/augmented-uk/inputs/uploader/data/ukpn/vector/ukpn-66kv-overhead-lines-shapefile/README.md @@ -1 +1 @@ -Add 33kV poles and towers ShapeFile or geojson files here from https://ukpowernetworks.opendatasoft.com/explore/dataset/ukpn-66kv-overhead-lines-shapefile. \ No newline at end of file +Add 33kV poles and towers ShapeFile or geojson files here from https://ukpowernetworks.opendatasoft.com/explore/dataset/ukpn-66kv-overhead-lines-shapefile/. 
\ No newline at end of file diff --git a/web/docs/README.md b/web/docs/README.md index b354fcc4efa..1f74cde4337 100644 --- a/web/docs/README.md +++ b/web/docs/README.md @@ -8,6 +8,7 @@ Note that these documentation pages are a constant work in progress, and will be The TWA project has a publicly facing website located at [https://theworldavatar.io](https://theworldavatar.io). This is site was created by, and it hosted at, CMCL in Cambridge. It runs from Docker containers using standard [Wordpress](https://wordpress.com/) installations, behind a single reverse proxy created using [NGINX](https://www.nginx.com/). Two versions of the site are hosted at once; production (available by the aforementioned URL), and development (accessed using the `dev.` subdomain). + For more details on how the TWA website was created and hosted, see the [repository here](https://github.com/cmcl-io/theworldavatar.io). Note that this is a private repository created by CMCL, you'll need their permission to access it. ### Making changes diff --git a/web/pylon-visualisation/README.md b/web/pylon-visualisation/README.md index 9edab475454..ecbd501f881 100644 --- a/web/pylon-visualisation/README.md +++ b/web/pylon-visualisation/README.md @@ -3,4 +3,4 @@ Visualisation of pylon data from National Grid and UK Power Networks compared with vegetation data. ## Requirements -Please see [Building The Image](..\example-mapbox-vis\README.md#building-the-image) for the requirements. [Forestry](../../../Deploy/stacks/dynamic/example_datasets/inputs/data/forestry/), [cropmap](../../../Deploy/stacks/dynamic/example_datasets/inputs/data/cropmap/), [UK Power Networks' pylons](../../../Deploy/stacks/dynamic/example_datasets/inputs/data/ukpn_pylons/), and [National Grid pylons](../../../Deploy/stacks/dynamic/example_datasets/inputs/data/ng_pylons/) data are required to be uploaded using the configuration file [pylons-and-veg.json](../../../Deploy/stacks/dynamic/example_datasets/inputs/config/pylons-and-veg.json). 
Instructions on loading data into the stack can be found [here](../../../Deploy/stacks/dynamic/stack-data-uploader/README.md). \ No newline at end of file +Please see [Spinning up the example stack](../twa-vis-framework/example/README.md#spinning-up-the-example-stack) for the requirements. [Forestry](../../Deploy/stacks/dynamic/examples/datasets/inputs/data/forestry/), [cropmap](../../Deploy/stacks/dynamic/examples/datasets/inputs/data/cropmap/), [UK Power Networks' pylons](../../Deploy/stacks/dynamic/examples/datasets/inputs/data/ukpn_pylons/), and [National Grid pylons](../../Deploy/stacks/dynamic/examples/datasets/inputs/data/ng_pylons/) data are required to be uploaded using the configuration file [pylons-and-veg.json](../../Deploy/stacks/dynamic/examples/datasets/inputs/config/pylons-and-veg.json). Instructions on loading data into the stack can be found [here](../../Deploy/stacks/dynamic/stack-data-uploader/README.md). \ No newline at end of file diff --git a/web/twa-vis-framework/docs/cesium.md b/web/twa-vis-framework/docs/cesium.md index d3b96a34ef9..d31a503a66b 100644 --- a/web/twa-vis-framework/docs/cesium.md +++ b/web/twa-vis-framework/docs/cesium.md @@ -127,7 +127,7 @@ The default location of the Cesium map can be set using the below format in the An example CesiumJS visualisation has been committed to repository to act both as an example, and a template for users putting together new visualisations. -You can find the visualisation, along with documentation of how it was put together, in the [example-cesium-vis](../example-cesium-vis/) directory. +You can find the visualisation, along with documentation of how it was put together, in the [example](../example/) directory.

diff --git a/web/twa-vis-framework/docs/mapbox.md b/web/twa-vis-framework/docs/mapbox.md index 5f6b78614b4..58329aa1ed7 100644 --- a/web/twa-vis-framework/docs/mapbox.md +++ b/web/twa-vis-framework/docs/mapbox.md @@ -141,7 +141,7 @@ The default location of the Mapbox map can be set using the below format in the An example Mapbox visualisation has been committed to repository to act both as an example, and a template for users putting together new visualisations. -You can find the visualisation, along with documentation of how it was put together, in the [example-mapbox-vis](../example-mapbox-vis/) directory. +You can find the visualisation, along with documentation of how it was put together, in the [example](../example/) directory.

diff --git a/web/twa-vis-framework/docs/overview.md b/web/twa-vis-framework/docs/overview.md index b6bf8b81c7d..fc691a7a63f 100644 --- a/web/twa-vis-framework/docs/overview.md +++ b/web/twa-vis-framework/docs/overview.md @@ -12,7 +12,7 @@ Once displayed, a number of standard interaction handlers are also added. These ## Mapping providers -At the time of writing the available mapping providers are [Mapbox](https://www.mapbox.com/) and [Cesium](https://cesium.com/platform/Cesium/). The core differences between providers is as follows: +At the time of writing the available mapping providers are [Mapbox](https://www.mapbox.com/) and [Cesium](https://cesium.com/platform/). The core differences between providers is as follows: * Mapbox can only handle 2D data (with the option to extrude 2D polygons into basic 3D polyhedrons) from local files or from [WMS endpoints](https://en.wikipedia.org/wiki/Web_Map_Service). Unlike Cesium (see below), Mapbox can display 2D vector data (including use of SVGs for icons, under certain conditions) if the data is hosted using the [Mapbox Vector Tiles](https://docs.mapbox.com/data/tilesets/guides/vector-tiles-introduction/) format. It is however quite customisable and has relatively small performance overhead. Unless you're plotting 3D building data, it's advised to use this mapping provider. @@ -164,13 +164,13 @@ It's worth noting that these credential files should **not** be committed; to th Display of meta and timeseries data is also a feature offered by the TWA-VF (regardless of the chosen mapping provider). However, the processing of getting this system setup can be quite lengthy. -To query for dynamic data, each selectable feature of your data also needs to contain `iri` and `endpoint` properties. Once selected, these are sent to a remote agent ([FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-feature-info-agent/Agents/FeatureInfoAgent)) running in a stack. 
Data is queried from the knowledge graph and/or relational database, then returned for display in the visualisation's side panel. +To query for dynamic data, each selectable feature of your data also needs to contain `iri` and `endpoint` properties. Once selected, these are sent to a remote agent ([FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent)) running in a stack. Data is queried from the knowledge graph and/or relational database, then returned for display in the visualisation's side panel. A breakdown of the requirements to use this system are below, for more information check out the FeatureInfoAgent's documentation. * A stack instance needs to be running (at some location, can be remote), containing: * A Blazegraph instance holding metadata on the visualised features. - * An instance of the [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-feature-info-agent/Agents/FeatureInfoAgent) with a mapping of the possible feature classes to pre-written SPARQL queries. These queries must return data in a specific tabular format. + * An instance of the [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent) with a mapping of the possible feature classes to pre-written SPARQL queries. These queries must return data in a specific tabular format. * If applicable, a PostgreSQL instance containing time series data. * Geospatial data needs to contain `iri`, and `endpoint` fields for each feature (regardless of how the data is served, i.e. locally or via WMS). * The `iri` field needs to contain the full IRI of the feature as represented in the knowledge graph. 
diff --git a/web/twa-vis-framework/docs/troubleshooting.md b/web/twa-vis-framework/docs/troubleshooting.md index 0a01d2d4c95..adbbef5e7d0 100644 --- a/web/twa-vis-framework/docs/troubleshooting.md +++ b/web/twa-vis-framework/docs/troubleshooting.md @@ -13,7 +13,7 @@ This section is relevant to all uses of the TWA-VF, regardless of the selected m | Issue/Question | Solution/Answer | | ----------- | ----------- | | Do my data files have to be hosted online? | Both mapping providers require a valid URL to load data files, this does mean that they have to be accessible online. However, data files can be included within the visualisation container (which uses Apache to host a web server) so that they can be accessed via a URL relative to the visualisation's hosted directory (i.e. "/data/tileset.json"). | -| Visualisation not updating after changes | Try clearing ([or disabling](https://www.webinstinct.com/faq/how-to-disable-browser-cache#:~:text=When%20you're%20in%20Google,close%20out%20of%20Developer%20Tools.)) your browser cache before reloading the page. | +| Visualisation not updating after changes | Try clearing ([or disabling](https://stackoverflow.com/a/7000899)) your browser cache before reloading the page. | | Visualisation not updating after changes | If running the visualisation within a Docker container, you may need to rebuild the Docker image and run a new container to see recent file changes. | | No data is shown | If no data is shown _and_ no layer tree is built then this suggests that one (or more) of the user defined JSON files is invalid. Please use an external validator tool (or website) to ensure that the JSON is valid. | | Hovering over a feature does nothing | The framework supports mouse hovering effects if the input data contains certain metadata fields. To show a small description box the geospatial data needs to contain a `name` and `description` field. 
| diff --git a/web/twa-vis-framework/docs/tutorial-mapbox.md b/web/twa-vis-framework/docs/tutorial-mapbox.md index eb669826a45..2432a8302c8 100644 --- a/web/twa-vis-framework/docs/tutorial-mapbox.md +++ b/web/twa-vis-framework/docs/tutorial-mapbox.md @@ -60,7 +60,7 @@ These raw CSV files also contain some strange characters that aren't supported i ## Writing an ontology -As an example, a very simple sample NHS ontology has been put together to describe the concepts within this tutorial's data set. This ontology has been created as a CSV file, and uploaded via the use of the [TBox Generator](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/converter), see the Stack Data Uploader's documentation for more details on how to upload it. +As an example, a very simple sample NHS ontology has been put together to describe the concepts within this tutorial's data set. This ontology has been created as a CSV file, and uploaded via the use of the [TBox Generator](https://github.com/TheWorldAvatar/BaseLib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/converter), see the Stack Data Uploader's documentation for more details on how to upload it. A copy of the simple ontology used can be seen below as well as in the TWA repository [here](./resources/nhs.csv). @@ -281,7 +281,7 @@ If you haven't already, it's worth reading through the [Overview](./overview.md) The first step here is to spin up an empty visualisation. When creating a new visualisation, it is recommended that the committed example visualisation is used. -To that end, copy the [example Mapbox visualisation](../example-mapbox-vis/) (the one containing the `run.sh` and `docker-compose.yml` files) to a new directory (of your choice) on your local machine. Using the README file within, you should be able to then spin up a docker container hosting the visualisation. 
+To that end, copy the [example Mapbox visualisation](../example/) (the one containing the `run.sh` and `docker-compose.yml` files) to a new directory (of your choice) on your local machine. Using the README file within, you should be able to then spin up a docker container hosting the visualisation. If you then access the visualisation (usually at `localhost`), you should see the example visualisation along with its sample data in Cambridge, India, and Singapore. diff --git a/web/twa-vis-framework/docs/tutorials.md b/web/twa-vis-framework/docs/tutorials.md index f6a730f5bf9..53b1cbec25c 100644 --- a/web/twa-vis-framework/docs/tutorials.md +++ b/web/twa-vis-framework/docs/tutorials.md @@ -13,7 +13,7 @@ A curated list of recommended external, non TWA-VF tutorials is shown below.
**Mapbox:** -* [Add custom markers to a map with Mapbox GL JS](https://docs.mapbox.com/help/tutorials/custom-markers-gl-js-video/) +* [Add custom markers to a map with Mapbox GL JS](https://docs.mapbox.com/help/tutorials/custom-markers-gl-js/) * [Get started with Mapbox GL JS expressions](https://docs.mapbox.com/help/tutorials/mapbox-gl-js-expressions/) * [Create interactive hover effects with Mapbox GL JS](https://docs.mapbox.com/help/tutorials/create-interactive-hover-effects-with-mapbox-gl-js/) diff --git a/web/twa-vis-framework/library/README.md b/web/twa-vis-framework/library/README.md index 00a49233b93..c22fa14c712 100644 --- a/web/twa-vis-framework/library/README.md +++ b/web/twa-vis-framework/library/README.md @@ -39,7 +39,7 @@ To function correctly, visualisations using this framework also needs to include * [Turf](https://turfjs.org/) * [Hummingbird Treeview](https://github.com/hummingbird-dev/hummingbird-treeview) -An example of the required import statements should be available in the example [Mapbox](../example-mapbox-vis/webspace/index.html) and [Cesium](../example-cesium-vis/webspace/index.html) visualisations. +An example of the required import statements should be available in the example [Mapbox](../example/mapbox.md) and [Cesium](../example/cesium.md) visualisations. ## Architecture @@ -93,11 +93,11 @@ The following automated GitHub actions have been setup for the TWA-VF (all defin ## Planned changes -An overview of bug reports, feature requests, and open PRs can be see using the [TWA Visualisation Framework](https://github.com/orgs/cambridge-cares/projects/1) project. Any new reports or requests should be linked to this project to ensure that it contains a complete overview of all related information. +An overview of bug reports, feature requests, and open PRs can be see using the [TWA Visualisation Framework](https://github.com/TheWorldAvatar/viz) project. 
Any new reports or requests should be linked to this project to ensure that it contains a complete overview of all related information. ### Issues -Bugs should be reported as GitHub issues using the `TWA-VF:` prefix along with a short name for the issue. A detailed description of the issue along with reproduction steps, and if possible, [an image of the issue](https://gist.github.com/NawalJAhmed/2168f7659c08b6a033e7f6daf8db69a6). +Bugs should be reported as GitHub issues using the `TWA-VF:` prefix along with a short name for the issue. A detailed description of the issue along with reproduction steps, and if possible, [an image of the issue](https://gist.github.com/namirjahmed/2168f7659c08b6a033e7f6daf8db69a6). Issue reporting a bug should also use the provided `bug` tag and link to the TWA Visualisation Framework project. From 9dc54e0d1c289dadd595b76330d3357aae258822 Mon Sep 17 00:00:00 2001 From: Myles Date: Sat, 8 Mar 2025 16:12:17 -0500 Subject: [PATCH 03/30] fix broken links --- .mlc_config.json | 3 +++ Agents/AccessAgent/README.md | 2 +- Agents/AndroidStatusAgent/README.md | 2 +- Agents/BMSQueryAgent/README.md | 2 +- Agents/BMSUpdateAgent/README.md | 2 +- Agents/BuildingIdentificationAgent/README.md | 6 +++--- Agents/DashboardAgent/README.md | 4 ++-- Agents/DataBridgeAgent/README.md | 4 ++-- Agents/DerivationAgentPythonExample/README.md | 4 ++-- Agents/EmailAgent/README.md | 2 +- Agents/FeatureInfoAgent/README.md | 4 ++-- Agents/FeatureInfoAgent/sample/README.md | 2 +- Agents/GeoSegmentAgent/README.md | 2 +- Agents/HeatEmissionAgent/README.md | 2 +- Agents/HistoricalNTUEnergyAgent/README.md | 2 +- Agents/LSOAInputAgent/README.md | 2 +- Agents/NTUDataAgent/README.md | 2 +- Agents/NTUEnergyClusterAgent/README.md | 4 ++-- Agents/NTUP2PEnergyAgent/README.md | 2 +- Agents/NTUPVLibAgent/README.md | 2 +- Agents/OpenMeteoAgent/README.md | 2 +- Agents/PIPSRequestAgent/README.md | 2 +- Agents/PIPSTimeSeriesAgent/README.md | 2 +- Agents/PVLibAgent/README.md | 2 +- 
Agents/SolarkatasterAgent/README.md | 2 +- Agents/TrajectoryQueryAgent/README.md | 2 +- Agents/UserAgent/README.md | 2 +- Agents/VisBackendAgent/README.md | 4 ++-- Apps/BMSQueryApp/README.md | 6 +++--- Apps/Modules/camera/README.md | 2 +- Apps/Modules/login/README.md | 4 ++-- Apps/PirmasensToiletApp/README.md | 4 ++-- Apps/PirmasensToiletApp/inputs/data/README.md | 4 ++-- Deploy/stacks/Singapore/README.md | 2 +- Deploy/stacks/cares-lab/README.md | 2 +- Deploy/stacks/db/fileserver/README.md | 2 +- Deploy/stacks/timeline/readme.md | 14 +++++++------- .../site/user/pages/02.explore/01.marie/marie.md | 2 +- .../pages/02.explore/02.digital-twin/default.md | 2 +- .../pages/02.explore/02.digital-twin/template.md | 2 +- .../web/website/site/user/plugins/error/README.md | 4 ++-- .../website/site/user/plugins/form/CHANGELOG.md | 1 + .../web/website/site/user/plugins/form/README.md | 2 +- .../plugins/form/vendor/google/recaptcha/README.md | 5 ++--- .../site/user/plugins/page-inject/README.md | 2 +- .../website/site/user/plugins/problems/README.md | 2 +- .../web/website/site/user/themes/quark/README.md | 2 +- .../vendor/gregwar/image/Gregwar/Image/README.md | 3 ++- .../website/site/vendor/league/climate/README.md | 1 - .../website/site/vendor/miljar/php-exif/README.md | 9 +-------- .../website/site/vendor/monolog/monolog/README.md | 11 ++--------- .../web/website/site/vendor/nyholm/psr7/README.md | 3 +-- .../site/vendor/php-http/message-factory/README.md | 7 +------ .../site/vendor/rockettheme/toolbox/README.md | 6 +++--- JPS_ARBITRAGE/README.md | 2 +- web/augmented-uk/README.md | 4 ++-- .../stack-manager-inputs/README.md | 2 +- web/twa-vis-framework/docs/overview.md | 4 ++-- web/twa-vis-framework/docs/tutorial-mapbox.md | 4 ++-- web/twa-vis-framework/example/mapbox.md | 2 +- 60 files changed, 89 insertions(+), 106 deletions(-) diff --git a/.mlc_config.json b/.mlc_config.json index 48a354a7b80..3e463229331 100644 --- a/.mlc_config.json +++ b/.mlc_config.json @@ -17,6 +17,9 @@ 
}, { "pattern": "https://maven.pkg.github.com/cambridge-cares/TheWorldAvatar/?" + }, + { + "pattern": "https://twitter.com/?" } ], "aliveStatusCodes": [200, 403, 0] diff --git a/Agents/AccessAgent/README.md b/Agents/AccessAgent/README.md index 1f364d5f4c9..d4093b19d4c 100644 --- a/Agents/AccessAgent/README.md +++ b/Agents/AccessAgent/README.md @@ -54,7 +54,7 @@ This will test the agent in a production environment including connections to th ## 2. Deployment Instructions -The Access Agent can be deployed in a standalone Docker container or as part of The World Avatar [stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +The Access Agent can be deployed in a standalone Docker container or as part of The World Avatar [stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). ### 2.1 Standalone Container diff --git a/Agents/AndroidStatusAgent/README.md b/Agents/AndroidStatusAgent/README.md index 56e62d052f8..d353d172806 100644 --- a/Agents/AndroidStatusAgent/README.md +++ b/Agents/AndroidStatusAgent/README.md @@ -9,7 +9,7 @@ Because DTVF can only send request instead of taking in any, logging the status AndroidStatusAgent in visualisation use case # 1. Setup -This agent is designed to run in stack, which is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +This agent is designed to run in stack, which is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). 
A successful setup will result in 9 containers: - 8 [default containers](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) - AndroidStatusAgent diff --git a/Agents/BMSQueryAgent/README.md b/Agents/BMSQueryAgent/README.md index cd0eda626a0..8499cc405e8 100644 --- a/Agents/BMSQueryAgent/README.md +++ b/Agents/BMSQueryAgent/README.md @@ -9,7 +9,7 @@ To achieve a balance between response speed and body size, the agent breaks the - Once the room is determined, users can send `retrieve/equipment?RoomIRI=` to get all the equipment in the selected room. # 1. Setup -This agent is designed to run in stack, which is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +This agent is designed to run in stack, which is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). A successful setup will result in 9 containers (optional 10): - Default containers - Stack Manager (exits when spins up all other containers) diff --git a/Agents/BMSUpdateAgent/README.md b/Agents/BMSUpdateAgent/README.md index 853cedaa217..f4a5816c442 100644 --- a/Agents/BMSUpdateAgent/README.md +++ b/Agents/BMSUpdateAgent/README.md @@ -9,7 +9,7 @@ BMSUpdateAgent is an agent designed for multiple functions: 4) It is able to retrieve the present value for a Bacnet object and update the knowledge graph accordingly. More information is available at the [Update Present Value Route](#34-update-present-value-route). # 1. Setup -This agent is designed to run in stack, which is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +This agent is designed to run in stack, which is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). ## 1.1. Build Image The BMSUpdateAgent is set up to use the Maven repository. 
You'll need to provide your credentials in single-word text files located like this: diff --git a/Agents/BuildingIdentificationAgent/README.md b/Agents/BuildingIdentificationAgent/README.md index 4c6742ed8da..7c26c393b9d 100644 --- a/Agents/BuildingIdentificationAgent/README.md +++ b/Agents/BuildingIdentificationAgent/README.md @@ -22,18 +22,18 @@ which must have a 'scope' that [allows you to publish and install packages](http #### Stack containers -This agent requires the following tools, which **MUST** run on the same stack. The details for setting them up are explained at [stack manager page](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +This agent requires the following tools, which **MUST** run on the same stack. The details for setting them up are explained at [stack manager page](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). (1) PostgreSQL database -The agent is designed to use the stack PostgreSQL. It requires the buildings data to be stored in a schema called 'citydb' in the 'postgres' database. The buildings data can be instantiated in the required format by uploading the raw data using the [stack data uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader). The user-specified table must be in the same 'postgres' database that contains the buildings data. +The agent is designed to use the stack PostgreSQL. It requires the buildings data to be stored in a schema called 'citydb' in the 'postgres' database. The buildings data can be instantiated in the required format by uploading the raw data using the [stack data uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader). The user-specified table must be in the same 'postgres' database that contains the buildings data. ### 1.2 Docker deployment - Build this agent's image by executing `docker compose build` within this folder. Do not start the container. 
- Copy the `buildingidentificationagent.json` file from the `stack-manager-input-config` folder into the `TheWorldAvatar/Deploy/stacks/dynamic/stack-manager/inputs/config/services` folder of the stack manager. -- Start the stack manager as usual following [these instructions](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +- Start the stack manager as usual following [these instructions](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). ## 2. Agent Routes diff --git a/Agents/DashboardAgent/README.md b/Agents/DashboardAgent/README.md index 7afe80436a2..ac6f8701a21 100644 --- a/Agents/DashboardAgent/README.md +++ b/Agents/DashboardAgent/README.md @@ -21,7 +21,7 @@ repo_username.txt should contain your Github username. repo_password.txt should which must have a 'scope' that [allows you to publish and install packages](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages). ##### Stack containers -This agent requires the following tools, which **MUST** run on the same stack. Please read more from the [stack manager page](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) for setting these containers up. +This agent requires the following tools, which **MUST** run on the same stack. Please read more from the [stack manager page](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) for setting these containers up. (1) [Grafana](https://grafana.com/docs/grafana/latest/) dashboard - Requires the deployment of the built-in optional `grafana` service on the stack to configure and set up dashboards @@ -58,7 +58,7 @@ docker compose -f "./docker/docker-compose.test.yml" up -d --build - Build this agent's image by issuing `docker compose build` within this folder. Do not start the container. 
- Copy the `dashboard-agent.json` file from the `stack-manager-input-config` folder into the `inputs/config/services` folder of the stack manager, adjusting the absolute path of the bind mount as required. Please review the [different routes](#2-agent-route) to understand the purpose of these bind mounts. See [sample bind mounts](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#bind-mounts) for the configuration syntax. -- Start the stack manager as usual following [these instructions](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +- Start the stack manager as usual following [these instructions](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). ### 2. Agent Route The agent currently offers two API routes: diff --git a/Agents/DataBridgeAgent/README.md b/Agents/DataBridgeAgent/README.md index fd0be1592f4..cc548fd4a4e 100644 --- a/Agents/DataBridgeAgent/README.md +++ b/Agents/DataBridgeAgent/README.md @@ -44,12 +44,12 @@ docker-compose up -d ``` docker build -t data-bridge-agent:versionNo . ``` - 2) Add the `/docker/data-bridge-agent.json` to the [`stack-manager/inputs/config/services`](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services) directory + 2) Add the `/docker/data-bridge-agent.json` to the [`stack-manager/inputs/config/services`](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services) directory - Please ensure the version numbers are targeted at the right image. If you are building it, please update the version number accordingly. 
3) Modify the absolute path of the agent's `config` folder to your absolute file path - For Windows users using WSL on Docker, the file path should start with `/mnt/c/`, which is equivalent to `C://` 4) Include this agent service into the stack configuration file at `stack-manager/inputs/config/.json` - - Read more in the [Stack Configuration](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) section + - Read more in the [Stack Configuration](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) section 5) Start the stack as per normal If the agent is successfully started, the endpoint at `http://localhost:3838/data-bridge-agent/status` should return the following message. diff --git a/Agents/DerivationAgentPythonExample/README.md b/Agents/DerivationAgentPythonExample/README.md index 13f1547ee97..30a8fc2cc3c 100644 --- a/Agents/DerivationAgentPythonExample/README.md +++ b/Agents/DerivationAgentPythonExample/README.md @@ -140,7 +140,7 @@ For developers new to `Run and Debug` configurations, please refer to these offi - [Use Docker Compose: Debug](https://code.visualstudio.com/docs/containers/docker-compose#_debug) - [Debugpy](https://github.com/microsoft/debugpy) -For developers interested to see more example of possible configurations, including those relevant to the usage of [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager), please refer to [`PropertyValueEstimationAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/PropertyValueEstimationAgent). +For developers interested to see more example of possible configurations, including those relevant to the usage of [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager), please refer to [`PropertyValueEstimationAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/PropertyValueEstimationAgent). 
### Develop tests for new agents @@ -238,7 +238,7 @@ docker run --env-file --name derivation_agent_python_example ghc ## Adapt agent to work with stack > **NOTE** This agent example will be updated to incorporate Stack in the next iteration. -This agent example has been adapted to work with a Docker stack spun up by the [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) for a real use-case. For more information, please refer to [`PropertyValueEstimationAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/PropertyValueEstimationAgent). +This agent example has been adapted to work with a Docker stack spun up by the [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) for a real use-case. For more information, please refer to [`PropertyValueEstimationAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/PropertyValueEstimationAgent).   diff --git a/Agents/EmailAgent/README.md b/Agents/EmailAgent/README.md index 05b6e4d544a..cb225258fe6 100644 --- a/Agents/EmailAgent/README.md +++ b/Agents/EmailAgent/README.md @@ -13,7 +13,7 @@ Please note that it is not the intention for this EmailAgent to be used to send ## Building the Image -The agent directory contains the required files to build a Docker Image for the EmailAgent service; the `Dockerfile` file contains the instructions to build an Image; before making any changes to it, please consult the application's developer or the system administrators at CMCL. Files have also been added to ensure the agent is compatible for deplyment in a [stack environment](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md). 
+The agent directory contains the required files to build a Docker Image for the EmailAgent service; the `Dockerfile` file contains the instructions to build an Image; before making any changes to it, please consult the application's developer or the system administrators at CMCL. Files have also been added to ensure the agent is compatible for deployment in a [stack environment](https://github.com/TheWorldAvatar/stack/blob/main/stack-manager/README.md). Please note the caveats below before attempting to build the service using Docker: diff --git a/Agents/FeatureInfoAgent/README.md b/Agents/FeatureInfoAgent/README.md index 4897f80d628..8189374fa48 100644 --- a/Agents/FeatureInfoAgent/README.md +++ b/Agents/FeatureInfoAgent/README.md @@ -18,7 +18,7 @@ These SPARQL queries are written on a class-by-class (TBox) basis; this should m At the time of writing, the FIA has a few restrictions that all deploying developers should be aware of. These are as follows: -- The FIA can only be run within a [TWA Stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +- The FIA can only be run within a [TWA Stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). - The FIA can only report meta and time data that is contained within the same stack as the agent itself. - The FIA can only return time series data on series that uses the [Instant](https://docs.oracle.com/javase/8/docs/api/java/time/Instant.html) class. @@ -74,7 +74,7 @@ For the FIA to function, a number of configuration steps need to take place befo **Note:** As of version `3.0.0` of the FeatureInfoAgent, the configuration format has changed to support new options. The new format is documented below, but the older format is also supported. To support the newer features, it is recommended that developers write new configurations using the new format, and existing configurations are manually updated wherever possible.
-Follow the below configuration steps within the `fia-queries` subdirectory of the TWA stack manager's data directory. Volumes that are used by containers running with the TWA Stack are populated by named subdirectories within the stack manager's [data directory](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/data). For more details, read the [TWA Stack Manager documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +Follow the below configuration steps within the `fia-queries` subdirectory of the TWA stack manager's data directory. Volumes that are used by containers running with the TWA Stack are populated by named subdirectories within the stack manager's [data directory](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/data). For more details, read the [TWA Stack Manager documentation](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). The configuration file should be a JSON file named `fia-config.json`, contained within it should be: diff --git a/Agents/FeatureInfoAgent/sample/README.md b/Agents/FeatureInfoAgent/sample/README.md index 62d2194617c..e2ecd7b98a2 100644 --- a/Agents/FeatureInfoAgent/sample/README.md +++ b/Agents/FeatureInfoAgent/sample/README.md @@ -1,6 +1,6 @@ # Feature Info Agent - Sample -This directory contains a small set of sample data, configurations, and scripts that can be used to spin up a [TWA Stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) instance with a copy of the Feature Info Agent, and enough data to test it in a typical runtime environment. 
+This directory contains a small set of sample data, configurations, and scripts that can be used to spin up a [TWA Stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) instance with a copy of the Feature Info Agent, and enough data to test it in a typical runtime environment. Sample data used in this example is generated and no regard has been given for correctness. It is not suggested that people use this example as a starting point for their own projects. diff --git a/Agents/GeoSegmentAgent/README.md b/Agents/GeoSegmentAgent/README.md index cc5c4d360fa..f2dcd829039 100644 --- a/Agents/GeoSegmentAgent/README.md +++ b/Agents/GeoSegmentAgent/README.md @@ -24,7 +24,7 @@ docker build -t geosegment_agent:1.0.0 . This will use the Dockerfile to build an image named `geosegment_agent` tagged with `1.0.0`. ### Note: The GeoSegment Agent can only run within a Docker stack. -The stack is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager), which is beyond the scope of this README. +The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager), which is beyond the scope of this README. A successful setup will result in 9 containers (or more): - Default containers diff --git a/Agents/HeatEmissionAgent/README.md b/Agents/HeatEmissionAgent/README.md index d6ab88595f5..2db84136326 100644 --- a/Agents/HeatEmissionAgent/README.md +++ b/Agents/HeatEmissionAgent/README.md @@ -44,7 +44,7 @@ The data needed to estimate the heat emissions of various types of industrial fa ##### Stack containers -If the agent is being run as part of a stack, the user can opt to use a namespace located in the stack blazegraph. The procedure for spinning up the stack is described at [stack manager page](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). 
+If the agent is being run as part of a stack, the user can opt to use a namespace located in the stack blazegraph. The procedure for spinning up the stack is described at [stack manager page](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). #### 1.2 Docker Deployment diff --git a/Agents/HistoricalNTUEnergyAgent/README.md b/Agents/HistoricalNTUEnergyAgent/README.md index 0bf02cdb29e..005a5350ad4 100644 --- a/Agents/HistoricalNTUEnergyAgent/README.md +++ b/Agents/HistoricalNTUEnergyAgent/README.md @@ -146,7 +146,7 @@ The Dockerfile will automatically copy all properties files and mapping folder a ### [Option 2] Run in a Docker Stack **Note: Please follow instructions in Option 1 to build the agent first before proceeding with Option 2. ** -Running this agent in a docker stack is a more advanced option as it facilitate interactions between other agents for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +Running this agent in a docker stack is a more advanced option as it facilitates interactions between other agents for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). A successful setup will result in 9 containers (optional 10): - Default containers - Stack Manager (exits when spins up all other containers) diff --git a/Agents/LSOAInputAgent/README.md b/Agents/LSOAInputAgent/README.md index fb24027571a..67c7ff48c8a 100644 --- a/Agents/LSOAInputAgent/README.md +++ b/Agents/LSOAInputAgent/README.md @@ -4,7 +4,7 @@ The `LSOAInput agent` is dedicated to process data around the UK Lower-layer Sup This agent extract data and turn it into Knowledge Graph. The data, such as electricity consumption, gas consumption, fuel poverty, climate (temperature) and geometric shape are used to perform the use case of analysing the deployment of heat pump.
The data instatiated in the knowledge graph follows [Ontoclimate](http://www.theworldavatar.com/ontology/ontogasgrid/ontoclimate.owl), [Ontofuelpoverty](http://www.theworldavatar.com/ontology/ontofuelpoverty/ontofuelpoverty.owl) and [Ontogasgrid](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/GasGrid) ontologies in the [TheWorldAvatar](https://github.com/cambridge-cares/TheWorldAvatar). -The agent is implemented as Docker container to be deployed to a Docker stack spun up by the [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +The agent is implemented as Docker container to be deployed to a Docker stack spun up by the [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). Please note that the use of derive informtaion framework for heat pump analysis only need to run part of this agent in advance (to upload temperature data), other functions serve as an supplementary functions to uploaded triples to the blazegraph. diff --git a/Agents/NTUDataAgent/README.md b/Agents/NTUDataAgent/README.md index 80783021731..0531485a33c 100644 --- a/Agents/NTUDataAgent/README.md +++ b/Agents/NTUDataAgent/README.md @@ -70,7 +70,7 @@ More information about adding custom containers to the stack can be found [here] ### 4. Spin up a Docker Stack **Note: The docker container must run within the same stack as the HistoricalNTUEnergyAgent to get access and query the NTU Power Network Knowledge Graph for calculation.** -Running this agent in a docker stack can facilitate interactions between other agents and endpoints (Postgres, Blazegraph, etc,.) for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +Running this agent in a docker stack can facilitate interactions between other agents and endpoints (Postgres, Blazegraph, etc,.) for deployment and visualization. 
The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). ## Run Once the stack is up and running, the agent can be activated by sending a Curl request as shown below with the paramters stack=true and the desired date: diff --git a/Agents/NTUEnergyClusterAgent/README.md b/Agents/NTUEnergyClusterAgent/README.md index 64d00e1a5fe..1a0841c25e6 100644 --- a/Agents/NTUEnergyClusterAgent/README.md +++ b/Agents/NTUEnergyClusterAgent/README.md @@ -11,7 +11,7 @@ For the agent to process opf results a power system must be instantiated by the #### NTU Power System Knowledge Graph - For details to instantiate the NTU Power System Knowledge Graph, pleaes refer to the [HistoricalNTUEnergyAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HistoricalNTUEnergyAgent). -- Both NTUEnergyClusterAgent and HistoricalNTUEnergyAgent should run in the same stack in which they interact with the same Blazegraph and Postgres endpoints. For details to spin up a stack, please refer to the [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +- Both NTUEnergyClusterAgent and HistoricalNTUEnergyAgent should run in the same stack in which they interact with the same Blazegraph and Postgres endpoints. For details to spin up a stack, please refer to the [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). # 2. Build & Run This part of the README explain the instruction to build the agent. 
@@ -65,7 +65,7 @@ More information about adding custom containers to the stack can be found [here] ### [Step 3] Spin up a Docker Stack **Note: The docker container must run within the same stack as the [HistoricalNTUEnergyAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HistoricalNTUEnergyAgent) to get access and query the NTU Power Network Knowledge Graph for calculation.** -Running this agent in a docker stack can facilitate interactions between other agents and endpoints (Postgres, Blazegraph, etc,.) for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +Running this agent in a docker stack can facilitate interactions between other agents and endpoints (Postgres, Blazegraph, etc,.) for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). A successful setup will result in 10 containers (optional 11): - Default containers - Stack Manager (exits when spins up all other containers) diff --git a/Agents/NTUP2PEnergyAgent/README.md b/Agents/NTUP2PEnergyAgent/README.md index a50db8fcdd4..dee68ad86d3 100644 --- a/Agents/NTUP2PEnergyAgent/README.md +++ b/Agents/NTUP2PEnergyAgent/README.md @@ -59,7 +59,7 @@ More information about adding custom containers to the stack can be found [here] ### [Step 3] Spin up a Docker Stack **Note: The docker container must run within the same stack as the [HistoricalNTUEnergyAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/1496-dev-instantiate-historic-ntuenergyconsumptiondata-2/Agents/HistoricalNTUEnergyAgent) to get access and query the NTU Power Network Knowledge Graph for calculation.** -Running this agent in a docker stack can facilitate interactions between other agents and endpoints (Postgres, Blazegraph, etc,.) for deployment and visualization. 
The stack is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +Running this agent in a docker stack can facilitate interactions between other agents and endpoints (Postgres, Blazegraph, etc,.) for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). A successful setup will result in 10 containers (optional 11): - Default containers - Stack Manager (exits when spins up all other containers) diff --git a/Agents/NTUPVLibAgent/README.md b/Agents/NTUPVLibAgent/README.md index 678a0a99f3e..a6309a4d6d6 100644 --- a/Agents/NTUPVLibAgent/README.md +++ b/Agents/NTUPVLibAgent/README.md @@ -105,7 +105,7 @@ NTUPVLib is intended for deployment in a stack (option 2). For others, refer to #### [Option 2] As a stacked docker container -Running this agent in a docker stack is a more advanced option as it facilitate interactions between other agents for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +Running this agent in a docker stack is a more advanced option as it facilitate interactions between other agents for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). A successful setup will result in 9 containers (optional 10): - Default containers diff --git a/Agents/OpenMeteoAgent/README.md b/Agents/OpenMeteoAgent/README.md index e1392bb49bf..fe394b83d6b 100644 --- a/Agents/OpenMeteoAgent/README.md +++ b/Agents/OpenMeteoAgent/README.md @@ -31,7 +31,7 @@ The docker image uses TheWorldAvatar maven repository (https://maven.pkg.github. ``` ### 2.2. Stack Set Up -The agent is designed to run in the stack. 
To start the stack, spin up the [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager). +The agent is designed to run in the stack. To start the stack, spin up the [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). ### 2.3. Blazegraph Set Up The agent is designed to use the stack Blazegraph. Please ensure that the Blazegraph namespace corresponding to ```route.label``` in ```./openmeteo-agent/src/main/resources/config.properties```, is set up in the stack Blazegraph with geospatial capabilities. diff --git a/Agents/PIPSRequestAgent/README.md b/Agents/PIPSRequestAgent/README.md index 7e6fc60ffee..127a9c3b088 100644 --- a/Agents/PIPSRequestAgent/README.md +++ b/Agents/PIPSRequestAgent/README.md @@ -6,7 +6,7 @@ The agent also allows for the option of including a client certificate (P12 form # Prerequisite 1. It is necessary to have Keycloak set up properly. Refer to the official [Keycloak guides](https://www.keycloak.org/guides#getting-started) for how to get started. -2. The TWA (TheWorldAvatar) stack can also be used to set up the Keycloak service along with a variety of other services. Refer to [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) for more information. +2. The TWA (TheWorldAvatar) stack can also be used to set up the Keycloak service along with a variety of other services. Refer to [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) for more information. 3) It is necessary to have the PIPSTimeSeriesAgent set up properly. Refer to [PIPSTimeSeriesAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/PIPSTimeSeriesAgent). 
diff --git a/Agents/PIPSTimeSeriesAgent/README.md b/Agents/PIPSTimeSeriesAgent/README.md index 4d91caf4c2d..2edf660c835 100644 --- a/Agents/PIPSTimeSeriesAgent/README.md +++ b/Agents/PIPSTimeSeriesAgent/README.md @@ -4,7 +4,7 @@ This agent is designed to receive an access token, carry out verification with K # Prerequisite 1. It is necessary to have Keycloak set up properly. Refer to the official [Keycloak guides](https://www.keycloak.org/guides#getting-started) for how to get started. -2. The TWA (TheWorldAvatar) stack can also be used to set up the Keycloak service along with a variety of other services. Refer to [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) for more information. +2. The TWA (TheWorldAvatar) stack can also be used to set up the Keycloak service along with a variety of other services. Refer to [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) for more information. 3. It is necessary to have a PostgreSQL database set up properly. The tables and columns should have a structure similar to how the [OPCUAAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/OPCUAAgent) construct its tables and columns. This agent is originally designed to work with the [OPCUAAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/OPCUAAgent) but it is possible to reuse the agent for other databases as long as they have a similar structure. diff --git a/Agents/PVLibAgent/README.md b/Agents/PVLibAgent/README.md index 495ddad3543..eb92d501f33 100644 --- a/Agents/PVLibAgent/README.md +++ b/Agents/PVLibAgent/README.md @@ -146,7 +146,7 @@ If the agent runs successfully, you should see a returned Object that is similar #### [Option 2] As a stacked docker container -Running this agent in a docker stack is a more advanced option as it facilitate interactions between other agents for deployment and visualization. 
The stack is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +Running this agent in a docker stack is a more advanced option as it facilitates interactions between other agents for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). A successful setup will result in 9 containers (optional 10): - Default containers diff --git a/Agents/SolarkatasterAgent/README.md b/Agents/SolarkatasterAgent/README.md index efd54ed5816..8d9cdb7c275 100644 --- a/Agents/SolarkatasterAgent/README.md +++ b/Agents/SolarkatasterAgent/README.md @@ -36,7 +36,7 @@ curl -X POST --header "Content-Type: application/json" -d "{'table':'stadt_pirma ## Build Instructions ### Stack set up -The agent has been implemented to work with stack, which requires the stack to be [set up](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) and for the Solarkataster data to be [uploaded to stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader), before building and running the agent. +The agent has been implemented to work with stack, which requires the stack to be [set up](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) and for the Solarkataster data to be [uploaded to stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader), before building and running the agent. Before building, change the placeholder `` in `./solarkataster_agent/src/main/resources/config.properties` and `./docker-compose.yml` to the name of your stack.
diff --git a/Agents/TrajectoryQueryAgent/README.md b/Agents/TrajectoryQueryAgent/README.md index 5dca7087a91..3e76153f1b0 100644 --- a/Agents/TrajectoryQueryAgent/README.md +++ b/Agents/TrajectoryQueryAgent/README.md @@ -7,7 +7,7 @@ TrajectoryQueryAgent is an agent that handles trajectory related tasks. It curre ## Requirements -Launch [stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) with the default containers and the following additional containers: +Launch [stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) with the default containers and the following additional containers: - information from [SensorLoggerMobileAppAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/SensorLoggerMobileAppAgent) to be instantiated diff --git a/Agents/UserAgent/README.md b/Agents/UserAgent/README.md index 5b9238d6094..32a99da55d7 100644 --- a/Agents/UserAgent/README.md +++ b/Agents/UserAgent/README.md @@ -72,7 +72,7 @@ Check this Keycloak [guide](https://www.keycloak.org/docs/latest/authorization_s ### 4.3 Starting with the stack-manager The agent has been implemented to work in the stack, which requires the UserAgent Docker container to be deployed in the stack. To do so, place [user-agent.json](stack-manager-config/inputs/config/services/user-agent.json) in the [stack-manager config directory]. -Then, run `./stack.sh start ` in the [stack-manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) main folder. This will spin up the agent in the stack. +Then, run `./stack.sh start ` in the [stack-manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) main folder. This will spin up the agent in the stack. ## 5. 
Build and debug ## 5.1 Credentials diff --git a/Agents/VisBackendAgent/README.md b/Agents/VisBackendAgent/README.md index ef633910556..b142355daff 100644 --- a/Agents/VisBackendAgent/README.md +++ b/Agents/VisBackendAgent/README.md @@ -40,7 +40,7 @@ The Vis-Backend Agent is a supporting service to The World Avatar's [viz](https: ## 1. Agent Deployment -The agent is designed for execution through a Docker container within [The World Avatar's stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). It cannot run as a standalone container, and other deployment workflows are beyond the scope of this document. +The agent is designed for execution through a Docker container within [The World Avatar's stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). It cannot run as a standalone container, and other deployment workflows are beyond the scope of this document. ### 1.1 Preparation @@ -101,7 +101,7 @@ docker compose -f "./docker/docker-compose.test.yml" up -d --build 2. Update the environment variables in `./docker/vis-backend-agent.json` if required. 3. Copy the `./docker/vis-backend-agent.json` file into the `inputs/config/services` directory of the stack manager. 4. Ensure the bind mount path is correctly set in the stack configuration for `vis-resources`. -5. Start the stack manager as usual following [these instructions](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +5. Start the stack manager as usual following [these instructions](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). **DEBUGGING ENVIRONMENT** Follow the same steps as the **PRODUCTION ENVIRONMENT**, but use the `vis-backend-agent-debug.json` file instead in step 3. 
diff --git a/Apps/BMSQueryApp/README.md b/Apps/BMSQueryApp/README.md index c8807454eba..797904ec3da 100644 --- a/Apps/BMSQueryApp/README.md +++ b/Apps/BMSQueryApp/README.md @@ -7,12 +7,12 @@ This is an Android app project to monitor and control lab devices. The minimum a Device Control. ## Development Setup -All agents and database should be deployed in a server with local [stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager), and the app acts as a standalone frontend. This section is aimed for local development setup. +All agents and database should be deployed in a server with local [stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager), and the app acts as a standalone frontend. This section is aimed for local development setup. Agents required: - [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent) - [BMSQueryAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BMSQueryAgent) -- [BMSUpdateAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BMSWriteAgent) +- [BMSUpdateAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BMSUpdateAgent) ### Feature Info Agent Configuration Before launching Feature Info Agent in the stack, config files need to be copied into the feature info agent folder. Copy all `.sparql` files in `feature-info-agent-queries/` to `TheWorldAvatar/Agent/FeatureInfoAgent/queries`. @@ -27,5 +27,5 @@ Update the endpoints and IRIs in `Apps/BMSQueryApp/app/src/main/res/values/confi ### Authentication This app uses [AppAuth](https://github.com/openid/AppAuth-Android) to communicate with OpenID Connect providers. -1. Register app client in Keycloak as OpenID Connect type client. Check [here](https://www.keycloak.org/docs/23.0.4/server_admin/#_oidc_clients) for complete setup guide. The app isn't a resource client, so no authorization is needed to setup. +1. 
Register app client in Keycloak as OpenID Connect type client. Check [here](https://www.keycloak.org/docs/latest/server_admin/index.html#_oidc_clients) for complete setup guide. The app isn't a resource client, so no authorization is needed to setup. 2. Replace `` with the stack address in Apps/BMSQueryApp/app/src/main/res/raw/auth_config.json \ No newline at end of file diff --git a/Apps/Modules/camera/README.md b/Apps/Modules/camera/README.md index 584041097a8..83ad3d779e1 100644 --- a/Apps/Modules/camera/README.md +++ b/Apps/Modules/camera/README.md @@ -22,5 +22,5 @@ Since multiple feature modules can extend the base camera fragment for different ## Extend Camera Fragment The base camera fragment need to be extended for the capture result to be used for different use cases. Navigating directly to this fragment and click the capture button won't have any effect. Here are some helpful links: -- [QR code scanning example](https://github.com/cambridge-cares/TheWorldAvatar/tree/1584-asset-management-app/Apps/AssetManagementApp/feature/qrscan) +- [QR code scanning example](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Apps/AssetManagementApp/feature/qrscan) - [Google ML Kit](https://developers.google.com/ml-kit): provide various vision APIs such as barcode scanning, face detection and text recognition \ No newline at end of file diff --git a/Apps/Modules/login/README.md b/Apps/Modules/login/README.md index f5b5e727f22..7ff4a829cf2 100644 --- a/Apps/Modules/login/README.md +++ b/Apps/Modules/login/README.md @@ -10,14 +10,14 @@ This module contains two parts: core and feature. It integrates [AppAuth for And > Import :core:login 2. Open the `Modules` tab and click the `+` to add new modules under `core` 3. In the pop up `Create New Module` window, click `Import...` -4. Select [/core/login](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Apps/Modules/core/login) as source location +4. 
Select [/core/login](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Apps/Modules/login/core/login) as source location 5. Set the `Module name` to `:core:login` 1. You may see warning about missing `:core:utils` module. Ignore this warning while importing and fix the error in code after import. > Import :feature:login 6. Click the `+` to add new modules under `feature` 7. In the pop up `Create New Module` window, click `Import...` -8. Select [/feature/login](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Apps/Modules/feature/login) as source location +8. Select [/feature/login](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Apps/Modules/login/feature/login) as source location 9. Set the `Module name` to `:feature:login` 10. Click `Finish` and wait for the module to be imported diff --git a/Apps/PirmasensToiletApp/README.md b/Apps/PirmasensToiletApp/README.md index eeafbffc87d..29d8bb568dd 100644 --- a/Apps/PirmasensToiletApp/README.md +++ b/Apps/PirmasensToiletApp/README.md @@ -48,11 +48,11 @@ Any data access required should be contacted through someone working on the repo ### 2.1 Backend Services -The app will require a running [The World Avatar stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) as the backend. +The app will require a running [The World Avatar stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) as the backend. ### 2.1.1 Stack Data Uploader -Data specified in [this section](#12-data-sources) should be uploaded using the [Stack Data Uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader). All the relevant configurations and settings are included in the [`./inputs/data/`](./inputs/data/) directory. +Data specified in [this section](#12-data-sources) should be uploaded using the [Stack Data Uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader). 
All the relevant configurations and settings are included in the [`./inputs/data/`](./inputs/data/) directory. Briefly, the app will only require the `Wasgau` and `Toilet` datasets, as well as the associated ontologies and OBDA mappings for the base functionality of this application. Extended configuration are for routing purposes. diff --git a/Apps/PirmasensToiletApp/inputs/data/README.md b/Apps/PirmasensToiletApp/inputs/data/README.md index 3fbcd73d6b7..e0df644d0e0 100644 --- a/Apps/PirmasensToiletApp/inputs/data/README.md +++ b/Apps/PirmasensToiletApp/inputs/data/README.md @@ -1,6 +1,6 @@ # Stack Data Uploader contents -This directory contains the different data contents for the [`stack-data-uploader`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader). +This directory contains the different data contents for the [`stack-data-uploader`](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader). ## Table of Contents @@ -32,7 +32,7 @@ Additional datasets required for routing purposes: ## 2. Configuration File -As per the [documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader), the relevant configuration file is as follows. Users are also able to upload icons using the `staticGeoServerData` key. +As per the [documentation](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader), the relevant configuration file is as follows. Users are also able to upload icons using the `staticGeoServerData` key. ```json { diff --git a/Deploy/stacks/Singapore/README.md b/Deploy/stacks/Singapore/README.md index 951aa68f0aa..76f145348e4 100644 --- a/Deploy/stacks/Singapore/README.md +++ b/Deploy/stacks/Singapore/README.md @@ -3,7 +3,7 @@ This repository contains the instructions, directory structure, and configuratio ## 1. 
Preparations ### Knowledge of the stack tools adopted in The World Avatar -Please read through the [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) and [Stack Data Uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader) to set up your stack accordingly. +Please read through the [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) and [Stack Data Uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader) to set up your stack accordingly. At the moment, a working understanding of these two tools will suffice for the deployment of the Singapore stack. diff --git a/Deploy/stacks/cares-lab/README.md b/Deploy/stacks/cares-lab/README.md index 4742fee9b2c..02ac9a6703c 100644 --- a/Deploy/stacks/cares-lab/README.md +++ b/Deploy/stacks/cares-lab/README.md @@ -3,7 +3,7 @@ This repository contains the instructions, directory structure, and configuratio ## 1. Preparations ### Knowledge of the stack tools adopted in The World Avatar -Please read through the [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) to set up your stack accordingly. +Please read through the [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) to set up your stack accordingly. ## 2. Deployment Workflow ### Stack manager diff --git a/Deploy/stacks/db/fileserver/README.md b/Deploy/stacks/db/fileserver/README.md index 082010a9705..d92e7bf8441 100644 --- a/Deploy/stacks/db/fileserver/README.md +++ b/Deploy/stacks/db/fileserver/README.md @@ -35,7 +35,7 @@ You should now be able to access the fileserver with a username `fs_user` and th ## Integration with the Stack Manager -The fileserver can also be integrated with the stack manager. 
Please refer to the general stack-manager documentation for more information, especially the "Specifying a custom container" section. ([Link to stack manager README](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#specifying-custom-containers)) +The fileserver can also be integrated with the stack manager. Please refer to the general stack-manager documentation for more information, especially the "Specifying a custom container" section. ([Link to stack manager README](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#specifying-custom-containers)) In short, you need to add a service configuration file and include the file server in the stack configuration file. The following example can be used as a starting point for the service configuration file. Make sure to name the secrets file with the stored credentials "file_server_password", otherwise a default password will be used. After spinning up the stack, you should be able to access the file server with the username `fs_user` and the credentials you set. diff --git a/Deploy/stacks/timeline/readme.md b/Deploy/stacks/timeline/readme.md index 5b43e4cbdc1..d77803d3728 100644 --- a/Deploy/stacks/timeline/readme.md +++ b/Deploy/stacks/timeline/readme.md @@ -4,7 +4,7 @@ Stack name should be `timeline-test`. -Prepare these secret files in [](./stack-manager/test/inputs/secrets) +Prepare these secret files in [](./test/stack-manager/inputs/secrets) - geoserver_password - postgis_password @@ -18,9 +18,9 @@ Set value of KC_DB_PASSWORD to match value in postgis_password. Set value of KC_HOSTNAME_ADMIN and KC_HOSTNAME to external URL of KeyCloak depending on where it is deployed, e.g. `http://localhost:58085/keycloak` Preconfigured realm and client settings can be found in dropbox `IRP3 CAPRICORN shared folder\_TWA_Shared_Data\Timeline`. Copy the files from the folder to the stack [data folder](test/stack-manager/inputs/data). 
They are required for user agent since [version 2.0.0](https://github.com/orgs/cambridge-cares/packages/container/package/user-agent). - + > If the stack is deployed in remote server, please update the `auth-server-url` in [user-agent.json](test/stack-manager/inputs/data/user_agent_keycloak.json) accordingly. `rootUrl` for user-agent client in [timeline-realm.json](test/stack-manager/inputs/data/keycloak/data/timeline-realm.json) should be updated as well. - + You may need to change the permissions of the keycloak startup script, i.e. ```bash @@ -49,7 +49,7 @@ then restart stack manager again: Stack name should be `timeline`. -Prepare these secret files in [](./stack-manager/prod/inputs/secrets) +Prepare these secret files in [](./prod/stack-manager/inputs/secrets) - geoserver_password - postgis_password @@ -120,9 +120,9 @@ IMPORT FOREIGN SCHEMA public 3. Replace in vis-files/keycloak.json with the URL of the KeyCloak server, needs to be an address that can be accessed from client and the server. 1. The configuration assumes a realm called `timeline` exists and a client called `desktop-vis` is set up correctly with the correct redirect urls. 4. Upload [./shacl/timeline.ttl](./shacl/timeline.ttl) to the kb namespace on Blazegraph -5. Download contents of [https://github.com/TheWorldAvatar/viz/tree/main/code/public/images/defaults] into [./test/vis/vis-files/public/images/defaults](./test/vis/vis-files/public/images/defaults). +5. Download contents of https://github.com/TheWorldAvatar/viz/tree/main/code/public/images/defaults into [./test/vis/vis-files/public/images/defaults](./test/vis/vis-files/public/images/defaults). 6. Run `docker compose up -d` in [./test/vis/](./test/vis/). 7. Visualisation will be accessible at port 3000, e.g.
http://[IP_ADDRESS]:3000 -[keycloak-test.json]: ./stack-manager/test/inputs/config/services/keycloak-test.json -[keycloak-prod.json]: ./stack-manager/test/inputs/config/services/keycloak-prod.json \ No newline at end of file +[keycloak-test.json]: ./test/stack-manager/inputs/config/services/keycloak-test.json +[keycloak-prod.json]: ./prod/stack-manager/inputs/config/services/keycloak-prod.json \ No newline at end of file diff --git a/Deploy/stacks/web/website/site/user/pages/02.explore/01.marie/marie.md b/Deploy/stacks/web/website/site/user/pages/02.explore/01.marie/marie.md index 4703f1ad3d1..976301f1974 100644 --- a/Deploy/stacks/web/website/site/user/pages/02.explore/01.marie/marie.md +++ b/Deploy/stacks/web/website/site/user/pages/02.explore/01.marie/marie.md @@ -134,4 +134,4 @@ slug: marie
-[plugin:content-inject](/modular/partners) +[plugin:content-inject](../../modular/partners) diff --git a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/default.md b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/default.md index b73f95f966f..61bfe8d2c0f 100644 --- a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/default.md +++ b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/default.md @@ -77,4 +77,4 @@ slug: digital-twin

-[plugin:content-inject](/modular/partners) \ No newline at end of file +[plugin:content-inject](../../modular/partners) \ No newline at end of file diff --git a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/template.md b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/template.md index a8b89ee695c..c26ee16a704 100644 --- a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/template.md +++ b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/template.md @@ -34,4 +34,4 @@ slug: slug
-[plugin:content-inject](/modular/partners) +[plugin:content-inject](../../modular/partners) diff --git a/Deploy/stacks/web/website/site/user/plugins/error/README.md b/Deploy/stacks/web/website/site/user/plugins/error/README.md index ef24726bdd1..9604805fbe7 100644 --- a/Deploy/stacks/web/website/site/user/plugins/error/README.md +++ b/Deploy/stacks/web/website/site/user/plugins/error/README.md @@ -12,7 +12,7 @@ Installing the Error plugin can be done in one of two ways. Our GPM (Grav Packag ## GPM Installation (Preferred) -The simplest way to install this plugin is via the [Grav Package Manager (GPM)](http://learn.getgrav.org/advanced/grav-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: +The simplest way to install this plugin is via the [Grav Package Manager (GPM)](https://learn.getgrav.org/16/cli-console/grav-cli-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: bin/gpm install error @@ -75,7 +75,7 @@ As development for the Error plugin continues, new versions may become available ## GPM Update (Preferred) -The simplest way to update this plugin is via the [Grav Package Manager (GPM)](http://learn.getgrav.org/advanced/grav-gpm). You can do this with this by navigating to the root directory of your Grav install using your system's Terminal (also called command line) and typing the following: +The simplest way to update this plugin is via the [Grav Package Manager (GPM)](https://learn.getgrav.org/16/cli-console/grav-cli-gpm). 
You can do this with this by navigating to the root directory of your Grav install using your system's Terminal (also called command line) and typing the following: bin/gpm update error diff --git a/Deploy/stacks/web/website/site/user/plugins/form/CHANGELOG.md b/Deploy/stacks/web/website/site/user/plugins/form/CHANGELOG.md index 16878729a02..d001fcce591 100644 --- a/Deploy/stacks/web/website/site/user/plugins/form/CHANGELOG.md +++ b/Deploy/stacks/web/website/site/user/plugins/form/CHANGELOG.md @@ -1,3 +1,4 @@ + # v4.3.1 ## 01/31/2021 diff --git a/Deploy/stacks/web/website/site/user/plugins/form/README.md b/Deploy/stacks/web/website/site/user/plugins/form/README.md index d171e9e0a1c..2c22edc2651 100644 --- a/Deploy/stacks/web/website/site/user/plugins/form/README.md +++ b/Deploy/stacks/web/website/site/user/plugins/form/README.md @@ -21,7 +21,7 @@ enabled: true # How to use the Form Plugin The Learn site has two pages describing how to use the Form Plugin: -- [Forms](http://learn.getgrav.org/advanced/forms) +- [Forms](https://learn.getgrav.org/17/forms) - [Add a contact form](http://learn.getgrav.org/forms/forms/example-form) # Using email diff --git a/Deploy/stacks/web/website/site/user/plugins/form/vendor/google/recaptcha/README.md b/Deploy/stacks/web/website/site/user/plugins/form/vendor/google/recaptcha/README.md index 92e8deae7bd..7119f9eab73 100644 --- a/Deploy/stacks/web/website/site/user/plugins/form/vendor/google/recaptcha/README.md +++ b/Deploy/stacks/web/website/site/user/plugins/form/vendor/google/recaptcha/README.md @@ -1,5 +1,5 @@ # reCAPTCHA PHP client library - + [![Build Status](https://travis-ci.org/google/recaptcha.svg)](https://travis-ci.org/google/recaptcha) [![Coverage Status](https://coveralls.io/repos/github/google/recaptcha/badge.svg)](https://coveralls.io/github/google/recaptcha) [![Latest Stable Version](https://poser.pugx.org/google/recaptcha/v/stable.svg)](https://packagist.org/packages/google/recaptcha) @@ -55,8 +55,7 @@ own autoloader 
or require the needed files directly in your code. ## Usage First obtain the appropriate keys for the type of reCAPTCHA you wish to -integrate for v2 at https://www.google.com/recaptcha/admin or v3 at -https://g.co/recaptcha/v3. +integrate for v2 or v3 at https://www.google.com/recaptcha/admin. Then follow the [integration guide on the developer site](https://developers.google.com/recaptcha/intro) to add the reCAPTCHA diff --git a/Deploy/stacks/web/website/site/user/plugins/page-inject/README.md b/Deploy/stacks/web/website/site/user/plugins/page-inject/README.md index abe980499a6..229816db634 100644 --- a/Deploy/stacks/web/website/site/user/plugins/page-inject/README.md +++ b/Deploy/stacks/web/website/site/user/plugins/page-inject/README.md @@ -8,7 +8,7 @@ Installing the Page Inject plugin can be done in one of two ways. Our GPM (Grav ## GPM Installation (Preferred) -The simplest way to install this plugin is via the [Grav Package Manager (GPM)](http://learn.getgrav.org/advanced/grav-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: +The simplest way to install this plugin is via the [Grav Package Manager (GPM)](https://learn.getgrav.org/16/cli-console/grav-cli-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: bin/gpm install page-inject diff --git a/Deploy/stacks/web/website/site/user/plugins/problems/README.md b/Deploy/stacks/web/website/site/user/plugins/problems/README.md index ad7fd6f8eea..7033e181ebe 100644 --- a/Deploy/stacks/web/website/site/user/plugins/problems/README.md +++ b/Deploy/stacks/web/website/site/user/plugins/problems/README.md @@ -14,7 +14,7 @@ Installing the Problems plugin can be done in one of two ways. 
Our GPM (Grav Pac ## GPM Installation (Preferred) -The simplest way to install this plugin is via the [Grav Package Manager (GPM)](http://learn.getgrav.org/advanced/grav-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: +The simplest way to install this plugin is via the [Grav Package Manager (GPM)](https://learn.getgrav.org/17/cli-console/grav-cli-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: bin/gpm install problems diff --git a/Deploy/stacks/web/website/site/user/themes/quark/README.md b/Deploy/stacks/web/website/site/user/themes/quark/README.md index a65d82171bc..66705cb858f 100644 --- a/Deploy/stacks/web/website/site/user/themes/quark/README.md +++ b/Deploy/stacks/web/website/site/user/themes/quark/README.md @@ -33,7 +33,7 @@ The theme by itself is useful, but you may have an easier time getting up and ru ## GPM Installation (Preferred) -The simplest way to install this theme is via the [Grav Package Manager (GPM)](http://learn.getgrav.org/advanced/grav-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: +The simplest way to install this theme is via the [Grav Package Manager (GPM)](https://learn.getgrav.org/16/cli-console/grav-cli-gpm) through your system's Terminal (also called the command line). 
From the root of your Grav install type: bin/gpm install quark diff --git a/Deploy/stacks/web/website/site/vendor/gregwar/image/Gregwar/Image/README.md b/Deploy/stacks/web/website/site/vendor/gregwar/image/Gregwar/Image/README.md index 7d2e47788c9..e5b3e011620 100644 --- a/Deploy/stacks/web/website/site/vendor/gregwar/image/Gregwar/Image/README.md +++ b/Deploy/stacks/web/website/site/vendor/gregwar/image/Gregwar/Image/README.md @@ -32,7 +32,7 @@ Image::open('in.png') ->negate() ->save('out.jpg'); ``` - + Here are the resize methods: * `resize($width, $height, $background)`: resizes the image, will preserve scale and never @@ -127,6 +127,7 @@ You can also create image from scratch using: ``` Where 200 is the width and 100 the height + ## Saving the image diff --git a/Deploy/stacks/web/website/site/vendor/league/climate/README.md b/Deploy/stacks/web/website/site/vendor/league/climate/README.md index 3481c883799..b4633ee61f7 100644 --- a/Deploy/stacks/web/website/site/vendor/league/climate/README.md +++ b/Deploy/stacks/web/website/site/vendor/league/climate/README.md @@ -2,7 +2,6 @@ [![Latest Version](https://img.shields.io/github/tag/thephpleague/climate.svg?style=flat&label=release)](https://github.com/thephpleague/climate/tags) [![Software License](https://img.shields.io/badge/license-MIT-brightgreen.svg?style=flat)](LICENSE.md) -[![Build Status](https://github.com/thephpleague/climate/workflows/.github/workflows/buildcheck.yml/badge.svg?branch=master)](https://github.com/thephpleague/climate/actions?query=branch%3Amaster+workflow%3Abuildcheck) [![Coverage Status](https://img.shields.io/scrutinizer/coverage/g/thephpleague/climate.svg?style=flat)](https://scrutinizer-ci.com/g/thephpleague/climate/code-structure) [![Quality Score](https://img.shields.io/scrutinizer/g/thephpleague/climate.svg?style=flat)](https://scrutinizer-ci.com/g/thephpleague/climate) [![Total 
Downloads](https://img.shields.io/packagist/dt/league/climate.svg?style=flat)](https://packagist.org/packages/league/climate) diff --git a/Deploy/stacks/web/website/site/vendor/miljar/php-exif/README.md b/Deploy/stacks/web/website/site/vendor/miljar/php-exif/README.md index aef3eb6db56..1626d98971a 100644 --- a/Deploy/stacks/web/website/site/vendor/miljar/php-exif/README.md +++ b/Deploy/stacks/web/website/site/vendor/miljar/php-exif/README.md @@ -1,4 +1,4 @@ -# [PHPExif v0.6.4](http://github.com/PHPExif/php-exif) [![Build Status](https://travis-ci.org/PHPExif/php-exif.png?branch=master)](https://travis-ci.org/PHPExif/php-exif) [![Coverage Status](https://coveralls.io/repos/PHPExif/php-exif/badge.svg?branch=master)](https://coveralls.io/r/PHPExif/php-exif?branch=master) [![Code Climate](https://codeclimate.com/github/PHPExif/php-exif/badges/gpa.svg)](https://codeclimate.com/github/PHPExif/php-exif) +# [PHPExif v0.6.4](http://github.com/PHPExif/php-exif) [![Build Status](https://travis-ci.org/PHPExif/php-exif.png?branch=master)](https://travis-ci.org/PHPExif/php-exif) [![Coverage Status](https://coveralls.io/repos/PHPExif/php-exif/badge.svg?branch=master)](https://coveralls.io/r/PHPExif/php-exif?branch=master) PHPExif is a library which gives you easy access to the EXIF meta-data of an image. @@ -15,13 +15,6 @@ PHPExif serves as a wrapper around some native or CLI tools which access this EX composer require miljar/php-exif ``` - -## Usage - -[Before v0.3.0](Resources/doc/usage_0.2.1.md) - -[v0.3.0+](Resources/doc/usage.md) - ## Contributing Please submit all pull requests against the correct branch. The release branch for the next version is a branch with the same name as the next version. Bugfixes should go in the master branch, unless they are for code in a new release branch. 
diff --git a/Deploy/stacks/web/website/site/vendor/monolog/monolog/README.md b/Deploy/stacks/web/website/site/vendor/monolog/monolog/README.md index a578eb22892..04534a427e4 100644 --- a/Deploy/stacks/web/website/site/vendor/monolog/monolog/README.md +++ b/Deploy/stacks/web/website/site/vendor/monolog/monolog/README.md @@ -1,4 +1,4 @@ -# Monolog - Logging for PHP [![Build Status](https://img.shields.io/travis/Seldaek/monolog.svg)](https://travis-ci.org/Seldaek/monolog) +# Monolog - Logging for PHP [![Build Status](https://img.shields.io/travis/Seldaek/monolog.svg)](https://travis-ci.org/Seldaek/monolog) [![Total Downloads](https://img.shields.io/packagist/dt/monolog/monolog.svg)](https://packagist.org/packages/monolog/monolog) [![Latest Stable Version](https://img.shields.io/packagist/v/monolog/monolog.svg)](https://packagist.org/packages/monolog/monolog) @@ -40,13 +40,6 @@ $log->addWarning('Foo'); $log->addError('Bar'); ``` -## Documentation - -- [Usage Instructions](doc/01-usage.md) -- [Handlers, Formatters and Processors](doc/02-handlers-formatters-processors.md) -- [Utility classes](doc/03-utilities.md) -- [Extending Monolog](doc/04-extending.md) - ## Third Party Packages Third party handlers, formatters and processors are @@ -77,7 +70,7 @@ Bugs and feature request are tracked on [GitHub](https://github.com/Seldaek/mono - [XOOPS 2.6](http://xoops.org/) comes out of the box with Monolog. - [Aura.Web_Project](https://github.com/auraphp/Aura.Web_Project) comes out of the box with Monolog. - [Nette Framework](http://nette.org/en/) can be used with Monolog via [Kdyby/Monolog](https://github.com/Kdyby/Monolog) extension. -- [Proton Micro Framework](https://github.com/alexbilbie/Proton) comes out of the box with Monolog. +- [Proton Micro Framework](https://github.com/alexbilbie/Proton) comes out of the box with Monolog. 
### Author diff --git a/Deploy/stacks/web/website/site/vendor/nyholm/psr7/README.md b/Deploy/stacks/web/website/site/vendor/nyholm/psr7/README.md index 4c53bdf5d87..1d9df943c2b 100644 --- a/Deploy/stacks/web/website/site/vendor/nyholm/psr7/README.md +++ b/Deploy/stacks/web/website/site/vendor/nyholm/psr7/README.md @@ -1,7 +1,6 @@ # PSR-7 implementation [![Latest Version](https://img.shields.io/github/release/Nyholm/psr7.svg?style=flat-square)](https://github.com/Nyholm/psr7/releases) -[![Build Status](https://img.shields.io/travis/Nyholm/psr7/master.svg?style=flat-square)](https://travis-ci.org/Nyholm/psr7) [![Code Coverage](https://img.shields.io/scrutinizer/coverage/g/Nyholm/psr7.svg?style=flat-square)](https://scrutinizer-ci.com/g/Nyholm/psr7) [![Quality Score](https://img.shields.io/scrutinizer/g/Nyholm/psr7.svg?style=flat-square)](https://scrutinizer-ci.com/g/Nyholm/psr7) [![Total Downloads](https://poser.pugx.org/nyholm/psr7/downloads)](https://packagist.org/packages/nyholm/psr7) @@ -101,7 +100,7 @@ $response = $psr17Factory->createResponse(200)->withBody($responseBody); ## Our goal -This package is currently maintained by [Tobias Nyholm](http://nyholm.se) and +This package is currently maintained by [Tobias Nyholm](http://tnyholm.se) and [Martijn van der Ven](https://vanderven.se/martijn/). They have decided that the goal of this library should be to provide a super strict implementation of [PSR-7](https://www.php-fig.org/psr/psr-7/) that is blazing fast. 
diff --git a/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md b/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md index 4654495a72f..471bbda2170 100644 --- a/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md +++ b/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md @@ -18,12 +18,7 @@ $ composer require php-http/message-factory ## Documentation -Please see the [official documentation](http://php-http.readthedocs.org/en/latest/message-factory/). - - -## Contributing - -Please see [CONTRIBUTING](CONTRIBUTING.md) and [CONDUCT](CONDUCT.md) for details. +Please see the [official documentation](https://docs.php-http.org/en/latest/message/message-factory.html). ## Security diff --git a/Deploy/stacks/web/website/site/vendor/rockettheme/toolbox/README.md b/Deploy/stacks/web/website/site/vendor/rockettheme/toolbox/README.md index 324d8a48169..4afe9f8f4c6 100644 --- a/Deploy/stacks/web/website/site/vendor/rockettheme/toolbox/README.md +++ b/Deploy/stacks/web/website/site/vendor/rockettheme/toolbox/README.md @@ -3,9 +3,9 @@ [![PHPStan](https://img.shields.io/badge/PHPStan-enabled-brightgreen.svg?style=flat)](https://github.com/phpstan/phpstan) [![Latest Version](http://img.shields.io/packagist/v/rockettheme/toolbox.svg?style=flat)](https://packagist.org/packages/rockettheme/toolbox) [![Software License](https://img.shields.io/badge/license-MIT-brightgreen.svg?style=flat)](LICENSE) -[![Build Status](https://img.shields.io/travis/rockettheme/toolbox/master.svg?style=flat)](https://travis-ci.org/rockettheme/toolbox) -[![Coverage Status](https://img.shields.io/scrutinizer/coverage/g/rockettheme/toolbox.svg?style=flat)](https://scrutinizer-ci.com/g/rockettheme/toolbox/code-structure) -[![Quality Score](https://img.shields.io/scrutinizer/g/rockettheme/toolbox.svg?style=flat)](https://scrutinizer-ci.com/g/rockettheme/toolbox) +[![Build 
Status](https://img.shields.io/travis/rockettheme/toolbox/master.svg?style=flat)](https://travis-ci.org/rockettheme/toolbox) +[![Coverage Status](https://img.shields.io/scrutinizer/coverage/g/rockettheme/toolbox.svg?style=flat)](https://scrutinizer-ci.com/g/rockettheme/toolbox/code-structure) +[![Quality Score](https://img.shields.io/scrutinizer/g/rockettheme/toolbox.svg?style=flat)](https://scrutinizer-ci.com/g/rockettheme/toolbox) [![Total Downloads](https://img.shields.io/packagist/dt/rockettheme/toolbox.svg?style=flat)](https://packagist.org/packages/rockettheme/toolbox) RocketTheme\Toolbox package contains a set of reusable PHP interfaces, classes and traits. diff --git a/JPS_ARBITRAGE/README.md b/JPS_ARBITRAGE/README.md index b72379360ff..773a2a56526 100644 --- a/JPS_ARBITRAGE/README.md +++ b/JPS_ARBITRAGE/README.md @@ -35,7 +35,7 @@ How do we know if it's not working? If the python logger logging to tomcat serve - https://www.icis.com/resources/news/2013/11/08/9723077/se-asia-to-china-palm-oil-freight-rates-may-fall-on-weak-demand/ - Non_Residential_Programmes_Electricity_Tariffs.aspx - missing - 2. exchange_rates.pyw + 2. exchange_rates.pyw - http://apilayer.net/api/live?access_key=402d77f0850c35adfa5a797e325262dd¤cies=CNY,SGD&source=USD&format=1 3. FAME_download.pyw diff --git a/web/augmented-uk/README.md b/web/augmented-uk/README.md index 0629c5b7848..4f5c3d3e3aa 100644 --- a/web/augmented-uk/README.md +++ b/web/augmented-uk/README.md @@ -1,6 +1,6 @@ # Visualisation of Augmented UK -The augmented UK visualisation has been put together as a single stack with no requirements on any external services. Both the data required for the visualisation, and the visualisation itself are hosted within the stack instance. For more information on the stack, read the [documentation here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). 
+The augmented UK visualisation has been put together as a single stack with no requirements on any external services. Both the data required for the visualisation, and the visualisation itself are hosted within the stack instance. For more information on the stack, read the [documentation here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). This directory contains the documentation, configuration files, and associated scripts for a visualisation window into the base world of The World Avatar (focusing on assets within the United Kingdom). Whilst other data and capabilities related to the project may exist elsewhere in The World Avatar, this documentation only covers the steps needed to acquire, upload, and visualise data used in the deployed visualisation currently available from [The World Avatar's website](https://theworldavatar.io). @@ -51,7 +51,7 @@ To support metadata for the visualisation, the related stack needs to also launc ### 2.3 Deploying the stack -Before following the deployment steps, please read the following section to get a better understanding of the workflow. Once the correct files for each data source have been acquired, we need to first spin up an instance of the stack (see [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) for details on how to do this). Once ready, we have to upload data using the [data uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader) into a relational database. Please read the [uploader's documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader) before commencing upload as this file will not detail the generic upload process. +Before following the deployment steps, please read the following section to get a better understanding of the workflow. 
Once the correct files for each data source have been acquired, we need to first spin up an instance of the stack (see [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) for details on how to do this). Once ready, we have to upload data using the [data uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader) into a relational database. Please read the [uploader's documentation](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader) before commencing upload as this file will not detail the generic upload process. With each data set come a number of pre-written associated files (configurations, queries, styles etc.). These files are documented along with their corresponding data source on the [Data](./docs/data.md) page. diff --git a/web/kingslynn-flood-routing/stack-manager-inputs/README.md b/web/kingslynn-flood-routing/stack-manager-inputs/README.md index d79198f36ee..6c0df5a9538 100644 --- a/web/kingslynn-flood-routing/stack-manager-inputs/README.md +++ b/web/kingslynn-flood-routing/stack-manager-inputs/README.md @@ -1,3 +1,3 @@ # Stack Manager Input -Please populate the `secrets` folder as per instructions [here](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md). \ No newline at end of file +Please populate the `secrets` folder as per instructions [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md). \ No newline at end of file diff --git a/web/twa-vis-framework/docs/overview.md b/web/twa-vis-framework/docs/overview.md index fc691a7a63f..20f4d4a54f3 100644 --- a/web/twa-vis-framework/docs/overview.md +++ b/web/twa-vis-framework/docs/overview.md @@ -24,7 +24,7 @@ At the time of writing the available mapping providers are [Mapbox](https://www. Before we can start specifying the data to be hosted within the visualisation, we need to create a Docker container that can host the web files the visualisation uses. 
This can be done by running a container based on the `twa-vf` image; an image that contains the pre-built TWA-VF libraries (available from the `/var/www/html/twa-vf` directory) and a webserver. -Users can either write their own `docker-compose.yml` file to run a standalone visualisation (i.e. outside of a TWA Stack environment), or use the TWA Stack to create a standard visualisation integrated within a stack instance (see the [TWA Stack Manager documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) for more details on the latter). +Users can either write their own `docker-compose.yml` file to run a standalone visualisation (i.e. outside of a TWA Stack environment), or use the TWA Stack to create a standard visualisation integrated within a stack instance (see the [TWA Stack Manager documentation](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) for more details on the latter). ### Importing the library @@ -154,7 +154,7 @@ The `index.html` file of the example Mapbox & Cesium visualisations has been pro In addition to the aforementioned configuration files, two additional files are required to house a Mapbox username and associated API key. Note these are required, even in Cesium visualisations, as the base map imagery is still provided by Mapbox. -To set these two files, either create and populate `mapbox_username`, and `mapbox_api_key` files within the hosted webspace, or use the stack infrastructure to provide these as Docker secrets. You can learn more about the latter by reading [the stack's documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +To set these two files, either create and populate `mapbox_username`, and `mapbox_api_key` files within the hosted webspace, or use the stack infrastructure to provide these as Docker secrets. 
You can learn more about the latter by reading [the stack's documentation](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). Once present, these files are queried by the TWA-VF, loading in the required credentials. Note that previous versions of the TWA-VF required these parameters to be set within each visualisation's `index.html` file, this is no longer required (see the example visualisations to learn about the new format). diff --git a/web/twa-vis-framework/docs/tutorial-mapbox.md b/web/twa-vis-framework/docs/tutorial-mapbox.md index 2432a8302c8..86934645329 100644 --- a/web/twa-vis-framework/docs/tutorial-mapbox.md +++ b/web/twa-vis-framework/docs/tutorial-mapbox.md @@ -151,7 +151,7 @@ source SELECT "OrganisationCode" AS id , ## Uploading the data -To upload the data so that it can be accessed as a Virtual Knowledge Graph, and stored as geospatial data in PostGIS, we first need to write a configuration file for [The Stack Data Uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader). Information on how to write the file, where to place it, then upload the data can be see on the data uploader's page. +To upload the data so that it can be accessed as a Virtual Knowledge Graph, and stored as geospatial data in PostGIS, we first need to write a configuration file for [The Stack Data Uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader). Information on how to write the file, where to place it, then upload the data can be seen on the data uploader's page. An example configuration file that covers all three CSVs used in tutorial, can be seen below and also found in the TWA repository [here](./resources/nhs.json).
@@ -492,7 +492,7 @@ SELECT ?Property (GROUP_CONCAT(?tmp; separator=", ") AS ?Value) WHERE { Now that we've configured the FIA to register a metadata query for IRIs with the `http://theworldavatar.com/ontology/health/nhs.owl#GPPractice` class, we can spin the agent up within our stack. -For information on how to restart the stack with the FIA agent, please see the [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) documentation. +For information on how to restart the stack with the FIA agent, please see the [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) documentation. **3. Configuring the visualisation:**
diff --git a/web/twa-vis-framework/example/mapbox.md b/web/twa-vis-framework/example/mapbox.md index 4f275639c95..d6083861766 100644 --- a/web/twa-vis-framework/example/mapbox.md +++ b/web/twa-vis-framework/example/mapbox.md @@ -28,7 +28,7 @@ A small amount of sample data has been committed to demonstrate the power of the In most deployed visualisations, an online stack of microservices will provide data endpoints through which data can be queried/loaded onto the visualisation. In this example, no online stack is used, solely to remove a lengthy prerequisite step. Instead, sample data in local GeoJSON files have been added (to be hosted by the visualisation's web server) and, in one case, a community provided WMS endpoint connected to. -In production, it is advised that all data is loaded into a geospatial data provider (like GeoServer) and a WMS endpoint used; local files can be utilised but then do not offer the optimisation and caching of services like GeoServer. For more information on how to do this, see the README for the [Stack Data Uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader). +In production, it is advised that all data is loaded into a geospatial data provider (like GeoServer) and a WMS endpoint used; local files can be utilised but then do not offer the optimisation and caching of services like GeoServer. For more information on how to do this, see the README for the [Stack Data Uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader). It's also worth noting that with this example visualisation, no triplestore data or FeatureInfoAgent is used, hence there is no support for dynamic metadata and timeseries data is unavailable. This is something that we plan to work on in future. 
From 66fbdfb19d41e7fbc7ece89d51844f288f203429 Mon Sep 17 00:00:00 2001 From: Myles Date: Wed, 2 Apr 2025 22:46:57 -0400 Subject: [PATCH 04/30] fix readmes --- .mlc_config.json | 5 ++++- Apps/PirmasensToiletApp/README.md | 2 +- Deploy/stacks/KingsLynn/Utilities/README.md | 4 ++-- Deploy/stacks/Singapore-sea-level-rise/README.md | 6 +++--- .../stacks/UK-building-retrofit/StackDeployment/README.md | 6 +++--- .../stacks/UK-building-retrofit/Utilities/TOPSIS/README.md | 1 + Deploy/stacks/ontop+geoserver/README.md | 2 +- Deploy/stacks/timeline/readme.md | 2 +- .../02.digital-twin/01.power-system/uk_power_system.md | 2 +- .../02.explore/02.digital-twin/02.gas-grid/uk_gas_grid.md | 2 +- .../02.explore/02.digital-twin/03.land-use/land_use.md | 2 +- .../02.explore/02.digital-twin/04.flood-risk/flood_risk.md | 2 +- .../web/website/site/vendor/composer/semver/README.md | 2 +- .../stacks/web/website/site/vendor/doctrine/cache/README.md | 2 +- .../website/site/vendor/erusev/parsedown-extra/README.md | 4 ++-- .../web/website/site/vendor/erusev/parsedown/README.md | 1 - .../web/website/site/vendor/guzzlehttp/psr7/README.md | 2 +- .../web/website/site/vendor/kodus/psr7-server/README.md | 1 - README.md | 2 +- web/pylon-visualisation/README.md | 2 +- web/twa-vis-framework/docs/mapbox.md | 2 +- 21 files changed, 28 insertions(+), 26 deletions(-) diff --git a/.mlc_config.json b/.mlc_config.json index 3e463229331..74701d604b9 100644 --- a/.mlc_config.json +++ b/.mlc_config.json @@ -19,7 +19,10 @@ "pattern": "https://maven.pkg.github.com/cambridge-cares/TheWorldAvatar/?" }, { - "pattern": "https://twitter.com/?" + "pattern": "https?://twitter.com/?" + }, + { + "pattern": "https?://www.dropbox.com/?" 
} ], "aliveStatusCodes": [200, 403, 0] diff --git a/Apps/PirmasensToiletApp/README.md b/Apps/PirmasensToiletApp/README.md index 29d8bb568dd..e58af70165a 100644 --- a/Apps/PirmasensToiletApp/README.md +++ b/Apps/PirmasensToiletApp/README.md @@ -58,7 +58,7 @@ Briefly, the app will only require the `Wasgau` and `Toilet` datasets, as well a ### 2.1.2 Feature Info Agent -The stack will also require the [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent) service to retrieve metadata. Please read setting up the [built-in service section](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#built-in-containers) for more details on deploying this. This agent will require the following configuration targeted at the toilet class in `fia-config.json`: +The stack will also require the [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent) service to retrieve metadata. Please read setting up the [built-in service section](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#built-in-containers) for more details on deploying this. 
This agent will require the following configuration targeted at the toilet class in `fia-config.json`: ```json { diff --git a/Deploy/stacks/KingsLynn/Utilities/README.md b/Deploy/stacks/KingsLynn/Utilities/README.md index a4fedfb726f..2bdc6ca89df 100644 --- a/Deploy/stacks/KingsLynn/Utilities/README.md +++ b/Deploy/stacks/KingsLynn/Utilities/README.md @@ -72,5 +72,5 @@ The scripts within the `kg_utils` subdirectory provide functionality to interact [Semantic 3D City Agents README]: https://github.com/cambridge-cares/CitiesKG/tree/develop/agents -[Access Agent README]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_ACCESS_AGENT -[run_uprn_agent_in_chunks.py]: \uprn_agent\run_uprn_agent_in_chunks.py \ No newline at end of file +[Access Agent README]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent +[run_uprn_agent_in_chunks.py]: uprn_agent/run_uprn_agent_in_chunks.py \ No newline at end of file diff --git a/Deploy/stacks/Singapore-sea-level-rise/README.md b/Deploy/stacks/Singapore-sea-level-rise/README.md index 3df16bea4c8..a4e97cad7eb 100644 --- a/Deploy/stacks/Singapore-sea-level-rise/README.md +++ b/Deploy/stacks/Singapore-sea-level-rise/README.md @@ -1,5 +1,5 @@ # Singapore Sea-Level Rise -This repository contains the instructions, directory structure and configurations required to deploy Singapore stack for Sea-Level-Rise analysis which builds on top of the existing [Augmented Singapore](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-singapore-stack/Deploy/stacks/Singapore). +This repository contains the instructions, directory structure and configurations required to deploy Singapore stack for Sea-Level-Rise analysis which builds on top of the existing [Augmented Singapore](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/Singapore). 
## Data Comprehensive data collated can be found in the [CARES dropbox link](https://www.dropbox.com/scl/fo/s4youc2epx7quqapolgw6/AH_IAMDhH9FppOosYpKd3zs?rlkey=4ab335m057bkv64zs7e8xdn20&dl=0). @@ -58,7 +58,7 @@ curl -X POST http://localhost:3838/buildingflooragent/ ``` - Check contents of ```gfa_floors.floors```, the number of rows should equate the number of buildings ### GFAAgent -[GFAAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-sea-level-rise-singapore/Agents/GFAAgent) computes the Gross Floor Area (GFA) and the construction cost of buildings. +[GFAAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/GFAAgent) computes the Gross Floor Area (GFA) and the construction cost of buildings. - Prequisites: 1) Floors data added by BuildingFloorAgent @@ -196,7 +196,7 @@ There are two sets of postcode data from running the stack data uploader with di 2) sgpostcode - More comprehensive and updated dataset - - Sourced from [https://github.com/isen-ng/singapore-postal-codes-1] + - Sourced from https://github.com/isen-ng/singapore-postal-codes-1 - Linked to buildings using building identification agent by running the HTTP request [postcode_matching.http] ### data.json diff --git a/Deploy/stacks/UK-building-retrofit/StackDeployment/README.md b/Deploy/stacks/UK-building-retrofit/StackDeployment/README.md index f254cecd06f..8ed82b9858d 100644 --- a/Deploy/stacks/UK-building-retrofit/StackDeployment/README.md +++ b/Deploy/stacks/UK-building-retrofit/StackDeployment/README.md @@ -60,7 +60,7 @@ bash ./stack.sh remove CamElyWis-DT -v # Remove individual service bash ./stack.sh remove CamElyWis-DT ``` -After spinning up the stack, the GUI endpoints to the running containers can be accessed via Browser (i.e. adminer, blazegraph, ontop, geoserver). The exact endpoints and login details can be found in the [Stack Manager README](Deploy/stacks/dynamic/stack-manager/README.md). 
+After spinning up the stack, the GUI endpoints to the running containers can be accessed via Browser (i.e. adminer, blazegraph, ontop, geoserver). The exact endpoints and login details can be found in the [Stack Manager README](https://github.com/TheWorldAvatar/stack/blob/main/stack-manager/README.md). ### Spinning Up the Stack Remotely via SSH @@ -136,8 +136,8 @@ The [Feature Info Agent] serves as an access point for the visualisation, enabli [common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Stack data uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[Stack manager]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md +[Stack data uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[Stack manager]: https://github.com/TheWorldAvatar/stack/blob/main/stack-manager/README.md [fia_queries]: ./Stack-manager/inputs/data/fia-queries diff --git a/Deploy/stacks/UK-building-retrofit/Utilities/TOPSIS/README.md b/Deploy/stacks/UK-building-retrofit/Utilities/TOPSIS/README.md index a294d85a897..56b63f78a58 100644 --- a/Deploy/stacks/UK-building-retrofit/Utilities/TOPSIS/README.md +++ b/Deploy/stacks/UK-building-retrofit/Utilities/TOPSIS/README.md @@ -70,4 +70,5 @@ result/TOPSIS_result.csv - The default weighting method is `"equal_weight"`, but it can be changed to `"entropy_weight_method"` in `criteria_weight.json`. + [entropy-based approach]: https://www.sciencedirect.com/topics/engineering/entropy-method#:~:text=The%20entropy%20method%20is%20an,indicators%20through%20the%20information%20entropy. 
\ No newline at end of file diff --git a/Deploy/stacks/ontop+geoserver/README.md b/Deploy/stacks/ontop+geoserver/README.md index a841c9d65ed..514be01d9af 100644 --- a/Deploy/stacks/ontop+geoserver/README.md +++ b/Deploy/stacks/ontop+geoserver/README.md @@ -34,7 +34,7 @@ * [Adminer interface to the PostgreSQL database with default login settings][postgres_web] (the default password can be found in the `postgis/postgres.env` file) * [Ontop web endpoint][ontop_web] * [GeoServer web endpoint][geoserver_web] - + [shapefiles]: https://trac.osgeo.org/gdal/wiki/UserDocs/Shapefiles [postgres_web]: http://localhost:2311/?pgsql=host.docker.internal%3A2317&username=postgres&db=the_world_avatar [ontop_web]: http://localhost:2316/ diff --git a/Deploy/stacks/timeline/readme.md b/Deploy/stacks/timeline/readme.md index faa9b4e7a2e..7b3a4028b1e 100644 --- a/Deploy/stacks/timeline/readme.md +++ b/Deploy/stacks/timeline/readme.md @@ -4,7 +4,7 @@ Stack name should be `timeline-test`. -Prepare these secret files in the [stack secret folder](./stack-manager/test/inputs/secrets) +Prepare these secret files in the [stack secret folder](./test/stack-manager/inputs/secrets) - geoserver_password - postgis_password diff --git a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/01.power-system/uk_power_system.md b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/01.power-system/uk_power_system.md index 7b178b4ee36..04f33e36736 100644 --- a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/01.power-system/uk_power_system.md +++ b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/01.power-system/uk_power_system.md @@ -42,4 +42,4 @@ slug: power-system
-[plugin:content-inject](/modular/partners) \ No newline at end of file +[plugin:content-inject](../../../modular/partners) \ No newline at end of file diff --git a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/02.gas-grid/uk_gas_grid.md b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/02.gas-grid/uk_gas_grid.md index 870e05ebe92..88469b26e19 100644 --- a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/02.gas-grid/uk_gas_grid.md +++ b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/02.gas-grid/uk_gas_grid.md @@ -36,4 +36,4 @@ slug: gas-grid
-[plugin:content-inject](/modular/partners) \ No newline at end of file +[plugin:content-inject](../../../modular/partners) \ No newline at end of file diff --git a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/03.land-use/land_use.md b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/03.land-use/land_use.md index 4023de8e069..46f7717ff16 100644 --- a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/03.land-use/land_use.md +++ b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/03.land-use/land_use.md @@ -38,4 +38,4 @@ slug: land-use
-[plugin:content-inject](/modular/partners) \ No newline at end of file +[plugin:content-inject](../../../modular/partners) \ No newline at end of file diff --git a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/04.flood-risk/flood_risk.md b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/04.flood-risk/flood_risk.md index 1225e40a27b..249bf1d5cbe 100644 --- a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/04.flood-risk/flood_risk.md +++ b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/04.flood-risk/flood_risk.md @@ -35,4 +35,4 @@ slug: flood-risk
-[plugin:content-inject](/modular/partners) \ No newline at end of file +[plugin:content-inject](../../../modular/partners) \ No newline at end of file diff --git a/Deploy/stacks/web/website/site/vendor/composer/semver/README.md b/Deploy/stacks/web/website/site/vendor/composer/semver/README.md index 409b9dcbaeb..3721a769388 100644 --- a/Deploy/stacks/web/website/site/vendor/composer/semver/README.md +++ b/Deploy/stacks/web/website/site/vendor/composer/semver/README.md @@ -5,7 +5,7 @@ Semver library that offers utilities, version constraint parsing and validation. Originally written as part of [composer/composer](https://github.com/composer/composer), now extracted and made available as a stand-alone library. - + [![Build Status](https://travis-ci.org/composer/semver.svg?branch=master)](https://travis-ci.org/composer/semver) diff --git a/Deploy/stacks/web/website/site/vendor/doctrine/cache/README.md b/Deploy/stacks/web/website/site/vendor/doctrine/cache/README.md index c795a058428..5b12f551539 100644 --- a/Deploy/stacks/web/website/site/vendor/doctrine/cache/README.md +++ b/Deploy/stacks/web/website/site/vendor/doctrine/cache/README.md @@ -1,5 +1,5 @@ # Doctrine Cache - + [![Build Status](https://img.shields.io/travis/doctrine/cache/master.svg?style=flat-square)](http://travis-ci.org/doctrine/cache) [![Code Coverage](https://codecov.io/gh/doctrine/dbal/branch/cache/graph/badge.svg)](https://codecov.io/gh/doctrine/dbal/branch/master) diff --git a/Deploy/stacks/web/website/site/vendor/erusev/parsedown-extra/README.md b/Deploy/stacks/web/website/site/vendor/erusev/parsedown-extra/README.md index cee4b5430ab..b1ae3149788 100644 --- a/Deploy/stacks/web/website/site/vendor/erusev/parsedown-extra/README.md +++ b/Deploy/stacks/web/website/site/vendor/erusev/parsedown-extra/README.md @@ -1,7 +1,7 @@ > You might also like [Caret](http://caret.io?ref=parsedown) - our Markdown editor for the Desktop. 
## Parsedown Extra - + [![Build Status](https://img.shields.io/travis/erusev/parsedown-extra/master.svg?style=flat-square)](https://travis-ci.org/erusev/parsedown-extra) An extension of [Parsedown](http://parsedown.org) that adds support for [Markdown Extra](https://michelf.ca/projects/php-markdown/extra/). @@ -24,7 +24,7 @@ echo $Extra->text('# Header {.sth}'); # prints:

Header

**Who uses Parsedown Extra?** -[October CMS](http://octobercms.com/), [Bolt CMS](http://bolt.cm/), [Kirby CMS](http://getkirby.com/), [Grav CMS](http://getgrav.org/), [Statamic CMS](http://www.statamic.com/) and [more](https://www.versioneye.com/php/erusev:parsedown-extra/references). +[October CMS](http://octobercms.com/), [Bolt CMS](http://bolt.cm/), [Kirby CMS](http://getkirby.com/), [Grav CMS](http://getgrav.org/), [Statamic CMS](http://www.statamic.com/) and more. **How can I help?** diff --git a/Deploy/stacks/web/website/site/vendor/erusev/parsedown/README.md b/Deploy/stacks/web/website/site/vendor/erusev/parsedown/README.md index b5d9ed2ee33..92691c56f74 100644 --- a/Deploy/stacks/web/website/site/vendor/erusev/parsedown/README.md +++ b/Deploy/stacks/web/website/site/vendor/erusev/parsedown/README.md @@ -2,7 +2,6 @@ ## Parsedown -[![Build Status](https://img.shields.io/travis/erusev/parsedown/master.svg?style=flat-square)](https://travis-ci.org/erusev/parsedown) Better Markdown Parser in PHP diff --git a/Deploy/stacks/web/website/site/vendor/guzzlehttp/psr7/README.md b/Deploy/stacks/web/website/site/vendor/guzzlehttp/psr7/README.md index acfabfdcbe3..6348481ceb8 100644 --- a/Deploy/stacks/web/website/site/vendor/guzzlehttp/psr7/README.md +++ b/Deploy/stacks/web/website/site/vendor/guzzlehttp/psr7/README.md @@ -4,7 +4,7 @@ This repository contains a full [PSR-7](http://www.php-fig.org/psr/psr-7/) message implementation, several stream decorators, and some helpful functionality like query string parsing. 
- + [![Build Status](https://travis-ci.org/guzzle/psr7.svg?branch=master)](https://travis-ci.org/guzzle/psr7) diff --git a/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md b/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md index b3f5cb3aaef..e8bafaaa799 100644 --- a/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md +++ b/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md @@ -1,7 +1,6 @@ # Helper class to create PSR-7 server request [![Latest Version](https://img.shields.io/github/release/Nyholm/psr7-server.svg?style=flat-square)](https://github.com/Nyholm/psr7-server/releases) -[![Build Status](https://img.shields.io/travis/Nyholm/psr7-server/master.svg?style=flat-square)](https://travis-ci.org/Nyholm/psr7-server) [![Code Coverage](https://img.shields.io/scrutinizer/coverage/g/Nyholm/psr7-server.svg?style=flat-square)](https://scrutinizer-ci.com/g/Nyholm/psr7-server) [![Quality Score](https://img.shields.io/scrutinizer/g/Nyholm/psr7-server.svg?style=flat-square)](https://scrutinizer-ci.com/g/Nyholm/psr7-server) [![Total Downloads](https://poser.pugx.org/nyholm/psr7-server/downloads)](https://packagist.org/packages/nyholm/psr7-server) diff --git a/README.md b/README.md index 2a7ea8dbba8..cbbfe18c779 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ The World Avatar represents information in a dynamic knowledge graph using techn Listed below are a number of the key technical features available within The World Avatar ecosystem. More information on these, and other features, can be seen on [The World Avatar Wiki](https://github.com/cambridge-cares/TheWorldAvatar/wiki). **TWA Stack:**
-The knowledge graph and its agents are hosted using collections of containers. How to use them is explained in the [stack manager](./Deploy/stacks/dynamic/stack-manager) and [stack data uploader](./Deploy/stacks/dynamic/stack-data-uploader) folders. +The knowledge graph and its agents are hosted using collections of containers. How to use them is explained in the [stack manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) and [stack data uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader) folders. **TWA Base Library:**
The [base lib](./JPS_BASE_LIB) is a collection of functionality that is shared among many parts of the code. Core functions include the ability to generate and upload TBoxes, query KGs and RDBs, implement RESTful APIs, and triple cloning. diff --git a/web/pylon-visualisation/README.md b/web/pylon-visualisation/README.md index ecbd501f881..af70e1cb839 100644 --- a/web/pylon-visualisation/README.md +++ b/web/pylon-visualisation/README.md @@ -3,4 +3,4 @@ Visualisation of pylon data from National Grid and UK Power Networks compared with vegetation data. ## Requirements -Please see [Spinning up the example stack](../twa-vis-framework/example/README.md#spinning-up-the-example-stack) for the requirements. [Forestry](../../Deploy/stacks/dynamic/examples/datasets/inputs/data/forestry/), [cropmap](../../Deploy/stacks/dynamic/examples/datasets/inputs/data/cropmap/), [UK Power Networks' pylons](../../Deploy/stacks/dynamic/examples/datasets/inputs/data/ukpn_pylons/), and [National Grid pylons](../../Deploy/stacks/dynamic/examples/datasets/inputs/data/ng_pylons/) data are required to be uploaded using the configuration file [pylons-and-veg.json](../../Deploy/stacks/dynamic/examples/datasets/inputs/config/pylons-and-veg.json). Instructions on loading data into the stack can be found [here](../../Deploy/stacks/dynamic/stack-data-uploader/README.md). \ No newline at end of file +Please see [Spinning up the example stack](../twa-vis-framework/example/README.md#spinning-up-the-example-stack) for the requirements. 
[Forestry](https://github.com/TheWorldAvatar/stack/tree/main/examples/datasets/inputs/data/forestry/), [cropmap](https://github.com/TheWorldAvatar/stack/tree/main/examples/datasets/inputs/data/cropmap/), [UK Power Networks' pylons](https://github.com/TheWorldAvatar/stack/tree/main/examples/datasets/inputs/data/ukpn_pylons/), and [National Grid pylons](https://github.com/TheWorldAvatar/stack/tree/main/examples/datasets/inputs/data/ng_pylons/) data are required to be uploaded using the configuration file [pylons-and-veg.json](https://github.com/TheWorldAvatar/stack/tree/main/examples/datasets/inputs/config/pylons-and-veg.json). Instructions on loading data into the stack can be found [here](https://github.com/TheWorldAvatar/stack/blob/main/stack-data-uploader/README.md). \ No newline at end of file diff --git a/web/twa-vis-framework/docs/mapbox.md b/web/twa-vis-framework/docs/mapbox.md index 58329aa1ed7..0e0a968ba39 100644 --- a/web/twa-vis-framework/docs/mapbox.md +++ b/web/twa-vis-framework/docs/mapbox.md @@ -116,7 +116,7 @@ Mapbox also supports a system called [SDF Icons](https://docs.mapbox.com/help/tr These are effectively PNG icons that only contains transparency and a single colour; when used it allows Mapbox to dynamically change the colour of icons based on their dynamic styling system. As SDF icons are loaded slightly differently to regular ones, any icons you wish to be treated as SDF icons will need `-sdf` added to their file name. -Icons can be uploaded to and served by GeoServer via the [stack-data-uploader](../../../Deploy/stacks/dynamic/stack-data-uploader/README.md#staticgeoserverdata). +Icons can be uploaded to and served by GeoServer via the [stack-data-uploader](https://github.com/TheWorldAvatar/stack/blob/main/stack-data-uploader/README.md#staticgeoserverdata).
From 1856c433b96a18f66dac32d19022cf68c6cff593 Mon Sep 17 00:00:00 2001 From: Myles MacDonald Date: Sat, 12 Apr 2025 14:51:33 -0400 Subject: [PATCH 05/30] fix links --- .mlc_config.json | 23 ++++++++++++++++-- AR/CARESLab/README.md | 2 +- Agents/APIAgent/README.md | 20 ++++++++-------- Agents/AQMeshInputAgent/README.md | 2 +- Agents/AccessAgent/README.md | 2 +- Agents/AirQualityAgent/README.md | 12 +++++----- Agents/AndroidStatusAgent/README.md | 6 ++--- Agents/AverageSquareMetrePriceAgent/README.md | 14 +++++------ Agents/BMSBacnetAgent/README.md | 2 +- Agents/BMSInstantiationAgent/README.MD | 4 ++-- Agents/BMSQueryAgent/README.md | 6 ++--- Agents/BMSUpdateAgent/README.md | 10 ++++---- Agents/BuildingFloorAgent/README.md | 6 +++-- Agents/BuildingMatchingAgent/README.md | 2 +- Agents/CARESWeatherStationAgent/README.md | 4 ++-- Agents/CEAAgent/README.md | 10 ++++---- Agents/CarparkAgent/README.md | 6 ++--- Agents/CopCalculationAgent/README.md | 14 +++++------ Agents/DashboardAgent/README.md | 22 ++++++++--------- Agents/DataBridgeAgent/README.md | 8 +++---- Agents/DerivationAgentPythonExample/README.md | 8 +++---- Agents/DistrictHeatingAgent/README.md | 2 +- .../README.md | 16 ++++++------- .../README.md | 6 ++--- .../README.md | 4 ++-- Agents/ESPHomeAgent/README.md | 8 +++---- Agents/ESPHomeUpdateAgent/README.md | 4 ++-- .../README.md | 16 ++++++------- Agents/FHSashAndOccupancyAgent/README.MD | 6 ++--- Agents/FenlandTrajectoryAgent/README.md | 24 +++++++++---------- Agents/FilterAgent/README.md | 2 +- Agents/FloodAssessmentAgent/README.md | 18 +++++++------- Agents/FloodWarningAgent/README.md | 8 +++---- Agents/ForecastingAgent/README.md | 12 +++++----- Agents/GFAAgent/README.md | 2 +- Agents/HMLandRegistryAgent/README.md | 8 +++---- Agents/HistoricalAQMeshAgent/README.md | 2 +- Agents/HistoricalNTUEnergyAgent/README.md | 4 ++-- Agents/HistoricalNUSDavisAgent/README.md | 2 +- .../HistoricalPirmasensStationAgent/README.md | 2 +- Agents/Ifc2OntoBIMAgent/README.md | 2 
+- Agents/Ifc2TilesetAgent/README.md | 4 ++-- Agents/IfcOwlConverterAgent/README.md | 2 +- .../InequalityIndexCalculationAgent/README.md | 18 +++++++------- Agents/IsochroneAgent/README.md | 14 +++++------ Agents/LSOAInputAgent/README.md | 10 ++++---- Agents/MackayCalculatorAgent/readme.md | 4 ++-- Agents/MackayDataAgent/README.md | 10 ++++---- Agents/MetOfficeAgent/README.md | 14 +++++------ Agents/NTUDataAgent/README.md | 2 +- Agents/NTUEnergyClusterAgent/README.md | 2 +- Agents/NTUForecastingAgent/README.md | 2 +- Agents/NTUP2PEnergyAgent/README.md | 4 ++-- Agents/NTUPVLibAgent/README.md | 6 ++--- Agents/NUSDavisWeatherStationAgent/README.md | 2 +- Agents/NetworkAnalysisAgent/README.md | 8 +++---- Agents/OPFAgent/README.md | 2 +- Agents/OSMAgent/README.md | 20 ++++++++-------- Agents/OntoMatchAgent/README.md | 4 ++-- Agents/PVLibAgent/README.md | 10 ++++---- Agents/PropertyValueEstimationAgent/README.md | 14 +++++------ Agents/RFIDQueryAgent/README.MD | 10 ++++---- Agents/RFIDUpdateAgent/README.md | 2 +- .../MetOfficeWindSensorAgent/README.md | 6 ++--- Agents/RenewableEnergyAgents/README.md | 4 ++-- .../README.md | 16 ++++++------- Agents/RxnOptGoalAgent/README.md | 4 ++-- Agents/RxnOptGoalIterAgent/README.md | 4 ++-- Agents/SeaLevelImpactAgent/README.md | 2 +- Agents/SensorLoggerMobileAppAgent/README.md | 10 ++++---- Agents/SmartMeterAgent/README.md | 4 ++-- Agents/ThingsBoardAgent/README.md | 2 +- Agents/ThingspeakAgent/README.MD | 2 +- Agents/TimeSeriesExample/README.md | 4 ++-- Agents/TrafficIncidentAgent/README.md | 6 ++--- Agents/TrajectoryQueryAgent/README.md | 4 ++-- Agents/TravellingSalesmanAgent/README.md | 8 +++---- Agents/UserAgent/README.md | 4 ++-- Agents/UtilityCostCalculationAgent/README.md | 18 +++++++------- Agents/ZeoliteAgent/README.md | 2 +- Agents/_DerivationPaper/README.md | 2 +- Agents/utils/chemistry_and_robots/README.md | 6 ++--- Agents/utils/python-utils/README.md | 2 +- .../Stack_Deployment/README.md | 16 ++++++------- 
Deploy/stacks/Chile/README.md | 5 ++-- .../KingsLynn/StackDeployment/README.md | 18 +++++++------- Deploy/stacks/Pirmasens/README.md | 5 +++- .../StackDeployment/README.md | 2 +- web/twa-vis-framework/example/README.md | 2 +- 89 files changed, 338 insertions(+), 315 deletions(-) diff --git a/.mlc_config.json b/.mlc_config.json index 74701d604b9..94f184d79b3 100644 --- a/.mlc_config.json +++ b/.mlc_config.json @@ -21,9 +21,28 @@ { "pattern": "https?://twitter.com/?" }, + { + "pattern": "https?://www.w3.org/?" + }, + { + "pattern": "https?://www.ontology-of-units-of-measure.org/?" + }, + { + "pattern": "https?://kg.cmclinnovations.com/mods-agent(/[^ ]*)?" + }, + { + "pattern": "https?://github.com/[^\/]+/[^\/]+/issues(/[^ ]*)?" + }, { "pattern": "https?://www.dropbox.com/?" + }, + { + "pattern": "https?://docs.unity3d.com/?" } - ], - "aliveStatusCodes": [200, 403, 0] + ], + "aliveStatusCodes": [ + 200, + 403, + 0 + ] } \ No newline at end of file diff --git a/AR/CARESLab/README.md b/AR/CARESLab/README.md index 7d76cc8860a..d60a3f17528 100644 --- a/AR/CARESLab/README.md +++ b/AR/CARESLab/README.md @@ -105,7 +105,7 @@ A complete guide of setting up **new** MRTK3 project can be found at [here](http Run in Unity is the most easy and convenient way to test during development. Check [here](https://learn.microsoft.com/en-us/windows/mixed-reality/mrtk-unity/mrtk3-input/packages/input/input-simulation#how-to-use-mrtk3-input-simulation-mrtk3-input-simulator-default-controls) for keys to control. 
Other resources:
-[Debug C# code in Unity](https://docs.unity3d.com/Manual/ManagedCodeDebugging.html)
+[Debug C# code in Unity](https://docs.unity3d.com/Manual/managed-code-debugging.html)
 
 ### Test and Deploy on Device or Emulator
 
diff --git a/Agents/APIAgent/README.md b/Agents/APIAgent/README.md
index d1d60a1c0b5..9c445543319 100644
--- a/Agents/APIAgent/README.md
+++ b/Agents/APIAgent/README.md
@@ -4,7 +4,7 @@ This `API Agent` can be used to manage the automatic instantiation and updating
 The agent is integrated with the [Derived Information Framework]'s (DIF) to ensure proper data provenance. API information and API-Data-to-TimeSeries mappings are defined under a meta-data instance in KG. TS data is then considered as the derived quantity of the meta-data. The required meta-data triples to derive an API-downloaded TS instance are described in the [required derivation markup](#12-required-derivation-markup) section below.
-Once a API is registered using the [`DerivationClient`](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/derivation), API agent automatically manages a periodical re-downloading of the data from that API.
+Once an API is registered using the [`DerivationClient`](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/derivation), API agent automatically manages a periodical re-downloading of the data from that API.
 Ontology definition of API meta-data relies on [Web of Things (WoT) Hypermedia Controls Ontology]. We also extend [RDF Mapping Language (RML)] for TimeSeries data as `rml4ts Ontology`.
 
@@ -135,25 +135,25 @@ One may notice the agent, once started, creates a property file named `APIAgent.
[allows you to publish and install packages]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages [py4jps]: https://pypi.org/project/py4jps/#description -[TimeSeriesClient]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries +[TimeSeriesClient]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries [Darts]: https://unit8co.github.io/darts/index.html [Prophet]: https://unit8co.github.io/darts/generated_api/darts.models.forecasting.prophet_model.html [Facebook Prophet]: https://github.com/facebook/prophet [Github container registry]: https://ghcr.io [personal access token]: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens -[Derived Information Framework]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/derivation -[Stack manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[derivation agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent +[Derived Information Framework]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/derivation +[Stack manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[derivation agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent -[OntoTimeSeries]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries -[OntoDerivation]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoderivation -[rml4ts.owl]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/rml4ts/rml4ts.owl +[OntoTimeSeries]: 
https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries +[OntoDerivation]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoderivation +[rml4ts.owl]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/rml4ts/rml4ts.owl [docker compose file]: ./docker-compose.yml [Web of Things (WoT) Hypermedia Controls Ontology]:https://www.w3.org/2019/wot/hypermedia [RDF Mapping Language (RML)]:https://rml.io/specs/rml/ [test_triples]: ./test_triples -[api_temperature.ttl]: ./tbox_dev/test_triples/api_temperature.ttl +[api_temperature.ttl]: ./test_triples/api_temperature.ttl [calculations module]: ./data_classes/calculations.py -[api_pvcapacity.ttl]: ./tbox_dev/test_triples/api_pvcapacity.ttl \ No newline at end of file +[api_pvcapacity.ttl]: ./test_triples/api_pvcapacity.ttl \ No newline at end of file diff --git a/Agents/AQMeshInputAgent/README.md b/Agents/AQMeshInputAgent/README.md index 1a7f2faf908..2232d65efe0 100644 --- a/Agents/AQMeshInputAgent/README.md +++ b/Agents/AQMeshInputAgent/README.md @@ -3,7 +3,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the AQMesh air quality measuring station. It's only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS base lib to interact with both the KG and database. 
Before explaining the usage of the agent, we will briefly summarize the AQMesh API that is diff --git a/Agents/AccessAgent/README.md b/Agents/AccessAgent/README.md index d4093b19d4c..490cf089577 100644 --- a/Agents/AccessAgent/README.md +++ b/Agents/AccessAgent/README.md @@ -41,7 +41,7 @@ If building a new version of the image, the new image should be pushed to the Gi docker push ghcr.io/cambridge-cares/access-agent:X.Y.Z ``` -where X.Y.Z is the new version number. Please also ensure that you are logged in to the docker registry. Follow [step 1 of this](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) for clarity. +where X.Y.Z is the new version number. Please also ensure that you are logged in to the docker registry. Follow [step 1 of this](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) for clarity. #### Integration tests diff --git a/Agents/AirQualityAgent/README.md b/Agents/AirQualityAgent/README.md index eaccf677d43..7912db90c2a 100644 --- a/Agents/AirQualityAgent/README.md +++ b/Agents/AirQualityAgent/README.md @@ -153,20 +153,20 @@ Markus Hofmeister (mh807@cam.ac.uk), March 2023 [allows you to publish and install packages]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages [CMCL Docker registry]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts [Create SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/create-with-openssh/ [Github container registry]: https://ghcr.io [Github package repository]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Packages 
[http://localhost:5002/airqualityagent]: http://localhost:5002/airqualityagent [Java Development Kit version >=11]: https://adoptium.net/en-GB/temurin/releases/?version=11 [JDBC driver]: https://jdbc.postgresql.org/download/ -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB -[OntoEMS]: https://raw.githubusercontent.com/cambridge-cares/TheWorldAvatar/main/JPS_Ontology/ontology/ontoems/OntoEMS.owl +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main +[OntoEMS]: https://raw.githubusercontent.com/TheWorldAvatar/ontology/refs/heads/main/ontology/ontoems/OntoEMS.owl [personal access token]: https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token [py4jps]: https://pypi.org/project/py4jps/#description -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [Upload SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/to-existing-droplet/ [VSCode via SSH]: https://code.visualstudio.com/docs/remote/ssh [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar diff --git a/Agents/AndroidStatusAgent/README.md b/Agents/AndroidStatusAgent/README.md index d353d172806..41c2586039c 100644 --- a/Agents/AndroidStatusAgent/README.md +++ b/Agents/AndroidStatusAgent/README.md @@ -11,7 +11,7 @@ Because DTVF can only send request instead of taking in any, logging the status # 1. 
Setup This agent is designed to run in stack, which is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). A successful setup will result in 9 containers: -- 8 [default containers](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) +- 8 [default containers](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) - AndroidStatusAgent ## 1.1 Config BMSQueryAgent in Stack @@ -58,10 +58,10 @@ config/ |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ## 1.2 Spin Up Stack -Follow these [steps](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) to spin up the stack. +Follow these [steps](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) to spin up the stack. # 2. Usage This agent support both POST and GET requests. 
diff --git a/Agents/AverageSquareMetrePriceAgent/README.md b/Agents/AverageSquareMetrePriceAgent/README.md index c98ea4502eb..b73dc7fbbfe 100644 --- a/Agents/AverageSquareMetrePriceAgent/README.md +++ b/Agents/AverageSquareMetrePriceAgent/README.md @@ -251,16 +251,16 @@ Markus Hofmeister (mh807@cam.ac.uk), March 2023 [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Derivation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[Derivation Agent configuration]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/python_derivation_agent/pyderivationagent/conf/agent_conf.py +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Derivation Agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[Derivation Agent configuration]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent/pyderivationagent/conf/agent_conf.py [EPC Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EnergyPerformanceCertificateAgent -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [OntoBuiltEnv]: http://www.theworldavatar.com/ontology/ontobuiltenv/OntoBuiltEnv.owl [HM Property Sales Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HMLandRegistryAgent -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#spinning-up-a-stack -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack-Clients]: 
https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#spinning-up-a-stack +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [The World Avatar]: https://github.com/cambridge-cares/TheWorldAvatar diff --git a/Agents/BMSBacnetAgent/README.md b/Agents/BMSBacnetAgent/README.md index 7c42856920b..6fca2674993 100644 --- a/Agents/BMSBacnetAgent/README.md +++ b/Agents/BMSBacnetAgent/README.md @@ -50,7 +50,7 @@ This agent automatically runs the update job periodically once deployed and does [config file]: ./config/config.py -[TimeSeriesClient]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries +[TimeSeriesClient]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries [BMSQueryAgent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BMSQueryAgent diff --git a/Agents/BMSInstantiationAgent/README.MD b/Agents/BMSInstantiationAgent/README.MD index 091031923ce..a41406dbb6c 100644 --- a/Agents/BMSInstantiationAgent/README.MD +++ b/Agents/BMSInstantiationAgent/README.MD @@ -1,8 +1,8 @@ # BMS Instantiation Agent -This agent is designed to instantiate devices found in Building Management System (BMS) based on [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice), [OntoBMS](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobms) and [OntoCAPE](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontocape). 
+This agent is designed to instantiate devices found in Building Management System (BMS) based on [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice), [OntoBMS](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobms) and [OntoCAPE](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontocape). -The agent mainly uses the [remote store client](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) from the JPS_BASE_LIB to interact with the knowledge graph. +The agent mainly uses the [remote store client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) from the JPS_BASE_LIB to interact with the knowledge graph. ## Usage This part of the README describes the usage of the agent. The module itself can be packaged into an executable war, deployed as a web servlet on tomcat. Sending the appropriate request to the correct URL will initiate the agent. Since it uses the remote store client to interact with the knowledge graph , the knowledge graph will be required to be set-up beforehand. diff --git a/Agents/BMSQueryAgent/README.md b/Agents/BMSQueryAgent/README.md index 8499cc405e8..e64cbe71bd8 100644 --- a/Agents/BMSQueryAgent/README.md +++ b/Agents/BMSQueryAgent/README.md @@ -25,7 +25,7 @@ A successful setup will result in 9 containers (optional 10): For the BMSQueryAgent to return results, it is assumed that there is already knowledge graph in the Blazegraph. -BMSQueryAgent does not depend on [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent), but they are used together to create time series visualisation in the [BMS Query App](https://github.com/cambridge-cares/TheWorldAvatar/tree/1502-android-app-for-data-visualisation/Apps/BMSQueryApp). 
+BMSQueryAgent does not depend on [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent), but they are used together to create time series visualisation in the [BMS Query App](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Apps/BMSQueryApp). ## 1.1 Config BMSQueryAgent in Stack @@ -54,10 +54,10 @@ config/ |_ ... |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ## 1.2 Spin Up Stack -Follow the [steps](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) to spin up the stack. +Follow the [steps](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) to spin up the stack. # 2. Usage The agent accepts three paths: diff --git a/Agents/BMSUpdateAgent/README.md b/Agents/BMSUpdateAgent/README.md index f4a5816c442..9370b2e1f02 100644 --- a/Agents/BMSUpdateAgent/README.md +++ b/Agents/BMSUpdateAgent/README.md @@ -53,10 +53,10 @@ config/ |_ ... |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#specifying-custom-containers). ## 1.3. Spin Up Stack -Follow these [steps](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) to spin up the stack. 
+Follow these [steps](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) to spin up the stack. ## 2. Client Properties File Each of the route requires a client.properties file containing specific keys and values. The client.properties file are then mapped to an environment variable in the docker container. Refer to the `ENV` section in `stack-manager-input-config-service/bms-update-agent.json` for more information. @@ -104,13 +104,13 @@ More information can be found in the example property file `updateTriplesClient. # 3. Authorization Setup ## 3.1 Setup Stack and Keycloak -Please refer to [Deploy/stacks/dynamic/examples/services/keycloak](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/examples/services/keycloak) for setup guide. +Please refer to [Deploy/stacks/dynamic/examples/services/keycloak](https://github.com/TheWorldAvatar/stack/tree/main/examples/services/keycloak) for setup guide. ## 3.2 Configure Client After BMSUpdateAgent client has been registered in Keycloak, 1. Download client adapter from keycloak admin console. -2. Copy [stack-manager-input/secets/bms_updateagent_keycloak.json](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/BMSUpdateAgent/stack-manager-input/secrets/bms-update-agent-keycloak.json) to the stack manager's input secrets folder. +2. Copy [stack-manager-input/secets/bms_updateagent_keycloak.json](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/BMSUpdateAgent/stack-manager-input/config/services/bms-update-agent.json) to the stack manager's input secrets folder. 3. Replace `realm`, `resource` and `secret` in the copied secret file 4. Replace `STACK_NAME` in the copied secret file 5. (DEVELOPER) Update src/main/webapp/WEB-INF/web.xml to set up authorization on more endpoints. 
@@ -159,7 +159,7 @@ In order for the agent to query for `bacnetObjectId` and `bacnetDeviceId`, the [ - `dataIRI` the data IRI that is linked to the `bacnetObjectId` and `bacnetDeviceId`. - `value` the value to write to the Bacnet object - The query run by the agent is structured based on [OntoBMS](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontobms/OntoBMS.owl): + The query run by the agent is structured based on [OntoBMS](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontobms/OntoBMS.owl): ``` ontobms:hasBacnetObjectID "bacnetObjectId" ; diff --git a/Agents/BuildingFloorAgent/README.md b/Agents/BuildingFloorAgent/README.md index 0cdbaf86017..2e685ecd70e 100644 --- a/Agents/BuildingFloorAgent/README.md +++ b/Agents/BuildingFloorAgent/README.md @@ -6,7 +6,7 @@ This agent has been developed to improve the number of floors for 3D buildings. 3) Cat. C: the data is estimate calculated by the height of buildilng ### 1.1 Requirements -The agent requires 3D building models based on the CityGML standard. These models must be uploaded through the [stack-data-uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader#citydb-data). +The agent requires 3D building models based on the CityGML standard. These models must be uploaded through the [stack-data-uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader#citydb-data). ## 2. Building the Agent The agent is designed for execution through a Docker container. Other deployment workflows are beyond the scope of this document. Follow the steps below to build and deploy the agent. 
@@ -49,7 +49,9 @@ According to the general standard of Singapore, there are two cases: 2) Domestic building: 1st floor height is 3.6m, the rest floor height is 2.8m ### 2.3 Retrieving BuildingFloorAgent's image -The Building Floor Agent should be pulled automatically with the stack-manager, if not you can pull the latest version from [cambridge_cares package](https://github.com/orgs/cambridge-cares/packages/container/package/buildingflooragent) using `docker pull ghcr.io/cambridge-cares/buildingflooragent:` + + +The Building Floor Agent should be pulled automatically with the stack-manager, if not you can pull the latest version from cambridge_cares package using `docker pull ghcr.io/cambridge-cares/buildingflooragent:` ### 2.4 Starting with the stack-manager The agent has been implemented to work in the stack. To do so, place buildingfloor.json in the [stack-manager config directory]. diff --git a/Agents/BuildingMatchingAgent/README.md b/Agents/BuildingMatchingAgent/README.md index a10b72ca894..957efbbabe9 100644 --- a/Agents/BuildingMatchingAgent/README.md +++ b/Agents/BuildingMatchingAgent/README.md @@ -1,7 +1,7 @@ # Building Matching Agent ### Introduction -The Building Matching Agent is used to link a building instantiated in [OntoBuiltEnv](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontobuiltenv/OntoBuiltEnv.owl) to its corresponding instance instantiated in [OntoCityGML](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontocitygml/OntoCityGML.owl). The link is created by using UPRNs as the identifiers. +The Building Matching Agent is used to link a building instantiated in [OntoBuiltEnv](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobuiltenv/OntoBuiltEnv.owl) to its corresponding instance instantiated in [OntoCityGML](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontocitygml/OntoCityGML.owl). The link is created by using UPRNs as the identifiers. 
### Input The agent accepts 3 input parameters in a JSONObject format with the keys: "ocgml", "obe" and "prefixIRI" where, "ocgml" is the endpoint containing buildings instantiated in OntoCityGML, "obe" is the endpoint containing buildings instantiated in OntoBuiltEnv and "prefixIRI" is the complete IRI of the OntoCityGML namespace (i.e. the IRI string used to prefix all OntoCityGml instances at creation). diff --git a/Agents/CARESWeatherStationAgent/README.md b/Agents/CARESWeatherStationAgent/README.md index cddb47ca8bc..3bcb203c12a 100644 --- a/Agents/CARESWeatherStationAgent/README.md +++ b/Agents/CARESWeatherStationAgent/README.md @@ -2,7 +2,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the weather station located in the vicinity of the CARES Lab. Its only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the KG and database. Before explaining the usage of the agent, we will briefly summarize the weather station API that is @@ -39,7 +39,7 @@ taken at a timestamp between the first and third image. ## Usage This part of the README describes the usage of the input agent. The module itself can be packaged into an executable war, deployed as a web servlet on tomcat. Sending the appropriate request to the correct URL will initiate the agent. -The agent instantiates the weather reading retrieved via the API as timeseries in the knowledge graph. 
In addition, it will check and instantiate the ABoxes for the weather station and the quantities it measures based on these ontologies [ontology-of-units-of-measure](https://github.com/cambridge-cares/OM/tree/master), [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontodevice/OntoDevice.owl), [OntoEMS](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontoems/OntoEMS.owl), , [OntoTimeSeries](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontotimeseries/OntoTimeSeries.owl). An example of the ABox is shown below: +The agent instantiates the weather reading retrieved via the API as timeseries in the knowledge graph. In addition, it will check and instantiate the ABoxes for the weather station and the quantities it measures based on these ontologies [ontology-of-units-of-measure](https://github.com/cambridge-cares/OM/tree/master), [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice/OntoDevice.owl), [OntoEMS](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoems/OntoEMS.owl), , [OntoTimeSeries](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries/OntoTimeSeries.owl). An example of the ABox is shown below: ``` rdf:type ontoems:ReportingStation ; ontoems:reports . diff --git a/Agents/CEAAgent/README.md b/Agents/CEAAgent/README.md index f2bb7a644a1..b317ca9a827 100644 --- a/Agents/CEAAgent/README.md +++ b/Agents/CEAAgent/README.md @@ -63,7 +63,7 @@ The agent employs a configuration file - [CEAAgentConfig.properties], the conten 4. `terrain.database`: _Optional_ field for the RDB database name containing the terrain data. 5. `terrain.table`: _Optional_ field for the RDB table containing the terrain data. -The file location of `CEAAgentConfig.properties` can be set in the stack service configuration file at [cea-agent.json]. 
The default location is set to the [current file location](./cea-agent/src/main/resources/CEAAgentConfig.properties) and need not be modified if you transfer the [cea-agent.json] to the [stack manager's services](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services) directory. +The file location of `CEAAgentConfig.properties` can be set in the stack service configuration file at [cea-agent.json]. The default location is set to the [current file location](./cea-agent/src/main/resources/CEAAgentConfig.properties) and need not be modified if you transfer the [cea-agent.json] to the [stack manager's services](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services) directory. ### 2.2. Agent Dependencies @@ -114,7 +114,7 @@ Run `./stack.sh start ` in the [stack-manager] directory to start th ### 2.6. Visualisation -Visualisation of CEA outputs can be achieved with the [TWA Visualisation Framework] and [FeatureInfoAgent]. The [TWA Visualisation Framework] can be deployed according to the [documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#example---including-a-visualisation). +Visualisation of CEA outputs can be achieved with the [TWA Visualisation Framework] and [FeatureInfoAgent]. The [TWA Visualisation Framework] can be deployed according to the [documentation](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#example---including-a-visualisation). For the [FeatureInfoAgent], the `.sparql` and `.json` files for CEA data is available at [feature-info-agent-input](./feature-info-agent-input). This must be placed inside the `stack-manager/input/data/fia-queries` directory following the [FeatureInfoAgent] instructions. 
@@ -383,8 +383,8 @@ The agent will attempt to retrieve terrain data for an area containing the targe [CEAAgentConfig.properties]: ./cea-agent/src/main/resources/CEAAgentConfig.properties [cea-agent.json]: ./stack-manager-input-config/cea-agent.json [cea-agent-debug.json]: ./stack-manager-input-config/cea-agent-debug.json -[stack-data-uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config services]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services +[stack-data-uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config services]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services [TWA Visualisation Framework]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/web/twa-vis-framework [FeatureInfoAgent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent diff --git a/Agents/CarparkAgent/README.md b/Agents/CarparkAgent/README.md index a62e2a1f718..0a4c269e9dc 100644 --- a/Agents/CarparkAgent/README.md +++ b/Agents/CarparkAgent/README.md @@ -2,8 +2,8 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the carparks located in Singapore. Its only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. 
The -agent uses the [Timeseries Client](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and the [Remote Store Client](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) -from the JPS_BASE_LIB to interact with both the KG and database to mantain the KG instances and timeseries. In addition, the agent will instantiate the carpark's geolocation information in postGIS and Geoserver via the [GDAL Client](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-clients/src/main/java/com/cmclinnovations/stack/clients/gdal/GDALClient.java) and [Geoserver Client](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-clients/src/main/java/com/cmclinnovations/stack/clients/geoserver/GeoServerClient.java). The agent is also able to interact with the [Building Identification Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BuildingIdentificationAgent) to match the carparks to their nearest building based on the carpark's geolocation information (latitude, longitude etc). +agent uses the [Timeseries Client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and the [Remote Store Client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) +from the JPS_BASE_LIB to interact with both the KG and database to mantain the KG instances and timeseries. 
In addition, the agent will instantiate the carpark's geolocation information in postGIS and Geoserver via the [GDAL Client](https://github.com/TheWorldAvatar/stack/tree/main/stack-clients/src/main/java/com/cmclinnovations/stack/clients/gdal/GDALClient.java) and [Geoserver Client](https://github.com/TheWorldAvatar/stack/tree/main/stack-clients/src/main/java/com/cmclinnovations/stack/clients/geoserver/GeoServerClient.java). The agent is also able to interact with the [Building Identification Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BuildingIdentificationAgent) to match the carparks to their nearest building based on the carpark's geolocation information (latitude, longitude etc). ## Carpark API The carpark information are retrieved via two different APIs. @@ -109,7 +109,7 @@ curl -X POST --header "Content-Type: application/json" -d "{\"delay\":\"0\",\"in ``` #### Create route -This request instantiates the ABoxes for the carparks based on [ontoCarpark](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontocarpark/OntoCarpark.owl) and matches each carpark to the closest building (within 100m) via the [Building Identification Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BuildingIdentificationAgent). The carparks meta data are stored in the sparql endpoints indicated in the [client.properties](#client-properties) while the carpark's geolocation and matched buildings data are stored based on the locations indicated in the `stack-manager-input-config-service/carpark-agent.json` ([Geolocation data configurations](#geolocation-data-configurations)). 
The request has the following format: +This request instantiates the ABoxes for the carparks based on [ontoCarpark](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontocarpark/OntoCarpark.owl) and matches each carpark to the closest building (within 100m) via the [Building Identification Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BuildingIdentificationAgent). The carparks meta data are stored in the sparql endpoints indicated in the [client.properties](#client-properties) while the carpark's geolocation and matched buildings data are stored based on the locations indicated in the `stack-manager-input-config-service/carpark-agent.json` ([Geolocation data configurations](#geolocation-data-configurations)). The request has the following format: ``` curl -X POST http://localhost:3838/carpark-agent/create ``` diff --git a/Agents/CopCalculationAgent/README.md b/Agents/CopCalculationAgent/README.md index 042e96be244..f5e9dc9adaf 100644 --- a/Agents/CopCalculationAgent/README.md +++ b/Agents/CopCalculationAgent/README.md @@ -145,16 +145,16 @@ Jieyang Xu (jx309@cam.ac.uk), May 2023 [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Derivation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[Derivation Agent configuration]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/python_derivation_agent/pyderivationagent/conf/agent_conf.py +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Derivation Agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[Derivation Agent configuration]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent/pyderivationagent/conf/agent_conf.py [EPC 
Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-EPCInstantiationAgent/Agents/EnergyPerformanceCertificateAgent -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [OntoRegionalAnalysis]: http://www.theworldavatar.com/ontology/ontoregionalanlysis/OntoRegionalAnalysis.owl [HM Land Registry Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-PropertySalesInstantiationAgent/Agents/HMLandRegistryAgent -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#spinning-up-a-stack -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#spinning-up-a-stack +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [The World Avatar]: https://github.com/cambridge-cares/TheWorldAvatar [Average Square Metre Price Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AverageSquareMetrePriceAgent diff --git a/Agents/DashboardAgent/README.md b/Agents/DashboardAgent/README.md index ac6f8701a21..535e52db9d4 100644 --- a/Agents/DashboardAgent/README.md +++ b/Agents/DashboardAgent/README.md @@ -1,6 +1,6 @@ # Dashboard Agent The Dashboard Agent is designed to set up and populate dashboards within a stack. These dashboards will require both spatial topological and time series data to be available within the stack. 
-Namely, it will require the concept of buildings, facilities, rooms, elements and connected sensors/devices from at minimal the [OntoBIM](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobim) and [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice) ontologies. +Namely, it will require the concept of buildings, facilities, rooms, elements and connected sensors/devices from at minimal the [OntoBIM](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobim) and [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice) ontologies. ## Instructions Before you can use the Dashboard Agent, there are some requirements you need to meet. Follow the steps below to ensure you have everything you need to successfully run the agent. @@ -26,7 +26,7 @@ This agent requires the following tools, which **MUST** run on the same stack. P (1) [Grafana](https://grafana.com/docs/grafana/latest/) dashboard - Requires the deployment of the built-in optional `grafana` service on the stack to configure and set up dashboards - For deploying the service, - - include `grafana` as a service following the [stack-manager configuration file](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#stack-configuration) + - include `grafana` as a service following the [stack-manager configuration file](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#stack-configuration) - include a `grafana_password` with your password as a Docker Secret in the `stack-manager/inputs/secrets` directory. - Once deployed, the service can be accessed at the `/analytics` route with a default username of admin. - The following Plugin must be installed: @@ -42,7 +42,7 @@ This agent requires the following tools, which **MUST** run on the same stack. 
P - Contains triples linking time series to facilities and/or assets - Mandatory structure: - A name must be appended to all buildings, facilities, rooms, assets, sensors, and measures/dataIRIs through the `Instance rdfs:label "name"^^xsd:string` triple. - - All sensor measures are attached according to the [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice) ontology. + - All sensor measures are attached according to the [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice) ontology. - Units can be included into the dashboard through the `MeasureInstance om:hasUnit UnitInstance. UnitInstance om:symbol "symbols"^^xsd:string.` triples but are **OPTIONAL**. - Only temperature and relative humidity can be currently retrieved for any room measures. Do note to include a min and max threshold triples for the facility holding these rooms. - ABox samples are documented [here](#3-data-model-requirements). @@ -57,7 +57,7 @@ docker compose -f "./docker/docker-compose.test.yml" up -d --build **PRODUCTION ENVIRONMENT** - Build this agent's image by issuing `docker compose build` within this folder. Do not start the container. - Copy the `dashboard-agent.json` file from the `stack-manager-input-config` folder into the `inputs/config/services` folder of the stack manager, adjusting the absolute path of the bind mount as required. -Please review the [different routes](#2-agent-route) to understand the purpose of these bind mounts. See [sample bind mounts](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#bind-mounts) for the configuration syntax. +Please review the [different routes](#2-agent-route) to understand the purpose of these bind mounts. See [sample bind mounts](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#bind-mounts) for the configuration syntax. 
- Start the stack manager as usual following [these instructions](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). ### 2. Agent Route @@ -95,7 +95,7 @@ If the agent ran successfully, a JSON Object would be returned as follows. ``` ### 3. Data model requirements -This agent can retrieve the measures and their time series associated with a facility from the knowledge graph. Please ensure that the measure and time series triples conform to [TWA's time series standards](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-cities-ontologies/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and the standard `OM` model as illustrated in Figure 1. +This agent can retrieve the measures and their time series associated with a facility from the knowledge graph. Please ensure that the measure and time series triples conform to [TWA's time series standards](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and the standard `OM` model as illustrated in Figure 1. *Figure 1. Standard Time Series Measure ABox* ```mermaid @@ -113,12 +113,12 @@ The legend for the prefix-namespace is available below. 
Prefix | Namespace --- | --- [bot](https://w3c-lbd-cg.github.io/bot/) | `https://w3id.org/bot#` -[ontoam](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoassetmanagement) | `https://www.theworldavatar.com/kg/ontoassetmanagement/` -[ontobim](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobim) | `https://www.theworldavatar.com/kg/ontobim/` -[ontodevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice) | `https://www.theworldavatar.com/kg/ontodevice/` -[ontotechsystem](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotechnicalsystem) | `https://www.theworldavatar.com/kg/ontotechnicalsystem/` -[ontotimeseries](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries) | `https://www.theworldavatar.com/kg/ontotimeseries/` -[ontoubemmp](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoubemmp) | `https://www.theworldavatar.com/kg/ontoubemmp/` +[ontoam](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoassetmanagement) | `https://www.theworldavatar.com/kg/ontoassetmanagement/` +[ontobim](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobim) | `https://www.theworldavatar.com/kg/ontobim/` +[ontodevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice) | `https://www.theworldavatar.com/kg/ontodevice/` +[ontotechsystem](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotechnicalsystem) | `https://www.theworldavatar.com/kg/ontotechnicalsystem/` +[ontotimeseries](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries) | `https://www.theworldavatar.com/kg/ontotimeseries/` +[ontoubemmp](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoubemmp) | `https://www.theworldavatar.com/kg/ontoubemmp/` [om](https://github.com/HajoRijgersberg/OM) | 
`http://www.ontology-of-units-of-measure.org/resource/om-2/` [omgCD](https://www.omg.org/spec/COMMONS/Designators) | `https://www.omg.org/spec/Commons/Designators/` [saref](https://saref.etsi.org/core/) | `https://saref.etsi.org/core/` diff --git a/Agents/DataBridgeAgent/README.md b/Agents/DataBridgeAgent/README.md index cc548fd4a4e..0850ecddba6 100644 --- a/Agents/DataBridgeAgent/README.md +++ b/Agents/DataBridgeAgent/README.md @@ -69,7 +69,7 @@ There are currently four routes available: - `source`: The source SPARQL endpoint containing the triples to be transferred - `target`: The target SPARQL endpoint intended to store the transferred triples - Sample SPARQL endpoints for Blazegraph are [listed here](#4-sample-blazegraph-endpoints) - - **WARNING**: The transfer of triples may fail for large (>1 million) numbers of triples as it is dependent on the available RAM. The [JPS Base library's cloning tool](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/tools/cloning/CloningTool.java) can be used in that situation. + - **WARNING**: The transfer of triples may fail for large (>1 million) numbers of triples as it is dependent on the available RAM. The [JPS Base library's cloning tool](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/tools/cloning/CloningTool.java) can be used in that situation. - A sample `POST` request using curl on a CLI: ``` curl -X POST --header 'Content-Type: application/json' -d '{ @@ -99,13 +99,13 @@ curl -X GET 'localhost:3838/data-bridge-agent/sql?srcDbName=db' ``` 4. `/timeseries` route: - - Execute the agent's task through an HTTP `POST` request using the [time series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries). This route will instantiate the time series inputs sent in the request into the stack's knowledge graph. 
+ - Execute the agent's task through an HTTP `POST` request using the [time series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries). This route will instantiate the time series inputs sent in the request into the stack's knowledge graph. - The request will require the following parameters: - - `timeClass` : Refers to the time series classes as written in the [time series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries#instantiation-in-kg). + - `timeClass` : Refers to the time series classes as written in the [time series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries#instantiation-in-kg). - `timestamp` : A JSONArray containing the time stamp as strings in the format of `YYYY-MM-DD'T'HH:MM:SS`. - `values` : A JSONObject containing the time series values. A data IRI is inserted as the key and paired with their values as a JSONArray. For example: `{"dataIRI": [1, 2, 3]}`. - `namespace`: Specifies the SPARQL endpoint to store the instantiated time series data. See [Sample Blazegraph endpoints](#4-sample-blazegraph-endpoints) - - `database` : Specifies the database name within the same stack. Do note that this agent is not intended to instantiate data for non-stack databases. If required, please use the [Timeseries Client](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) in your agent instead. + - `database` : Specifies the database name within the same stack. Do note that this agent is not intended to instantiate data for non-stack databases. If required, please use the [Timeseries Client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) in your agent instead. 
- A sample `POST` request using curl on a CLI: ``` curl -X POST --header 'Content-Type: application/json' -d '{ diff --git a/Agents/DerivationAgentPythonExample/README.md b/Agents/DerivationAgentPythonExample/README.md index 30a8fc2cc3c..40d6e891781 100644 --- a/Agents/DerivationAgentPythonExample/README.md +++ b/Agents/DerivationAgentPythonExample/README.md @@ -3,7 +3,7 @@ ## Purpose This agent folder is intended as a template that one can copy and adapt to turn their own Python code into a derivation agent. The example requires [`pyderivationagent`](https://pypi.org/project/pyderivationagent/)>=1.4.1. -This document covers four stages: development, test, package & publish, and deployment. For each stage, a step-by-step instruction is provided. Before continuing with this tutorial, it is recommended to read the [documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent) of `pyderivationagent` and all the relevant links. +This document covers four stages: development, test, package & publish, and deployment. For each stage, a step-by-step instruction is provided. Before continuing with this tutorial, it is recommended to read the [documentation](https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent) of `pyderivationagent` and all the relevant links. If you identified anything that can be improved to make it easier for newcomers, please feel free to open a [pull request](https://github.com/cambridge-cares/TheWorldAvatar/pulls) or get in touch with the maintainer of the package. @@ -54,7 +54,7 @@ The following command can be used to install all required packages. () $ python -m pip install -e .[dev] ``` -As `pyderivationagent` library relies on the `py4jps` package, Java 11 is required. 
For Windows, it is recommended to obtain OpenJDK 11 from [here](https://developers.redhat.com/products/openjdk/download) and follow the [instructions](https://access.redhat.com/documentation/en-us/openjdk/11/html-single/installing_and_using_openjdk_11_for_windows/index). For linux environment, one can install via: +As the `pyderivationagent` library relies on the `py4jps` package, Java 11 is required. For Windows, it is recommended to obtain OpenJDK 11 from [here](https://developers.redhat.com/products/openjdk/download) and follow the [instructions](https://docs.redhat.com/en/documentation/red_hat_build_of_openjdk/11/html/installing_and_using_red_hat_build_of_openjdk_11_for_windows/index). For a Linux environment, one can install via: `(Linux)` ```sh @@ -102,7 +102,7 @@ The derivation agent modifies the knowledge graph automatically, it is therefore ### Local agent integration test This example is provided in `docker-compose-testcontainers.yml` file. Other relevant files are provided in the `tests` folder. -1. `dummy_services_secrets` folder: credential for blazegraph container used in test, and potentially auth json file for email services (for more information on this, please refer to the official documentation of [`pyderivationagent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent)) +1. `dummy_services_secrets` folder: credential for blazegraph container used in test, and potentially auth json file for email services (for more information on this, please refer to the official documentation of [`pyderivationagent`](https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent)) 2. `test_triples` folder: test triples for derivation inputs (example ABox), and example TBox where relevant concepts and relationships are defined 3. `agent.env.test` file: agent configuration parameters 4.
`conftest.py` for pytest: all pytest fixtures and other utility functions @@ -224,7 +224,7 @@ To release your agent, you may want to update information in `Dockerfile` and `d   ## Deployment -Example of configurations for the agent are provided in `agent.env.example` file. The knowledge graph endpoints used by this agent are specified using `SPARQL_QUERY_ENDPOINT` and `SPARQL_UPDATE_ENDPOINT`, with the credentials specified using `KG_USERNAME` and `KG_PASSWORD`. To avoid commit these information to git at deployment, developer may make a copy of this example file as `agent.env`. As `*.env` entry already exist in `.gitignore`, this new created file will be omitted. Any credentials encoded are safe. The `OntoAgent:Service` IRI of the agent is specified using `ONTOAGENT_SERVICE_IRI`. The periodically time interval to monitor asynchronous derivation is specified by `DERIVATION_PERIODIC_TIMESCALE`. One may also provide `DERIVATION_INSTANCE_BASE_URL` to be used by DerivationClient when creating derivations related instances. `ONTOAGENT_OPERATION_HTTP_URL` can be used to specify the URL of the agent that listens the request for updating synchronous derivations. To help monitoring the agent running status, an email notification feature is also provided and can be set up via `EMAIL_RECIPIENT`, `EMAIL_SUBJECT_PREFIX`, `EMAIL_USERNAME`, `EMAIL_AUTH_JSON_PATH` and `EMAIL_START_END_ASYNC_DERIVATIONS`. More details are provided in the [documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent) of the `pyderivationagent` package. Developers needs to ensure that this file is correctly updated before deploying the Docker Image. +Example of configurations for the agent are provided in `agent.env.example` file. The knowledge graph endpoints used by this agent are specified using `SPARQL_QUERY_ENDPOINT` and `SPARQL_UPDATE_ENDPOINT`, with the credentials specified using `KG_USERNAME` and `KG_PASSWORD`. 
To avoid committing this information to git at deployment, developers may make a copy of this example file as `agent.env`. As a `*.env` entry already exists in `.gitignore`, this newly created file will be omitted. Any credentials encoded are safe. The `OntoAgent:Service` IRI of the agent is specified using `ONTOAGENT_SERVICE_IRI`. The periodic time interval to monitor asynchronous derivation is specified by `DERIVATION_PERIODIC_TIMESCALE`. One may also provide `DERIVATION_INSTANCE_BASE_URL` to be used by DerivationClient when creating derivation-related instances. `ONTOAGENT_OPERATION_HTTP_URL` can be used to specify the URL of the agent that listens for requests for updating synchronous derivations. To help monitor the agent's running status, an email notification feature is also provided and can be set up via `EMAIL_RECIPIENT`, `EMAIL_SUBJECT_PREFIX`, `EMAIL_USERNAME`, `EMAIL_AUTH_JSON_PATH` and `EMAIL_START_END_ASYNC_DERIVATIONS`. More details are provided in the [documentation](https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent) of the `pyderivationagent` package. Developers need to ensure that this file is correctly updated before deploying the Docker Image. Once the env file is prepared, the docker image can be deployed via: diff --git a/Agents/DistrictHeatingAgent/README.md b/Agents/DistrictHeatingAgent/README.md index 27f30af3ae3..663db27f947 100644 --- a/Agents/DistrictHeatingAgent/README.md +++ b/Agents/DistrictHeatingAgent/README.md @@ -1,6 +1,6 @@ # District Heating Agent -This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the district heating network located in a midsize town in Germany. Its purpose is to instantiate instances of the district heating network.
The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the district heating network located in a midsize town in Germany. Its purpose is to instantiate instances of the district heating network. The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the KG and database. ## Usage diff --git a/Agents/DistrictHeatingEmissionEstimationAgent/README.md b/Agents/DistrictHeatingEmissionEstimationAgent/README.md index d2809b6c33a..5c344b295df 100644 --- a/Agents/DistrictHeatingEmissionEstimationAgent/README.md +++ b/Agents/DistrictHeatingEmissionEstimationAgent/README.md @@ -165,14 +165,14 @@ Markus Hofmeister (mh807@cam.ac.uk), August 2023 [py4jps]: https://pypi.org/project/py4jps/#description [Github container registry]: https://ghcr.io [personal access token]: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens -[Derived Information Framework]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/derivation -[Stack manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[derivation agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent - -[OntoTimeSeries]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries -[OntoHeatNetwork]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoheatnetwork -[OntoDispersion]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodispersion -[OntoDerivation]: 
https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoderivation +[Derived Information Framework]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/derivation +[Stack manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[derivation agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent + +[OntoTimeSeries]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries +[OntoHeatNetwork]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoheatnetwork +[OntoDispersion]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodispersion +[OntoDerivation]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoderivation [example triples]: ./tests/test_triples/example_abox.ttl diff --git a/Agents/DistrictHeatingOptimisationAgent/README.md b/Agents/DistrictHeatingOptimisationAgent/README.md index 06cc76651a2..c0a451b140c 100644 --- a/Agents/DistrictHeatingOptimisationAgent/README.md +++ b/Agents/DistrictHeatingOptimisationAgent/README.md @@ -205,9 +205,9 @@ Markus Hofmeister (mh807@cam.ac.uk), November 2023 [py4jps]: https://pypi.org/project/py4jps/#description [Github container registry]: https://ghcr.io [personal access token]: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens -[Derived Information Framework]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/derivation -[Stack manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[derivation agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent +[Derived Information Framework]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/derivation +[Stack manager]: 
https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[derivation agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent [preprint 275]: https://como.ceb.cam.ac.uk/preprints/275/ [chained derivations]: https://lucid.app/publicSegments/view/a00b553e-d9d1-4845-97b7-f480e980898e/image.png [chained derivations markup]: https://lucid.app/publicSegments/view/de4041e1-aee2-44d9-82ca-fffca25f5133/image.png diff --git a/Agents/DistrictHeatingOptimisationTriggerAgent/README.md b/Agents/DistrictHeatingOptimisationTriggerAgent/README.md index 48d5ccde2bb..6869ed066b5 100644 --- a/Agents/DistrictHeatingOptimisationTriggerAgent/README.md +++ b/Agents/DistrictHeatingOptimisationTriggerAgent/README.md @@ -13,7 +13,7 @@ The dockerised agent can be deployed as standalone version (i.e., outside a larg Before starting the agent, the `disp:hasOntoCityGMLCityObject` range instances in the [static_point_sources.ttl] file need to be populated manually with the corresponding exhaust outlets/chimneys, as there is currently no way to extract these CityObject IRIs programmatically. The agent will not start in case syntactically invalid IRIs are provided. As the entire [resources] folder is mounted into the container, no rebuilding is required after changing the triples to upload; a simple restart shall be sufficient. -The published agent image assumes the stack name to be `dhstack`. This is because this agent is an integral part of a larger stack and requires another service to be finished before it can start up. To determine when this is the case that service is curled via `dhstack-dh-instantiation`. Further details can be found [here](https://github.com/cambridge-cares/pirmasens/tree/main/districtheating_stack).
+The published agent image assumes the stack name to be `dhstack`. This is because this agent is an integral part of a larger stack and requires another service to be finished before it can start up. To determine when this is the case that service is curled via `dhstack-dh-instantiation`. Further details can be found [here](https://github.com/cambridge-cares/pirmasens/tree/main/districtheating_stack).
To deploy this agent to another stack, please adjust the stack name in the [delayed startup script] prior to re-building the image. @@ -120,7 +120,7 @@ Markus Hofmeister (mh807@cam.ac.uk), July 2023 [OntoTimeSeries (Miro board)]: https://miro.com/app/board/uXjVPFaO5As=/ [OntoHeatNet (Miro board)]: https://miro.com/app/board/uXjVOhnB9_4=/ [chained derivations]: https://lucid.app/publicSegments/view/a00b553e-d9d1-4845-97b7-f480e980898e/image.png -[specifying custom containers]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#specifying-custom-containers +[specifying custom containers]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#specifying-custom-containers [flaskapp init]: ./agent/flaskapp/__init__.py diff --git a/Agents/ESPHomeAgent/README.md b/Agents/ESPHomeAgent/README.md index ebf4882858c..d290f245a45 100644 --- a/Agents/ESPHomeAgent/README.md +++ b/Agents/ESPHomeAgent/README.md @@ -4,7 +4,7 @@ This agent is designed to query for the relevant IRIs and information from the k The agent will toggle the status of a component that is being controlled via the ESPHome web server based on the latest timeseries data value, latest status of the component and the setpoint value that it queries from the knowledge graph.(e.g. controlling the on off state of a cooling fan based on the latest measured temperature and temperature setpoint queried from the knowledge graph.) -The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and [Access Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_ACCESS_AGENT) to interact with both the KG and database. 
+The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and [Access Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent) to interact with both the KG and database. Before explaining the usage of the agent, we will briefly summarize the ESPHome API that is contacted by one of the classes in this package to retrieve data. @@ -51,9 +51,9 @@ http://///turn_off 1) It is required to have access to a knowledge graph SPARQL endpoint and Postgres database. These can run on the same machine or need to be accessible from the host machine via a fixed URL. This can be either in form of a Docker container or natively running on a machine. It is not in the scope of this README to explain the set-up of a knowledge graph triple store or Postgres database. -2) An Access Agent instance should be set up and working properly. The necessary routing information should be stored in a triple store such as Blazegraph. Check [Uploading-routing-information](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_ACCESS_AGENT#Uploading-routing-information) for more information. +2) An Access Agent instance should be set up and working properly. The necessary routing information should be stored in a triple store such as Blazegraph. Check [Uploading-routing-information](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent#Uploading-routing-information) for more information. -3) It is necessary to have the component and the setpoint that determines when to toggle the component instantiated in the knowledge graph based on [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice). 
It is necessary to have the timeseries of the status of the component instantiated in the knowledge graph, one such agent that does this is [ESPHome Update Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/ESPHomeUpdateAgent). An example of such an instantiation is shown below: +3) It is necessary to have the component and the setpoint that determines when to toggle the component instantiated in the knowledge graph based on [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice). It is necessary to have the timeseries of the status of the component instantiated in the knowledge graph, one such agent that does this is [ESPHome Update Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/ESPHomeUpdateAgent). An example of such an instantiation is shown below: ``` rdf:type ; ; @@ -149,7 +149,7 @@ If you want to spin up this agent as part of a stack, instead of `docker-compose - Replace the contents of `config/client.properties` with `config/client.properties_stack` and the contents of `config/esphome-client.properties` with `config/esphome-client.properties_stack`, inserting the name of your stack. - Build the image via `docker-compose build`. Do not start the container. - Copy the `json` file from the `stack-manager-input-config` folder into `TheWorldAvatar/Deploy/dynamic/stack-manager/inputs/config/services/`. -- Go to the stack manager folder by following this route: `TheWorldAvatar/Deploy/stacks/dynamic/stack-manager/`, check whether there is a `.json` under the sub folder `/inputs/config/` and create one if it doesn't exist. If it exists already, append the agent to the json file. (Read [Stack configuration](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#stack-configuration) for more information.) 
+- Go to the stack manager folder by following this route: `TheWorldAvatar/Deploy/stacks/dynamic/stack-manager/`, check whether there is a `.json` under the sub folder `/inputs/config/` and create one if it doesn't exist. If it exists already, append the agent to the json file. (Read [Stack configuration](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#stack-configuration) for more information.) - Start the stack manager as usual. This should start the container. diff --git a/Agents/ESPHomeUpdateAgent/README.md b/Agents/ESPHomeUpdateAgent/README.md index f89186636e2..2bb01f54873 100644 --- a/Agents/ESPHomeUpdateAgent/README.md +++ b/Agents/ESPHomeUpdateAgent/README.md @@ -3,7 +3,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding status of components that are controllable via ESPHome. It's only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS base lib to interact with both the KG and database. Before explaining the usage of the agent, we will briefly summarize the ESPHome API that is contacted by one of the classes in this package to retrieve data. @@ -147,7 +147,7 @@ If you want to spin up this agent as part of a stack, instead of `docker-compose - Replace the contents of `config/client.properties` with `config/client.properties_stack`, inserting the name of your stack. - Build the image via `docker-compose build`. Do not start the container. 
- Copy the `json` file from the `stack-manager-input-config` folder into `TheWorldAvatar/Deploy/dynamic/stack-manager/inputs/config/services/`. -- Go to the stack manager folder by following this route: `TheWorldAvatar/Deploy/stacks/dynamic/stack-manager/`, check whether there is a `.json` under the sub folder `/inputs/config/` and create one if it doesn't exist. If it exists already, append the agent to the json file. (Read [Stack configuration](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#stack-configuration) for more information.) +- Go to the stack manager folder by following this route: `TheWorldAvatar/Deploy/stacks/dynamic/stack-manager/`, check whether there is a `.json` under the sub folder `/inputs/config/` and create one if it doesn't exist. If it exists already, append the agent to the json file. (Read [Stack configuration](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#stack-configuration) for more information.) - Start the stack manager as usual. This should start the container. 
diff --git a/Agents/EnergyPerformanceCertificateAgent/README.md b/Agents/EnergyPerformanceCertificateAgent/README.md index f88759dc3eb..dd82f03f2b1 100644 --- a/Agents/EnergyPerformanceCertificateAgent/README.md +++ b/Agents/EnergyPerformanceCertificateAgent/README.md @@ -233,7 +233,7 @@ Markus Hofmeister (mh807@cam.ac.uk), February 2023 [http://localhost:5007/epcagent]: http://localhost:5007/epcagent [Java Runtime Environment version >=11]: https://adoptopenjdk.net/?variant=openjdk8&jvmVariant=hotspot [JDBC driver]: https://jdbc.postgresql.org/download/ -[OntoBuiltEnv]: https://raw.githubusercontent.com/cambridge-cares/TheWorldAvatar/main/JPS_Ontology/ontology/ontobuiltenv/OntoBuiltEnv.owl +[OntoBuiltEnv]: https://raw.githubusercontent.com/TheWorldAvatar/ontology/refs/heads/main/ontology/ontobuiltenv/OntoBuiltEnv.owl [personal access token]: https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token [py4jps]: https://pypi.org/project/py4jps/#description [Upload SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/to-existing-droplet/ @@ -241,13 +241,13 @@ Markus Hofmeister (mh807@cam.ac.uk), February 2023 [Building Matching Readme]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/BuildingMatchingAgent/README.md -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[credentials]: https://github.com/cambridge-cares/TheWorldAvatar/tree/1376-dev-building-matching-agent/Agents/BuildingMatchingAgent/credentials -[Digital Twin Visualisation Framework]:https://github.com/cambridge-cares/TheWorldAvatar/tree/main/web/digital-twin-vis-framework -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -[Stack Data Uploader]: 
https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-data-uploader/README.md -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[credentials]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BuildingMatchingAgent/credentials +[Digital Twin Visualisation Framework]:https://github.com/cambridge-cares/TheWorldAvatar/tree/main/web/twa-vis-framework +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md +[Stack Data Uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader/README.md +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar [CMCL Docker registry]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry diff --git a/Agents/FHSashAndOccupancyAgent/README.MD b/Agents/FHSashAndOccupancyAgent/README.MD index 3d89bef4cdc..9479346c0f0 100644 --- a/Agents/FHSashAndOccupancyAgent/README.MD +++ b/Agents/FHSashAndOccupancyAgent/README.MD @@ -1,6 +1,6 @@ # Fumehood Sash And Occupancy Agent -This agent is designed to query for the sash opening percentage and occupied state of fumehoods and walkin-fumehoods, if any of the fumehoods and walkin-fumehoods is unoccupied and has a sash opening percentage higher than a certain threshold, an email will be sent to the relevant personnel via the [EmailSender class in the JPS Base Lib](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/email/EmailSender.java) and the [Email Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EmailAgent). 
The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and [remote store client](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) from the JPS_BASE_LIB to interact with both the KG and database. +This agent is designed to query for the sash opening percentage and occupied state of fumehoods and walkin-fumehoods, if any of the fumehoods and walkin-fumehoods is unoccupied and has a sash opening percentage higher than a certain threshold, an email will be sent to the relevant personnel via the [EmailSender class in the JPS Base Lib](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/email/EmailSender.java) and the [Email Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EmailAgent). The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and [remote store client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) from the JPS_BASE_LIB to interact with both the KG and database. ## Usage This part of the README describes the usage of the agent. The module itself can be packaged into an executable war, deployed as a web servlet on tomcat. Sending the appropriate request to the correct URL will initiate the agent. Since it uses the time-series client and remote store client to interact with the KG and database, the KG and database will be required to be set-up beforehand. @@ -12,11 +12,11 @@ The [next section](#requirements) will explain the requirements to run the agent 2) It is required to have the [Email Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EmailAgent) set up beforehand. 
-3) It is required to have the fumehoods and walkin-fumehoods devices already instantiated in the knowledge graph based on [ontoBMS](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobms) and [ontoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice). +3) It is required to have the fumehoods and walkin-fumehoods devices already instantiated in the knowledge graph based on [ontoBMS](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobms) and [ontoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice). 4) In order for the agent to work properly, it is also necessary for the fumehoods and walkin-fumehoods instances to have occupied states and sash opening percentages. -5) The occupied states and sash opening percentages should be instantiated as timeseries in the knowledge graph via the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries). +5) The occupied states and sash opening percentages should be instantiated as timeseries in the knowledge graph via the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries). 
An example of the instance can be found below: ``` diff --git a/Agents/FenlandTrajectoryAgent/README.md b/Agents/FenlandTrajectoryAgent/README.md index 41b2d2efa8b..ba1b8806826 100644 --- a/Agents/FenlandTrajectoryAgent/README.md +++ b/Agents/FenlandTrajectoryAgent/README.md @@ -110,16 +110,16 @@ Jiying Chen (jc2341@cam.ac.uk), May 2024 [OntoDevice]: https://www.theworldavatar.com/kg/ontodevice [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar -[TimeSeriesClient]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack Manager README]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -[JPS_Document]: https://github.com/cambridge-cares/TheWorldAvatar/blob/dev-pydantic-rdflib/JPS_BASE_LIB/python_wrapper/docs/examples/additional_java_lib.md +[TimeSeriesClient]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack Manager README]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md +[JPS_Document]: https://github.com/TheWorldAvatar/baselib/tree/main/python_wrapper/docs/examples/additional_java_lib.md [twa]: https://pypi.org/project/twa/ [virtual environment]: https://docs.python.org/3/tutorial/venv.html -[Python wrapper]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_wrapper#installing-additional-java-resources +[Python wrapper]: https://github.com/TheWorldAvatar/baselib/tree/main/python_wrapper#installing-additional-java-resources [allows you to publish and install packages]: 
https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts [Create SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/create-with-openssh/ [DataPoint]: https://www.metoffice.gov.uk/services/data/datapoint/about [Github container registry]: https://ghcr.io @@ -127,12 +127,12 @@ Jiying Chen (jc2341@cam.ac.uk), May 2024 [http://localhost:5000/]: http://localhost:5000/ [Java Development Kit version >=11]: https://adoptium.net/en-GB/temurin/releases/?version=11 [JDBC driver]: https://jdbc.postgresql.org/download/ -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [personal access token]: https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token [py4jps]: https://pypi.org/project/py4jps/#description -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-MetOfficeAgent-withinStack/Deploy/stacks/dynamic/stack-clients +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar 
[Upload SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/to-existing-droplet/ [virtual environment]: https://docs.python.org/3/tutorial/venv.html @@ -148,8 +148,8 @@ Jiying Chen (jc2341@cam.ac.uk), May 2024 [resources]: ./resources [README]: ./README.md [FenlandTrajectoryAgent.json]: ./stack-manager-input-config-service/ -[stack manager configuration service directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services -[stack manager configuration directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/ +[stack manager configuration service directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services +[stack manager configuration directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/ [CURL commands folder]: ./example-requests/curl [SendHTTP]: ./example-requests/SendHTTP [preprocess]: ./example-requests/SendHTTP/gps_preprocess.http diff --git a/Agents/FilterAgent/README.md b/Agents/FilterAgent/README.md index 90c5be746aa..02de752515f 100644 --- a/Agents/FilterAgent/README.md +++ b/Agents/FilterAgent/README.md @@ -20,7 +20,7 @@ A stack-manager config similar to the following will deploy this agent: } ``` -Spin up a stack as per the [documented workflow](../../Deploy/stacks/dynamic/stack-manager/README.md). +Spin up a stack as per the [documented workflow](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md). The filter agent should be spun up along with the entire stack. There is no need to build or deploy the filter agent and its image separately. 
diff --git a/Agents/FloodAssessmentAgent/README.md b/Agents/FloodAssessmentAgent/README.md index 02bc7ba03a2..4ba798b8115 100644 --- a/Agents/FloodAssessmentAgent/README.md +++ b/Agents/FloodAssessmentAgent/README.md @@ -217,18 +217,18 @@ Markus Hofmeister (mh807@cam.ac.uk), November 2022 [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Derivation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[Derivation Agent configuration]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/python_derivation_agent/pyderivationagent/conf/agent_conf.py +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Derivation Agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[Derivation Agent configuration]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent/pyderivationagent/conf/agent_conf.py [EPC Instantiation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EnergyPerformanceCertificateAgent [Flood Warning Instantiation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FloodWarningAgent -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB -[OntoBuiltEnv]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuiltenv -[OntoFlood]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoflood +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main +[OntoBuiltEnv]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobuiltenv +[OntoFlood]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoflood [Property Value Estimation Agent]: 
https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/PropertyValueEstimationAgent -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#spinning-up-a-stack -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#spinning-up-a-stack +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [The World Avatar]: https://github.com/cambridge-cares/TheWorldAvatar diff --git a/Agents/FloodWarningAgent/README.md b/Agents/FloodWarningAgent/README.md index 865a1b5ec90..3b51e992f0b 100644 --- a/Agents/FloodWarningAgent/README.md +++ b/Agents/FloodWarningAgent/README.md @@ -121,15 +121,15 @@ Markus Hofmeister (mh807@cam.ac.uk), February 2023 [allows you to publish and install packages]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages [Github package repository]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Packages -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [personal access token]: https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token [py4jps]: https://pypi.org/project/py4jps/#description [Environment Agency Real Time flood-monitoring API]: https://environment.data.gov.uk/flood-monitoring/doc/reference#flood-warnings [FloodAssessmentAgent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FloodAssessmentAgent -[OntoFlood]: 
https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoflood -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager +[OntoFlood]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoflood +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar diff --git a/Agents/ForecastingAgent/README.md b/Agents/ForecastingAgent/README.md index 1bb6a0de318..a4a4ad3bd3a 100644 --- a/Agents/ForecastingAgent/README.md +++ b/Agents/ForecastingAgent/README.md @@ -300,18 +300,18 @@ Magnus Mueller (mm2692@cam.ac.uk), November 2022 [allows you to publish and install packages]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages [py4jps]: https://pypi.org/project/py4jps/#description -[TimeSeriesClient]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries +[TimeSeriesClient]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries [Darts]: https://unit8co.github.io/darts/index.html [Prophet]: https://unit8co.github.io/darts/generated_api/darts.models.forecasting.prophet_model.html [Facebook Prophet]: https://github.com/facebook/prophet [Github container registry]: https://ghcr.io [personal access token]: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens -[Derived Information Framework]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/derivation -[Stack manager]: 
https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[derivation agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent +[Derived Information Framework]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/derivation +[Stack manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[derivation agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent -[OntoTimeSeries]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries -[OntoDerivation]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoderivation +[OntoTimeSeries]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries +[OntoDerivation]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoderivation [HTTP forecast error request]: ./resources/HTTP_evaluate_errors.http diff --git a/Agents/GFAAgent/README.md b/Agents/GFAAgent/README.md index 32198884694..d98d2ed81d6 100644 --- a/Agents/GFAAgent/README.md +++ b/Agents/GFAAgent/README.md @@ -5,7 +5,7 @@ This agent has been developed to compute the Gross Floor Area (GFA) and GFA cost 2) Calculate the GFA cost of buildings by GFA and standard unit price of GFA cost. ### 1.1 Requirements -1) The agent requires 3D building models based on the CityGML standard. These models must be uploaded through the [stack-data-uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader#citydb-data). +1) The agent requires 3D building models based on the CityGML standard. These models must be uploaded through the [stack-data-uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader#citydb-data). 2) The agent requires number of floors data. Therefore, the Building Floor Agent should be run firstly. 
3) GFA cost standard unit price: The latest standard GFA cost should be integrated in csv file named as [ais_cost.csv](https://github.com/cambridge-cares/TheWorldAvatar/blob/4a5b2b6eaf60be88f95e1561da24b043943fec83/Agents/GFAAgent/stack-data-uploader-input-config/ais_cost.csv), which need to upload to postgresql by stack-data-uploader first. 4) Building usage matching: The mathcing of building cost standard and OntobBuiltEnv, which stores in the [cost_ontobuiltenv.csv](https://github.com/cambridge-cares/TheWorldAvatar/blob/4a5b2b6eaf60be88f95e1561da24b043943fec83/Agents/GFAAgent/src/main/resources/cost_ontobuiltenv.csv). diff --git a/Agents/HMLandRegistryAgent/README.md b/Agents/HMLandRegistryAgent/README.md index 505e32e8131..02a4d6efa3e 100644 --- a/Agents/HMLandRegistryAgent/README.md +++ b/Agents/HMLandRegistryAgent/README.md @@ -171,10 +171,10 @@ Markus Hofmeister (mh807@cam.ac.uk), March 2023 [HM Land Registry SPARQL endpoint]: http://landregistry.data.gov.uk/landregistry/query -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar [EPC Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EnergyPerformanceCertificateAgent [CMCL Docker Registry]: 
https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry diff --git a/Agents/HistoricalAQMeshAgent/README.md b/Agents/HistoricalAQMeshAgent/README.md index 79f88ad744b..1ea5495e597 100644 --- a/Agents/HistoricalAQMeshAgent/README.md +++ b/Agents/HistoricalAQMeshAgent/README.md @@ -3,7 +3,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the AQMesh air quality measuring station. It's only purpose is to retrieve new/historical data from excel files and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS base lib to interact with both the KG and database. ### Data retrieval and pre-processing diff --git a/Agents/HistoricalNTUEnergyAgent/README.md b/Agents/HistoricalNTUEnergyAgent/README.md index 005a5350ad4..82eb8fc78e1 100644 --- a/Agents/HistoricalNTUEnergyAgent/README.md +++ b/Agents/HistoricalNTUEnergyAgent/README.md @@ -187,10 +187,10 @@ config/ |_ ... |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ##### Spin Up Stack -Follow the [steps](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) to spin up the stack. 
+Follow the [steps](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) to spin up the stack. # 3. Run diff --git a/Agents/HistoricalNUSDavisAgent/README.md b/Agents/HistoricalNUSDavisAgent/README.md index 4fe7a77fe9b..e611dd64f7a 100644 --- a/Agents/HistoricalNUSDavisAgent/README.md +++ b/Agents/HistoricalNUSDavisAgent/README.md @@ -2,7 +2,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the NUS Davis weather station. It's only purpose is to retrieve new/historical data from excel files and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. -The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the KG and database. ### Data retrieval and pre-processing diff --git a/Agents/HistoricalPirmasensStationAgent/README.md b/Agents/HistoricalPirmasensStationAgent/README.md index 234baac7a3c..f2b2dd794bd 100644 --- a/Agents/HistoricalPirmasensStationAgent/README.md +++ b/Agents/HistoricalPirmasensStationAgent/README.md @@ -2,7 +2,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the Pirmasens weather station. It's only purpose is to retrieve new/historical data from csv files and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. 
-The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the KG and database. ### Data retrieval and pre-processing diff --git a/Agents/Ifc2OntoBIMAgent/README.md b/Agents/Ifc2OntoBIMAgent/README.md index 276d4ccdb51..8a89c3e2506 100644 --- a/Agents/Ifc2OntoBIMAgent/README.md +++ b/Agents/Ifc2OntoBIMAgent/README.md @@ -123,7 +123,7 @@ docker-compose up -d If you want to spin up both this agent and the IfcOwlConverterAgent as part of a stack, do the following: - Copy the contents of `config/client.properties_stack` into `config/client.properties`, inserting the name of your stack and the desired namespaces. - Build both images by issuing `docker compose build` in this folder. There is no need to build the IfcOwlConverterAgent separately. Do not start the containers. -- Copy the `json` file from the `stack-manager-input-config` folder of both agents into the `inputs/config/services` folder of the stack manager, adjusting the absolute path of the bind mounts as required. The `data` bind mount for both agents **MUST** be the same, ideally in the `.../stack-manager/inputs/data` directory. See [sample bind mounts](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#bind-mounts) for the configuration syntax. +- Copy the `json` file from the `stack-manager-input-config` folder of both agents into the `inputs/config/services` folder of the stack manager, adjusting the absolute path of the bind mounts as required. The `data` bind mount for both agents **MUST** be the same, ideally in the `.../stack-manager/inputs/data` directory. 
See [sample bind mounts](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#bind-mounts) for the configuration syntax. - Start the stack manager as usual. This should start both containers. #### 2.3 Running the Agent diff --git a/Agents/Ifc2TilesetAgent/README.md b/Agents/Ifc2TilesetAgent/README.md index f3f519efe70..c9479591e34 100644 --- a/Agents/Ifc2TilesetAgent/README.md +++ b/Agents/Ifc2TilesetAgent/README.md @@ -46,12 +46,12 @@ The agent is designed for deployment on [Docker](#12-docker-deployment). Althoug These dependencies have been added to the Dockerfile. But in the event there is a need to update their links, please read the steps below on how to find and extract the dependencies. 1. **IfcOpenShell** - Required to load and parse IFC files - - Download required version from https://blenderbim.org/docs-python/ifcopenshell-python/installation.html + - Download required version from https://docs.ifcopenshell.org/ifcopenshell-python/installation.html - Extract and place the `ifcopenshell` from `blenderbim/libs/site/packages/` to the `\Lib\site-packages` of either your temporary python environment `venv` or the python program - Delete the remaining extracted content 2. **IfcConvert.exe** - Required to convert IFC to glb format - - Download IfcConvert.exe from: https://blenderbim.org/docs-python/ifcconvert/installation.html + - Download IfcConvert.exe from: https://docs.ifcopenshell.org/ifcconvert/installation.html - Extract it to `` ### 1.2 Docker Deployment: diff --git a/Agents/IfcOwlConverterAgent/README.md b/Agents/IfcOwlConverterAgent/README.md index 0fda1fb3093..8aaf038a3c6 100644 --- a/Agents/IfcOwlConverterAgent/README.md +++ b/Agents/IfcOwlConverterAgent/README.md @@ -68,7 +68,7 @@ docker-compose up -d If you want to spin up this agent as part of a stack, do the following: - Build the image via `docker compose build`. Do not start the container. 
-- Copy the `json` file from the `stack-manager-input-config` folder into the `inputs/config/services` folder of the stack manager, adjusting the absolute path of the bind mount as required. If you are using this agent with the Ifc2OntoBIM agent, the `data` bind mount for both agents **MUST** be the same, ideally in the `.../stack-manager/inputs/data` directory. See [sample bind mounts](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#bind-mounts) for the configuration syntax. +- Copy the `json` file from the `stack-manager-input-config` folder into the `inputs/config/services` folder of the stack manager, adjusting the absolute path of the bind mount as required. If you are using this agent with the Ifc2OntoBIM agent, the `data` bind mount for both agents **MUST** be the same, ideally in the `.../stack-manager/inputs/data` directory. See [sample bind mounts](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#bind-mounts) for the configuration syntax. - Start the stack manager as usual. This should start the container. #### 2.3 Running the Agent diff --git a/Agents/InequalityIndexCalculationAgent/README.md b/Agents/InequalityIndexCalculationAgent/README.md index f889fa8bc5f..293f9d9e98d 100644 --- a/Agents/InequalityIndexCalculationAgent/README.md +++ b/Agents/InequalityIndexCalculationAgent/README.md @@ -144,7 +144,7 @@ Once the Agent is deployed, it periodically (defined by `DERIVATION_PERIODIC_TIM Details about how to use the agent please see the [home page] of this agent ## Upper level instances instatiation -If you started from an empty namespace, or have not instantiate upper level instances such as `country` or `assumption`, the result would not be able to be associated with them. 
Therefore it is required to run the [upper_level_ontology_update.py](./inequalityindexcalculationagent/upper_level_ontology_update.py), simply run this command in the powershell terminal: +If you started from an empty namespace, or have not instantiated upper level instances such as `country` or `assumption`, the result would not be able to be associated with them. Therefore it is required to run the [upper_level_ontology_update.py](./inequalityindexagent/upper_level_ontology_update.py), simply run this command in the powershell terminal: ```bash py ./inequalityindexcalculationagent/upper_level_ontology_update.py @@ -168,7 +168,7 @@ Details can be refered to the individual [README.md](./python_scripts/README.md) # Authors Jieyang Xu (jx309@cam.ac.uk), May 2023 -[markup.py]:./inequalityindexcalculationagent/markup.py +[markup.py]:./inequalityindexagent/markup.py [home page]:https://htmlpreview.github.io/?https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/InequalityIndexCalculationAgent/index.html [CopCalculationAgent]:https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/CopCalculationAgent [OntoCAPE]:http://theworldavatar.com/ontology/ontocape/ @@ -187,16 +187,16 @@ Jieyang Xu (jx309@cam.ac.uk), May 2023 [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry
https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent/pyderivationagent/conf/agent_conf.py [EPC Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-EPCInstantiationAgent/Agents/EnergyPerformanceCertificateAgent -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [OntoRegionalAnalysis]: http://www.theworldavatar.com/ontology/ontoregionalanlysis/OntoRegionalAnalysis.owl [HM Land Registry Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-PropertySalesInstantiationAgent/Agents/HMLandRegistryAgent -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#spinning-up-a-stack -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#spinning-up-a-stack +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [The World Avatar]: https://github.com/cambridge-cares/TheWorldAvatar [Average Square Metre Price Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AverageSquareMetrePriceAgent diff --git a/Agents/IsochroneAgent/README.md b/Agents/IsochroneAgent/README.md index 98d6db4ede0..2575b3b421f 100644 --- a/Agents/IsochroneAgent/README.md +++ b/Agents/IsochroneAgent/README.md @@ -12,7 +12,7 @@ The agent has been implemented to work in the stack. Follow the instructions in ### 2.2. 
Uploading OSM Data via stack-data-uploader 1) Download desired bounding box from [BBBike.org](https://extract.bbbike.org/) or [GeoFabrik](https://download.geofabrik.de/) in `.pbf` format. -2) `.pbf` uploaded via [stack-data-uploader] in [osm2pgrouting](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader#osm-data) data type. +2) `.pbf` uploaded via [stack-data-uploader] in [osm2pgrouting](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader#osm-data) data type. ### 2.3. Uploading population data via stack-data-uploader 1) Download High Resolution Population Density Maps from [HDX - Meta Data For Good](https://data.humdata.org/dataset/germany-high-resolution-population-density-maps-demographic-estimates?). @@ -45,7 +45,7 @@ To use the example created in [15MSC in Pirmasens](inputs/15MSC/POIqueries/), re [EdgeTable](https://docs.pgrouting.org/2.5/en/pgRouting-concepts.html#description-of-the-edges-sql-query-for-dijkstra-like-functions) describes the characteristic of the road networks. It is used to define the transport mode and road conditions during the calculation of isochrone. EdgeTableSQL follows the following format `TransportMode_RoadConditions.sql`. -1) `TransportMode` and `RoadConditions` refers to the ontology classes developed in [OntoIsochrone](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontoisochrone/OntoIsochrone.owl). +1) `TransportMode` and `RoadConditions` refers to the ontology classes developed in [OntoIsochrone](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoisochrone/OntoIsochrone.owl). 2) The SQL statement content refers to the cost table used for routing calculations. EdgeTableSQL are created for [15MSC in Pirmasens](inputs/15MSC/edgesSQLTable/) and [UR in King's Lynn](inputs/UR/edgesSQLTable/) use cases. @@ -111,13 +111,13 @@ The debugger port will be available at 5005. 
### 7.1 Feature Info Agent The isochrones is designed to be compatible with TWA-VF and queryable via FeatureInfoAgent. -1) In the directory [stack-manager-config/data/webspace/](stack-manager-config/data/webspace/), contains the TWA-VF `data.json` prepared for the different scnearios that is meant to be placed inside [`stack-manager/inputs/data/webspace`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/data), following instruction [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#example---including-a-visualisation). -2) In the directory [stack-manager-config/data/fia-queries/](stack-manager-config/data/fia-queries/), contains `SPARQL queries` and `fia-config.json` to be used with the agent [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent#configuration). Place the `fia-config.json` and `isochrone.sparql` inside `stack-manager/inputs/data/queries` as according the bind mount path specified in the stack-manager config's [`feature-info-agent.json`](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/FeatureInfoAgent/sample/feature-info-agent.json). +1) In the directory [stack-manager-config/data/webspace/](stack-manager-config/data/webspace/), contains the TWA-VF `data.json` prepared for the different scnearios that is meant to be placed inside [`stack-manager/inputs/data/webspace`](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/data), following instruction [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#example---including-a-visualisation). +2) In the directory [stack-manager-config/data/fia-queries/](stack-manager-config/data/fia-queries/), contains `SPARQL queries` and `fia-config.json` to be used with the agent [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent#configuration). 
Place the `fia-config.json` and `isochrone.sparql` inside `stack-manager/inputs/data/queries` as according the bind mount path specified in the stack-manager config's [`feature-info-agent.json`](https://github.com/TheWorldAvatar/Feature-Info-Agent/blob/main/sample/feature-info-agent.json). -[stack-data-uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services +[stack-data-uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services [inputs]: stack-data-uploader-inputs/ diff --git a/Agents/LSOAInputAgent/README.md b/Agents/LSOAInputAgent/README.md index 67c7ff48c8a..00d3bae2bf7 100644 --- a/Agents/LSOAInputAgent/README.md +++ b/Agents/LSOAInputAgent/README.md @@ -154,7 +154,7 @@ Jieyang Xu (jx309@cam.ac.uk), Feroz Farazi (msff2@cam.ac.uk) Dec 2022 [allows you to publish and install packages]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts [Create SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/create-with-openssh/ [DataPoint]: 
https://www.metoffice.gov.uk/services/data/datapoint/about [Github container registry]: https://ghcr.io @@ -162,12 +162,12 @@ Jieyang Xu (jx309@cam.ac.uk), Feroz Farazi (msff2@cam.ac.uk) Dec 2022 [http://localhost:5000/]: http://localhost:5000/ [Java Development Kit version >=11]: https://adoptium.net/en-GB/temurin/releases/?version=11 [JDBC driver]: https://jdbc.postgresql.org/download/ -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [personal access token]: https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token [py4jps]: https://pypi.org/project/py4jps/#description -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-MetOfficeAgent-withinStack/Deploy/stacks/dynamic/stack-clients +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar [Upload SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/to-existing-droplet/ [virtual environment]: https://docs.python.org/3/tutorial/venv.html diff --git a/Agents/MackayCalculatorAgent/readme.md b/Agents/MackayCalculatorAgent/readme.md index 87de7fdd1eb..7e44d19785a 100644 --- a/Agents/MackayCalculatorAgent/readme.md +++ b/Agents/MackayCalculatorAgent/readme.md @@ -102,5 +102,5 @@ For example, to start it on your local machine and on port 5000: flask run --host=0.0.0.0 --port=5000 ``` [MacKay Data Agent]: 
https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/MackayDataAgent -[Derived Information Framework]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/derivation -[OntoTimeSeries]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries \ No newline at end of file +[Derived Information Framework]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/derivation +[OntoTimeSeries]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries \ No newline at end of file diff --git a/Agents/MackayDataAgent/README.md b/Agents/MackayDataAgent/README.md index 34d9ffc978c..60ff0b5d78d 100644 --- a/Agents/MackayDataAgent/README.md +++ b/Agents/MackayDataAgent/README.md @@ -119,13 +119,13 @@ We map three Mackay inputs data to quantity instances in TWA KG. A proposed vers -[TimeSeriesClient]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries +[TimeSeriesClient]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries [Github container registry]: https://ghcr.io [personal access token]: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens -[Derived Information Framework]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/derivation -[derivation agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[OntoTimeSeries]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries -[OntoDerivation]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoderivation +[Derived Information Framework]: 
https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/derivation +[derivation agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[OntoTimeSeries]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries +[OntoDerivation]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoderivation [API Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/APIAgent [Forecasting Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/ForecastingAgent diff --git a/Agents/MetOfficeAgent/README.md b/Agents/MetOfficeAgent/README.md index 71b0f68815b..cc31baaaff8 100755 --- a/Agents/MetOfficeAgent/README.md +++ b/Agents/MetOfficeAgent/README.md @@ -209,7 +209,7 @@ Markus Hofmeister (mh807@cam.ac.uk), January 2022 [allows you to publish and install packages]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts [Create SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/create-with-openssh/ [DataPoint]: https://www.metoffice.gov.uk/services/data/datapoint/about [Github container registry]: https://ghcr.io @@ -217,18 +217,18 @@ Markus Hofmeister (mh807@cam.ac.uk), January 2022 [http://localhost:5000/]: http://localhost:5000/ [Java Development Kit version >=11]: https://adoptium.net/en-GB/temurin/releases/?version=11 [JDBC driver]: https://jdbc.postgresql.org/download/ -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB -[OntoEMS]: 
https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoems +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main +[OntoEMS]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoems [personal access token]: https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token [py4jps]: https://pypi.org/project/py4jps/#description -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-MetOfficeAgent-withinStack/Deploy/stacks/dynamic/stack-clients +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar [Upload SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/to-existing-droplet/ [virtual environment]: https://docs.python.org/3/tutorial/venv.html [VSCode via SSH]: https://code.visualstudio.com/docs/remote/ssh -[StackClients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[StackClients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [CMCL Docker registry]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry diff --git a/Agents/NTUDataAgent/README.md b/Agents/NTUDataAgent/README.md index 0531485a33c..141614f90d9 100644 --- a/Agents/NTUDataAgent/README.md +++ b/Agents/NTUDataAgent/README.md @@ -64,7 +64,7 @@ config/ |_ ... 
|_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ### 4. Spin up a Docker Stack diff --git a/Agents/NTUEnergyClusterAgent/README.md b/Agents/NTUEnergyClusterAgent/README.md index 1a0841c25e6..8531f4ff88e 100644 --- a/Agents/NTUEnergyClusterAgent/README.md +++ b/Agents/NTUEnergyClusterAgent/README.md @@ -59,7 +59,7 @@ config/ |_ ... |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ### [Step 3] Spin up a Docker Stack diff --git a/Agents/NTUForecastingAgent/README.md b/Agents/NTUForecastingAgent/README.md index 3fdc9fd31f0..5eac55dc9a3 100644 --- a/Agents/NTUForecastingAgent/README.md +++ b/Agents/NTUForecastingAgent/README.md @@ -63,7 +63,7 @@ The default namespace of the forecasting agent in ```forecasting-agent.json``` i #### Spinning up -Now, follow the [steps](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) to spin up the stack. The stack manager should bring up 12 containers in total. Activate the [Historical NTUEnergy Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HistoricalNTUEnergyAgent) by sending the specified curl request for the agent to instantiate the knowledge graph of the NTU power network. 
+Now, follow the [steps](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) to spin up the stack. The stack manager should bring up 12 containers in total. Activate the [Historical NTUEnergy Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HistoricalNTUEnergyAgent) by sending the specified curl request for the agent to instantiate the knowledge graph of the NTU power network. # 2. Usage of the agent diff --git a/Agents/NTUP2PEnergyAgent/README.md b/Agents/NTUP2PEnergyAgent/README.md index dee68ad86d3..5199d3e923f 100644 --- a/Agents/NTUP2PEnergyAgent/README.md +++ b/Agents/NTUP2PEnergyAgent/README.md @@ -53,11 +53,11 @@ config/ |_ ... |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ### [Step 3] Spin up a Docker Stack -**Note: The docker container must run within the same stack as the [HistoricalNTUEnergyAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/1496-dev-instantiate-historic-ntuenergyconsumptiondata-2/Agents/HistoricalNTUEnergyAgent) to get access and query the NTU Power Network Knowledge Graph for calculation.** +**Note: The docker container must run within the same stack as the [HistoricalNTUEnergyAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HistoricalNTUEnergyAgent) to get access and query the NTU Power Network Knowledge Graph for calculation.** Running this agent in a docker stack can facilitate interactions between other agents and endpoints (Postgres, Blazegraph, etc,.) for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). 
A successful setup will result in 10 containers (optional 11): diff --git a/Agents/NTUPVLibAgent/README.md b/Agents/NTUPVLibAgent/README.md index a6309a4d6d6..8536e2c5c38 100644 --- a/Agents/NTUPVLibAgent/README.md +++ b/Agents/NTUPVLibAgent/README.md @@ -2,7 +2,7 @@ The NTUPVLib Agent is a modified version of the [PVLibAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/PVLibAgent) developed to work with the PV data available for the NTU use case. The agent assumes a standard PV for the purpose of running PVLib and then scales the output by the PV area for the building. An additional scale factor is included to scale the data to a magnitude appropriate for the 15-bus NTU test system. -This agent is designed to calculate AC and DC Power output from Photovaltaic Panels based on values provided in the properties files or values queried from the knowledge graph. It will then initialise the AC and DC Power as timeseries in the knowledge graph. The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the knowledge graph and database and uses [PvLib](https://pvlib-python.readthedocs.io/en/stable/) for it's AC and DC Power calculations. +This agent is designed to calculate AC and DC Power output from Photovoltaic Panels based on values provided in the properties files or values queried from the knowledge graph. It will then initialise the AC and DC Power as timeseries in the knowledge graph. The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the knowledge graph and database and uses [PvLib](https://pvlib-python.readthedocs.io/en/stable/) for its AC and DC Power calculations. ## 1. Property files @@ -155,10 +155,10 @@ config/ |_ ...
|_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ##### Spin Up Stack -Follow the [steps](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) to spin up the stack. +Follow the [steps](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) to spin up the stack. ##### Run the agent diff --git a/Agents/NUSDavisWeatherStationAgent/README.md b/Agents/NUSDavisWeatherStationAgent/README.md index 9298c44b6a3..092e0080094 100644 --- a/Agents/NUSDavisWeatherStationAgent/README.md +++ b/Agents/NUSDavisWeatherStationAgent/README.md @@ -2,7 +2,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the NUS Davis weather station. Its only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the KG and database. 
Before explaining the usage of the agent, we will briefly summarize the weather station API that is diff --git a/Agents/NetworkAnalysisAgent/README.md b/Agents/NetworkAnalysisAgent/README.md index 8e183b5537d..ddcd9453521 100644 --- a/Agents/NetworkAnalysisAgent/README.md +++ b/Agents/NetworkAnalysisAgent/README.md @@ -70,10 +70,10 @@ Spin up with `./stack.sh start ` in the [stack-manager]'s main folde The debugger port will be available at 5005. ## 7. TWA-VF Visualisation -1) In the directory [stack-manager-config/data/webspace/](stack-manager-config/data/webspace/), contains the TWA-VF `data.json` prepared for the different scnearios that is meant to be placed inside [`stack-manager/inputs/data/webspace`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/data), following instruction [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#example---including-a-visualisation). +1) In the directory [stack-manager-config/data/webspace/](stack-manager-config/data/webspace/), contains the TWA-VF `data.json` prepared for the different scenarios that is meant to be placed inside [`stack-manager/inputs/data/webspace`](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/data), following instruction [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#example---including-a-visualisation).
-[stack-data-uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services +[stack-data-uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services [inputs]: stack-data-uploader-inputs/ diff --git a/Agents/OPFAgent/README.md b/Agents/OPFAgent/README.md index 4304a276cd9..9cc84de8047 100644 --- a/Agents/OPFAgent/README.md +++ b/Agents/OPFAgent/README.md @@ -4,7 +4,7 @@ The purpose of OPFAgent is to handle HTTP requests to perform Optimal Power Flow (OPF) analysis on a power network instantiated in the knowledge graph. Information about the power network will be retrieved from the triple store and time series data will be extracted from a relational database. After running the simulation, OPF results will be stored back into the relational database as time series data. ## Requirements -- In order to run OPFAgent, a local version (or if you are running in a stack, a stack version) of (TripleStore)AccessAgent needs to be deployed. Refer to [AccessAgent README](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_ACCESS_AGENT/README.md) for Access Agent setup. If running in a stack, create a new namespace in your stack blazegraph called 'storerouter' to store the routing information. Please note that routing information of the target blazegraph should be uploaded accordingly before calling OPFAgent. 
+- In order to run OPFAgent, a local version (or if you are running in a stack, a stack version) of (TripleStore)AccessAgent needs to be deployed. Refer to [AccessAgent README](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent/README.md) for Access Agent setup. If running in a stack, create a new namespace in your stack blazegraph called 'storerouter' to store the routing information. Please note that routing information of the target blazegraph should be uploaded accordingly before calling OPFAgent. - As OPFAgent interacts with time series data stored in a relational database, URL, username and password of the database are required. Refer to the [time series client properties](#time-series-client-properties) section below for more details. diff --git a/Agents/OSMAgent/README.md b/Agents/OSMAgent/README.md index 153052f258d..0a000e4a7bc 100644 --- a/Agents/OSMAgent/README.md +++ b/Agents/OSMAgent/README.md @@ -2,7 +2,7 @@ ## 1. Description The OSMAgent is an agent that works with OpenStreetMap (OSM) data to link them to existing building IRI and instantiate the semantic representation of building usage information from OSM data. The workflow of the agent can be broadly outlined in the following steps: -1) Categorize OSM tags according to [OntoBuiltEnvironment](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuiltenv) concept. +1) Categorize OSM tags according to [OntoBuiltEnvironment](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobuiltenv) concept. 2) Identify and match OSM data with the 3D buildings uploaded as CityGML data and LoD0 footprint. This is performed by assigning building IRI to OSM data through matching the geometry of the OSM data to the 3D buildings' footprint. 3) Calculate building usage share for all OSM data with tagged building IRI and non-null usage information. 
4) If land use data is available, for 3D buildings without tagged OSM usage, the agent will tag it with the corresponding land use. @@ -13,11 +13,11 @@ After running the OSMAgent, the results can be retrieved through: - `building_iri` - Refers to the IRI of the building. - `propertyusage_iri` - Refers to the IRI of the propertyusage. -- `ontobuilt` - Refers to the [OntoBuiltEnvironment](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuiltenv) usage category of the building. +- `ontobuilt` - Refers to the [OntoBuiltEnvironment](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobuiltenv) usage category of the building. - `usageshare` - Refers to the usage proportion of each `ontobuilt` in a building. - `name` - Refers to the name of the building derived from OSM data. -2) A geoserver layer with the workspace name `twa` and layer name `building_usage`, this layer contains all the information to display the buildings with [TWA-VF](https://github.com/cambridge-cares/TheWorldAvatar/tree/1671-dev-update-osmagent-to-new-building-workflow/web/twa-vis-framework) using the [data.json](stack-manager-config/data/webspace/data.json). +2) A geoserver layer with the workspace name `twa` and layer name `building_usage`, this layer contains all the information to display the buildings with [TWA-VF](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/web/twa-vis-framework) using the [data.json](stack-manager-config/data/webspace/data.json). ## 2. Prerequisites @@ -30,7 +30,7 @@ In the [resource folder](osmagent/src/main/resources/), there are two CSV files The agent has been implemented to work in the stack. Follow the instructions in the [stack-manager]'s README to set up the stack. ### 2.3. 
CityDb -The agent works with 3D buildings uploaded from CityGML data, follow the instructions in the [stack-data-uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader#citydb-data)'s README. +The agent works with 3D buildings uploaded from CityGML data, follow the instructions in the [stack-data-uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader#citydb-data)'s README. ### 2.4. Uploading Raw Data #### 2.4.1. OSM Data @@ -142,14 +142,14 @@ The debugger port will be available at 5005. The result of OSMAgent - Building Usages is designed to be compatible with TWA-VF and queryable via FeatureInfoAgent. #### Setting up FIAgent -1) Place [`building_usage.sparql`](stack-manager-config/data/fia-queries/queries/building_usage.sparql) and [`fia-config.json`](stack-manager-config/data/fia-queries/queries/fia-config.json) inside [`stack-manager/inputs/data/queries`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/data) as according the volume path specified in the stack-manager config's [`feature-info-agent.json`](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/FeatureInfoAgent/sample/feature-info-agent.json). -2) Spin FeatureInfoAgent up along with the [stack-manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-the-feature-info-agent). +1) Place [`building_usage.sparql`](stack-manager-config/data/fia-queries/queries/building_usage.sparql) and [`fia-config.json`](stack-manager-config/data/fia-queries/queries/fia-config.json) inside [`stack-manager/inputs/data/queries`](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/data) as according the volume path specified in the stack-manager config's [`feature-info-agent.json`](https://github.com/TheWorldAvatar/Feature-Info-Agent/blob/main/sample/feature-info-agent.json). 
+2) Spin FeatureInfoAgent up along with the [stack-manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-the-feature-info-agent). #### Setting up TWA-VF -1) Place [`data.json`](stack-manager-config/data/webspace/data.json) inside [`stack-manager/inputs/data/webspace`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/data), following instruction [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#example---including-a-visualisation) in the stack-manager. +1) Place [`data.json`](stack-manager-config/data/webspace/data.json) inside [`stack-manager/inputs/data/webspace`](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/data), following instruction [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#example---including-a-visualisation) in the stack-manager. -[stack-data-uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services +[stack-data-uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services [stack-data-uploader-inputs]: stack-data-uploader-inputs/ diff --git a/Agents/OntoMatchAgent/README.md b/Agents/OntoMatchAgent/README.md index 0a4a2c01597..aee8e05ca80 100644 --- a/Agents/OntoMatchAgent/README.md +++ b/Agents/OntoMatchAgent/README.md @@ -112,7 +112,7 @@ The Python script ``coordinator.py`` is called with the following parameters: --config the JSON 
config file ``` -The config file is a JSON file containing several blocks for configuration. In the following, we will explain each block for the config file [conf_power_plant_DEU_auto_5_geo_http_link.json](https://github.com/cambridge-cares/TheWorldAvatar/blob/develop/Agents/OntoMatchAgent/conf/power_plant_DEU/conf_power_plant_DEU_auto_5_geo_http_link.json) used above. +The config file is a JSON file containing several blocks for configuration. In the following, we will explain each block for the config file [conf_power_plant_DEU_auto_5_geo_http_link.json](https://github.com/cambridge-cares/TheWorldAvatar/blob/develop/Agents/OntoMatchAgent/tests/conf/conf_power_plant_DEU_auto_geo.json) used above. The first block only specifies the random seed: @@ -199,4 +199,4 @@ Andreas Eibeck, Shaocong Zhang * Preprint [A Simple and Effective Approach to Unsupervised Instance Matching and its Application to Linked Data of Power Plants](https://como.ceb.cam.ac.uk/preprints/293/) * OntoMatchAgent is part of [The World Avatar](http://theworldavatar.com/) at [CARES](https://www.cares.cam.ac.uk/) * [Knowledge graphs](https://como.ceb.cam.ac.uk/research/cps/) at the [Computational Modelling Group](https://como.ceb.cam.ac.uk/) -* [Universal Digital Twin and Knowledge graphs](https://cmclinnovations.com/digitalisation/knowledge-graphs/) at [CMCL](https://cmclinnovations.com/) +* [CMCL](https://cmclinnovations.com/) diff --git a/Agents/PVLibAgent/README.md b/Agents/PVLibAgent/README.md index eb92d501f33..ed23329f873 100644 --- a/Agents/PVLibAgent/README.md +++ b/Agents/PVLibAgent/README.md @@ -1,6 +1,6 @@ # PVLib Agent -This agent is designed to calculate AC and DC Power output from Photovaltaic Panels based on values provided in the properties files or values queried from the knowledge graph. It will then initialise the AC and DC Power as timeseries in the knowledge graph. 
The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the knowledge graph and database and uses [PvLib](https://pvlib-python.readthedocs.io/en/stable/) for it's AC and DC Power calculations. +This agent is designed to calculate AC and DC Power output from Photovoltaic Panels based on values provided in the properties files or values queried from the knowledge graph. It will then initialise the AC and DC Power as timeseries in the knowledge graph. The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the knowledge graph and database and uses [PvLib](https://pvlib-python.readthedocs.io/en/stable/) for its AC and DC Power calculations. For the agent to read data, three property files are required: - One [property file for DC and AC Power instantiation](#dataIRIs-properties) defining the IRIs for each of the keys. @@ -49,7 +49,7 @@ The model_parameters properties contains the parameters required to create a sol - `strings_per_inverter` the number of strings per inverter #### [Option 2] Read Photovoltaic Model Specs from Knowledge Graph -Alternatively, the parameters required to create a solar PV Model can be read from a knowledge graph. This requires an instantiation agent to create a Knowledge Graph filled with PV model parameter values. The [HistoricalNTUEnergy Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/1496-dev-instantiate-historic-ntuenergyconsumptiondata-2/Agents/HistoricalNTUEnergyAgent) provides an example to instantiate a knowledge graph which includes PV model parameters. +Alternatively, the parameters required to create a solar PV Model can be read from a knowledge graph.
This requires an instantiation agent to create a Knowledge Graph filled with PV model parameter values. The [HistoricalNTUEnergy Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HistoricalNTUEnergyAgent) provides an example to instantiate a knowledge graph which includes PV model parameters. ## 2. Weather Data Preparation The agent is designed to work with data from one of three sources: weather stations, irradiance sensors, and the OpenMeteo API. It is necessary to have one of the above data retrieved and instantiated on the knowledge graph before running the agent. @@ -62,7 +62,7 @@ In the event that the weather data is retrieved from the weather station, the re rdf:type ontoems:AirTemperature . om:hasValue . ``` -see [OntoEMS ontology](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoems) for more information. The [NUSDavisWeatherStation Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/NUSDavisWeatherStationAgent) provides an example of the instantiation. +see [OntoEMS ontology](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoems) for more information. The [NUSDavisWeatherStation Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/NUSDavisWeatherStationAgent) provides an example of the instantiation. The PVLib Agent will query for the latest air temperature, wind speed and global horizontal irradiance values from the knowledge graph. @@ -196,10 +196,10 @@ config/ |_ ... |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). 
##### Spin Up Stack -Follow the [steps](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) to spin up the stack. +Follow the [steps](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) to spin up the stack. ##### Run the agent Select from one of the following to read weather data: diff --git a/Agents/PropertyValueEstimationAgent/README.md b/Agents/PropertyValueEstimationAgent/README.md index 6f7c56e7bc6..094edaf7c37 100644 --- a/Agents/PropertyValueEstimationAgent/README.md +++ b/Agents/PropertyValueEstimationAgent/README.md @@ -275,16 +275,16 @@ Markus Hofmeister (mh807@cam.ac.uk), March 2022 [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Derivation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[Derivation Agent configuration]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/python_derivation_agent/pyderivationagent/conf/agent_conf.py +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Derivation Agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[Derivation Agent configuration]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent/pyderivationagent/conf/agent_conf.py [EPC Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EnergyPerformanceCertificateAgent -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [OntoBuiltEnv]: http://www.theworldavatar.com/ontology/ontobuiltenv/OntoBuiltEnv.owl [HM Land Registry Agent]: 
https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HMLandRegistryAgent -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#spinning-up-a-stack -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#spinning-up-a-stack +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [The World Avatar]: https://github.com/cambridge-cares/TheWorldAvatar [Average Square Metre Price Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AverageSquareMetrePriceAgent diff --git a/Agents/RFIDQueryAgent/README.MD b/Agents/RFIDQueryAgent/README.MD index 2097816c569..14dfdb977d3 100644 --- a/Agents/RFIDQueryAgent/README.MD +++ b/Agents/RFIDQueryAgent/README.MD @@ -2,13 +2,13 @@ This agent is able to execute the following routes: 1) Check route. Query for the latest status of the RFID tags (In/Out) and determine whether the tagged object has been "Out" for longer -than allowed. If the tagged object has been "Out" for longer than allowed, an email will be sent to the relevant personnel via the [EmailSender class in the JPS Base Lib](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/email/EmailSender.java). The agent is able to intuitively determine whether the tagged object is a chemical container that contains a chemical species and retrieve the information of the chemical species if possible. More information can be found at the [Check route](#51-check-route) section. +than allowed. 
If the tagged object has been "Out" for longer than allowed, an email will be sent to the relevant personnel via the [EmailSender class in the JPS Base Lib](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/email/EmailSender.java). The agent is able to intuitively determine whether the tagged object is a chemical container that contains a chemical species and retrieve the information of the chemical species if possible. More information can be found at the [Check route](#51-check-route) section. 2) Retrieve data route. Retrieve information relating to a tagged object IRI and return them in the form of a JSONObject. The agent is able to intuitively determine whether the tagged object is a chemical container that contains a chemical species and retrieve the information of the chemical species if possible. More information can be found at the [Retrieve data route](#52-retrieve-data-route) section. -3) Send notification route. Upon receiving the latest status of a tag, the agent will query for all the meta data relevant to the tag and it's tagged object, send out an email containing these information to the relevant personnels via the [EmailSender class in the JPS Base Lib](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/email/EmailSender.java). More information can be found at the [Send notification route](#53-send-notification-route) section. +3) Send notification route. Upon receiving the latest status of a tag, the agent will query for all the meta data relevant to the tag and its tagged object, send out an email containing this information to the relevant personnel via the [EmailSender class in the JPS Base Lib](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/email/EmailSender.java). More information can be found at the [Send notification route](#53-send-notification-route) section.
-The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and [remote store client](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) from the JPS_BASE_LIB to interact with both the KG and database. +The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and [remote store client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) from the JPS_BASE_LIB to interact with both the KG and database. ## 1. Usage @@ -21,9 +21,9 @@ The [next section](#2-requirements) will explain the requirements to run the age 2) It is required to have the [Email Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EmailAgent) set up beforehand. -3) It is required to have some timeseries data that contains the status of the RFID tags (In/Out) already instantiated in the knowledge graph via the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries). +3) It is required to have some timeseries data that contains the status of the RFID tags (In/Out) already instantiated in the knowledge graph via the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries). 
-4) If the tagged object is a chemical container that contain some chemicals; the tag, chemical container, chemical it contains, chemical species label and the species's GHS Hazard Statements(if the species does have GHS Hazard Statements) should be instantiated in the knowledge graph based on several ontologies: [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice), [OntoLab](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontolab), [OntoSpecies](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontospecies), [OntoCAPE](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontocape), [OntoReaction](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoreaction) . An example of the instance can be found below: +4) If the tagged object is a chemical container that contains some chemicals; the tag, chemical container, chemical it contains, chemical species label and the species's GHS Hazard Statements (if the species does have GHS Hazard Statements) should be instantiated in the knowledge graph based on several ontologies: [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice), [OntoLab](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontolab), [OntoSpecies](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontospecies), [OntoCAPE](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontocape), [OntoReaction](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoreaction). An example of the instance can be found below: ``` a ontodevice:RFIDSensor; ontodevice:observes .
diff --git a/Agents/RFIDUpdateAgent/README.md b/Agents/RFIDUpdateAgent/README.md index bbaa182fc1f..5a030affd11 100644 --- a/Agents/RFIDUpdateAgent/README.md +++ b/Agents/RFIDUpdateAgent/README.md @@ -3,7 +3,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding RFID tag data being sent to a RFID servlet. It's only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS base lib to interact with both the KG and database. Before explaining the usage of the agent, we will briefly summarize the RFID API that is contacted by one of the classes in this package to retrieve data. diff --git a/Agents/RenewableEnergyAgents/MetOfficeWindSensorAgent/README.md b/Agents/RenewableEnergyAgents/MetOfficeWindSensorAgent/README.md index 4998b5224cd..d3e1f771b23 100644 --- a/Agents/RenewableEnergyAgents/MetOfficeWindSensorAgent/README.md +++ b/Agents/RenewableEnergyAgents/MetOfficeWindSensorAgent/README.md @@ -1,8 +1,8 @@ # An agent to represent and query UK wind data reported by the Met Office. ### Authors -* [Toby Latcham](tjl47@cam.ac.uk) -* [Sophie Hall](sh2000@cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Toby Latcham](mailto:tjl47@cam.ac.uk) +* [Sophie Hall](mailto:sh2000@cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) The agent processes UK Mean Wind Data downloaded as a CSV file from Met Office Integrated Data Archive System (MIDAS) to represent it in the World Avatar Knowledge Graph (KG) to make it accessible and queryable. 
The data describes the mean wind speed and direction and maximum gust speed, direction and time reported in the context of sensors installed in different locations all over the UK. diff --git a/Agents/RenewableEnergyAgents/README.md b/Agents/RenewableEnergyAgents/README.md index c2c63529408..18dc9f5ab10 100644 --- a/Agents/RenewableEnergyAgents/README.md +++ b/Agents/RenewableEnergyAgents/README.md @@ -100,8 +100,8 @@ In Docker Desktop: [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar [DTVF]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/TWA-Visualisations -[TimeSeriesClient]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries -[py4jps]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_wrapper +[TimeSeriesClient]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries +[py4jps]: https://github.com/TheWorldAvatar/baselib/tree/main/python_wrapper [properties file]: resources/renewable_energy_agents.properties [Create a Mapbox API access token]: https://account.mapbox.com/access-tokens/ [data folder]: https://www.dropbox.com/sh/2dgpwmedboumkkt/AAAPUxMSa5BTw10iPVkReBGaa/Codes/Research%20project%20code?dl=0&subfolder_nav_tracking=1 \ No newline at end of file diff --git a/Agents/ResultedConsumptionCalculationAgent/README.md b/Agents/ResultedConsumptionCalculationAgent/README.md index 888d32a8dc2..dcc5722c95e 100644 --- a/Agents/ResultedConsumptionCalculationAgent/README.md +++ b/Agents/ResultedConsumptionCalculationAgent/README.md @@ -168,7 +168,7 @@ If you started from an empty namespace, or have not instantiate upper level inst Please check if you have created a namespace in the blazegraph, and entered the correct environmental variables in the [agent.env.example](./agent.env.example). 
-Afterwards, run the [upper_level_ontology_update.py](./copcalculationagent/upper_level_ontology_update.py), simply run this command in the powershell terminal: +Afterwards, run the [upper_level_ontology_update.py](./resultedconsumptioncalculationagent/upper_level_ontology_update.py), simply run this command in the powershell terminal: ```bash py ./resultedconsumptioncalculationagent/upper_level_ontology_update.py @@ -201,16 +201,16 @@ Jieyang Xu (jx309@cam.ac.uk), May 2023 [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Derivation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[Derivation Agent configuration]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/python_derivation_agent/pyderivationagent/conf/agent_conf.py +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Derivation Agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[Derivation Agent configuration]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent/pyderivationagent/conf/agent_conf.py [EPC Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-EPCInstantiationAgent/Agents/EnergyPerformanceCertificateAgent -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [OntoRegionalAnalysis]: http://www.theworldavatar.com/ontology/ontoregionalanlysis/OntoRegionalAnalysis.owl [HM Land Registry Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-PropertySalesInstantiationAgent/Agents/HMLandRegistryAgent -[spin up the stack]: 
https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#spinning-up-a-stack -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#spinning-up-a-stack +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [The World Avatar]: https://github.com/cambridge-cares/TheWorldAvatar [Average Square Metre Price Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AverageSquareMetrePriceAgent diff --git a/Agents/RxnOptGoalAgent/README.md b/Agents/RxnOptGoalAgent/README.md index a73cfe109fc..10df55e1cbc 100644 --- a/Agents/RxnOptGoalAgent/README.md +++ b/Agents/RxnOptGoalAgent/README.md @@ -4,7 +4,7 @@ The folder contains the source, resource, and Docker setup files for the Reactio   ## 1. Purpose -The Reaction Optimisation Goal (ROG) Agent is designed to take goal requests, monitor the progress in goal iterations, make decisions based on the latest results, visualise progress in goal iterations, and notify users about the status change throughout the process. It does so by translating the goal request to actionable ontological representations based on concepts defined in [`OntoGoal`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontogoal). These expressions will then be picked up by [`RxnOptGoalIterAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/RxnOptGoalIterAgent) to orchestrate the actual performance of the reaction experiment. 
+The Reaction Optimisation Goal (ROG) Agent is designed to take goal requests, monitor the progress in goal iterations, make decisions based on the latest results, visualise progress in goal iterations, and notify users about the status change throughout the process. It does so by translating the goal request to actionable ontological representations based on concepts defined in [`OntoGoal`](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontogoal). These expressions will then be picked up by [`RxnOptGoalIterAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/RxnOptGoalIterAgent) to orchestrate the actual performance of the reaction experiment.   @@ -227,7 +227,7 @@ pytest tests/test_rxn_opt_dockerised.py --docker-compose=./docker-compose-test-d ### 4.3 Physical test #### 4.3.1 Local test The local integration test using physical equipment is provided in `test_rxn_lab_physical.py`. To run physical test in the lab, please follow below steps: -1. (**ONLY IF** you would like to receive email notifications about the agents operations) Set up email configuration in relevant `tests/env_files/*.env.test`, for details, see [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent#set-up-email-notification-for-exceptions) +1. (**ONLY IF** you would like to receive email notifications about the agents operations) Set up email configuration in relevant `tests/env_files/*.env.test`, for details, see [here](https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent#set-up-email-notification-for-exceptions) 2. Manually spin up docker containers in `tests/docker-compose.test.kg.yml` (this design prevents the test triples being deleted by the teardown function) 3. Open FlowCommander in Windows host machine, load the correct experiment file (`.fcexp`) - you may contact the maintainer of this repo to get it 4. 
Open HPLC software in Windows host machine, load the correct HPLC method, turn on the hardware, queue the analysis sequence, obtain the report folder path diff --git a/Agents/RxnOptGoalIterAgent/README.md b/Agents/RxnOptGoalIterAgent/README.md index aa769ceda35..6ca5f564aee 100644 --- a/Agents/RxnOptGoalIterAgent/README.md +++ b/Agents/RxnOptGoalIterAgent/README.md @@ -3,7 +3,7 @@ The folder contains the source, resource, and Docker setup files for the Reactio ## Purpose -The Reaction Optimisation Goal Iteration (ROGI) Agent is designed to perform iterations of reaction experiment as part of goal-driven reaction optimisation exercise. It operates based on concepts defined in [`OntoGoal`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontogoal) and orchestrates [`DoE Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/DoEAgent), [`VapourtecSchedule Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecScheduleAgent), [`Vapourtec Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecAgent), [`HPLC Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCAgent), and [`HPLCPostPro Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCPostProAgent) to complete one iteration. +The Reaction Optimisation Goal Iteration (ROGI) Agent is designed to perform iterations of reaction experiment as part of goal-driven reaction optimisation exercise. 
It operates based on concepts defined in [`OntoGoal`](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontogoal) and orchestrates [`DoE Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/DoEAgent), [`VapourtecSchedule Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecScheduleAgent), [`Vapourtec Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecAgent), [`HPLC Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCAgent), and [`HPLCPostPro Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCPostProAgent) to complete one iteration. ## Building the Docker image @@ -65,7 +65,7 @@ The following command can be used to install all required packages. () $ python -m pip install -e .[dev] ``` -As `pyderivationagent` library relies on the `py4jps` package, Java 11 is required. For Windows, it is recommended to obtain OpenJDK 11 from [here](https://developers.redhat.com/products/openjdk/download) and follow the [instructions](https://access.redhat.com/documentation/en-us/openjdk/11/html-single/installing_and_using_openjdk_11_for_windows/index). For linux environment, one can install via: +As `pyderivationagent` library relies on the `py4jps` package, Java 11 is required. For Windows, it is recommended to obtain OpenJDK 11 from [here](https://developers.redhat.com/products/openjdk/download) and follow the [instructions](https://docs.redhat.com/en/documentation/red_hat_build_of_openjdk/11/html/installing_and_using_red_hat_build_of_openjdk_11_for_windows/index). 
For linux environment, one can install via: `(Linux)` ```sh diff --git a/Agents/SeaLevelImpactAgent/README.md b/Agents/SeaLevelImpactAgent/README.md index 1d0cda40fe4..1a6a0639914 100644 --- a/Agents/SeaLevelImpactAgent/README.md +++ b/Agents/SeaLevelImpactAgent/README.md @@ -19,7 +19,7 @@ The SeaLevelImpactAgent is an agent that 3) Create geoserver layer for each sealevelprojections ## 2. Prerequisites -This agent is developed as part of the [Singapore-sea-level-rise stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-sea-level-rise-singapore/Deploy/stacks/Singapore-sea-level-rise). +This agent is developed as part of the [Singapore-sea-level-rise stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/Singapore-sea-level-rise). Data in the [Singapore-sea-level-rise stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/Singapore-sea-level-rise) needs to be uploaded by stack-data-uploader before running this agent. diff --git a/Agents/SensorLoggerMobileAppAgent/README.md b/Agents/SensorLoggerMobileAppAgent/README.md index ebed3441197..6b27ece8b55 100644 --- a/Agents/SensorLoggerMobileAppAgent/README.md +++ b/Agents/SensorLoggerMobileAppAgent/README.md @@ -1,11 +1,11 @@ # SensorLoggerMobileAppAgent ## 1. Description -The SensorLoggerMobileAppAgent is an agent which receives HTTP POST requests containing JSON payload sent from the [SensorLogger](https://github.com/tszheichoi/awesome-sensor-logger) mobile application, subsequently instantiate it as time series following the [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice) ontology. The information instantiated from SensorLogger includes: Smartphone device, Acceleration vector, Gravity vector, Magnetic flux density vector, Sound pressure level, Illuminance, Relative brightness, Location. 
+The SensorLoggerMobileAppAgent is an agent which receives HTTP POST requests containing JSON payload sent from the [SensorLogger](https://github.com/tszheichoi/awesome-sensor-logger) mobile application, subsequently instantiate it as time series following the [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice) ontology. The information instantiated from SensorLogger includes: Smartphone device, Acceleration vector, Gravity vector, Magnetic flux density vector, Sound pressure level, Illuminance, Relative brightness, Location. The agent functions as below: 1) The agent receives JSON payload from the SensorLogger and parse the received JSON Array. -2) It downsamples the received timeseries data via the [Downsampling](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/core/downsampling) library, and instantiates the data using the [TimeSeriesClient](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries). -3) The [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice) triples are instantiated in Ontop. +2) It downsamples the received timeseries data via the [Downsampling](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/core/downsampling) library, and instantiates the data using the [TimeSeriesClient](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries). +3) The [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice) triples are instantiated in Ontop. ### 1.1 Concurrency Design The agent manages a phone ID to recording task map, where each phone ID will have a corresponding recording task. The recording task is responsible for sensor data processing, knowldge graph instantiation and postgres table initiation and data upload. 
Each recording task has different types of sensor processors, which are responsible for the sensor IRI query and generation, downsampling and data formulation for individual types. The following class diagram highlight the relations between class and omit some details of some classes for simlicity. @@ -234,5 +234,5 @@ The debugger port will be available at 5005. ### 5.3 Testing resources You may use the [SamplePOST request](sensorloggermobileappagent/src/main/resources/SamplePOST.http) for testing any changes made to the code, this HTTP request contains a sample of the recording for testing purposes. -[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services \ No newline at end of file +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services \ No newline at end of file diff --git a/Agents/SmartMeterAgent/README.md b/Agents/SmartMeterAgent/README.md index 95f34d09f3c..41f9369c0ba 100644 --- a/Agents/SmartMeterAgent/README.md +++ b/Agents/SmartMeterAgent/README.md @@ -4,7 +4,7 @@ The purpose of Smart Meter Agent is to handle HTTP requests to retrieve latest reading for the current time from a database storing smart meter readings every minute, or retrieve all valid historical readings from a database or a CSV file, and upload the data to instantiated time series in the KG. ## Requirements -- In order to run SmartMeterAgent, a local version (or if you are running in a stack, a stack version) of (TripleStore)AccessAgent needs to be deployed. Refer to [AccessAgent README](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_ACCESS_AGENT/README.md) for Access Agent setup. 
If running in a stack, create a new namespace in your stack blazegraph called 'storerouter' to store the routing information. Please note that routing information of the target blazegraph should be uploaded accordingly before calling SmartMeterAgent. +- In order to run SmartMeterAgent, a local version (or if you are running in a stack, a stack version) of (TripleStore)AccessAgent needs to be deployed. Refer to [AccessAgent README](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent/README.md) for Access Agent setup. If running in a stack, create a new namespace in your stack blazegraph called 'storerouter' to store the routing information. Please note that routing information of the target blazegraph should be uploaded accordingly before calling SmartMeterAgent. - The target blazegraph should contain a power network instantiated according to [OntoPowSys](http://www.theworldavatar.com/ontology/ontopowsys/), and the related time series should be instantiated before calling SmartMeterAgent. @@ -54,7 +54,7 @@ The agent is reachable on localhost port 39998 by default (you can change this i ``` docker build -t "smart-meter-agent:1.0.0" . ``` -- Adjust the `access-agent.json` file in `JPS_ACCESS_AGENT/access-agent-dev-stack` according to [AccessAgent README](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_ACCESS_AGENT/README.md), and copy it into `inputs/config/services` folder of the stack manager. +- Adjust the `access-agent.json` file in `JPS_ACCESS_AGENT/access-agent-dev-stack` according to [AccessAgent README](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent/README.md), and copy it into `inputs/config/services` folder of the stack manager. - Copy the `smart-meter-agent.json` file in `stack-manager-input-config` folder into the `inputs/config/services` folder of the stack manager. - Start the stack manager as usual. 
This should start an access agent container and a SmartMeterAgent container as part of your stack. diff --git a/Agents/ThingsBoardAgent/README.md b/Agents/ThingsBoardAgent/README.md index 2852f30c8b2..f7b0a7bd418 100644 --- a/Agents/ThingsBoardAgent/README.md +++ b/Agents/ThingsBoardAgent/README.md @@ -3,7 +3,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding sensor measurements send to a ThingsBoard server. It's only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS base lib to interact with both the KG and database. Before explaining the usage of the agent, we will briefly summarize the ThingsBoard API that is contacted by one of the classes in this package to retrieve data. diff --git a/Agents/ThingspeakAgent/README.MD b/Agents/ThingspeakAgent/README.MD index a3b79d4ecd2..a535c3f09e3 100644 --- a/Agents/ThingspeakAgent/README.MD +++ b/Agents/ThingspeakAgent/README.MD @@ -2,7 +2,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the Thingspeak cloud server. Its only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. 
The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the KG and database. Before explaining the usage of the agent, we will briefly summarize the Thingspeak API that is diff --git a/Agents/TimeSeriesExample/README.md b/Agents/TimeSeriesExample/README.md index 55ba46a6d50..f3e0c83e2dd 100644 --- a/Agents/TimeSeriesExample/README.md +++ b/Agents/TimeSeriesExample/README.md @@ -119,7 +119,7 @@ In Docker Desktop: [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar [DTVF]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/TWA-Visualisations -[TimeSeriesClient]: https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries -[py4jps]: https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/python_wrapper +[TimeSeriesClient]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries +[py4jps]: https://github.com/TheWorldAvatar/baselib/tree/main/python_wrapper [properties file]: resources/ts_example.properties [http://localhost:65080/]: http://localhost:65080/ diff --git a/Agents/TrafficIncidentAgent/README.md b/Agents/TrafficIncidentAgent/README.md index 2a862ba69df..d3787d53e73 100644 --- a/Agents/TrafficIncidentAgent/README.md +++ b/Agents/TrafficIncidentAgent/README.md @@ -52,6 +52,6 @@ Sun Xin Yu (https://github.com/Echomo-Xinyu)
October 2024 -[stack-data-uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services \ No newline at end of file +[stack-data-uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services \ No newline at end of file diff --git a/Agents/TrajectoryQueryAgent/README.md b/Agents/TrajectoryQueryAgent/README.md index 3e76153f1b0..1f7f40c2a9d 100644 --- a/Agents/TrajectoryQueryAgent/README.md +++ b/Agents/TrajectoryQueryAgent/README.md @@ -113,7 +113,7 @@ Response given in the form of {"result":[{"month":1,"year":2024,"days":"{1,2,3}"}],"message":"Succeed"} ``` -[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services [line layer (device id)]: ./trajectoryqueryagent/src/main/resources/line_layer_device_id.sql [line layer (user id)]: ./trajectoryqueryagent/src/main/resources/line_layer_user_id.sql diff --git a/Agents/TravellingSalesmanAgent/README.md b/Agents/TravellingSalesmanAgent/README.md index 0011723c770..31ac8c2a320 100644 --- a/Agents/TravellingSalesmanAgent/README.md +++ b/Agents/TravellingSalesmanAgent/README.md @@ -11,7 
+11,7 @@ The TravellingSalesmanAgent is an agent that ### 2.1. Stack Set Up -The agent has been implemented to work in the stack. Follow the instructions in the [stack-manager]'s README to set up the stack. Several pre-configured examples for the different use cases for King's Lynn can be found in [stack-data-uploader-inputs](stack-data-uploader-inputs/). +The agent has been implemented to work in the stack. Follow the instructions in the [stack-manager]'s README to set up the stack. Several pre-configured examples for the different use cases for King's Lynn can be found in [inputs](inputs/). ## 3. Agent Configuration @@ -101,7 +101,7 @@ The debugger port will be available at 5005. ### 7.1 Feature Info Agent -1) In the directory [stack-manager-config/data/webspace/](stack-manager-config/data/webspace/), contains the TWA-VF `data.json` prepared for the different scnearios that is meant to be placed inside [`stack-manager/inputs/data/webspace`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/data), following instruction [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#example---including-a-visualisation). +1) In the directory [stack-manager-config/data/webspace/](stack-manager-config/data/webspace/), contains the TWA-VF `data.json` prepared for the different scenarios that is meant to be placed inside [`stack-manager/inputs/data/webspace`](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/data), following instruction [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#example---including-a-visualisation).
-[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services diff --git a/Agents/UserAgent/README.md b/Agents/UserAgent/README.md index 32a99da55d7..70b8024cd83 100644 --- a/Agents/UserAgent/README.md +++ b/Agents/UserAgent/README.md @@ -93,5 +93,5 @@ To debug the agent, replace [`user-agent-debug.json`](stack-manager-config/input Spin up with `./stack.sh start ` in the [stack-manager]'s main folder. The debugger port will be available at 5005. -[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services \ No newline at end of file +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services \ No newline at end of file diff --git a/Agents/UtilityCostCalculationAgent/README.md b/Agents/UtilityCostCalculationAgent/README.md index ee2da3c9d84..ad9fd2d5cc8 100644 --- a/Agents/UtilityCostCalculationAgent/README.md +++ b/Agents/UtilityCostCalculationAgent/README.md @@ -131,7 +131,7 @@ If you started from an empty namespace, or have not instantiate upper level inst Please check if you have created a namespace in the blazegraph, and entered the correct environmental variables in the [agent.env.example](./agent.env.example). 
-Afterwards, run the [upper_level_ontology_update.py](./copcalculationagent/upper_level_ontology_update.py), simply run this command in the powershell terminal: +Afterwards, run the [upper_level_ontology_update.py](./utiliycostcalculationagent/upper_level_ontology_update.py), simply run this command in the powershell terminal: ```bash py ./utilitycostcalculationagent/upper_level_ontology_update.py @@ -147,7 +147,7 @@ py ./utilitycostcalculationagent/markup.py # Authors # Jieyang Xu (jx309@cam.ac.uk), May 2023 -[markup.py]:./utilitycostcalculationagent/markup.py +[markup.py]:./utiliycostcalculationagent/markup.py [home page]:https://htmlpreview.github.io/?https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/UtilityCostCalculationAgent/index.html [CopCalculationAgent]:https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/CopCalculationAgent [OntoCAPE]:http://theworldavatar.com/ontology/ontocape/ @@ -166,16 +166,16 @@ Jieyang Xu (jx309@cam.ac.uk), May 2023 [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Derivation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[Derivation Agent configuration]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/python_derivation_agent/pyderivationagent/conf/agent_conf.py +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Derivation Agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[Derivation Agent configuration]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent/pyderivationagent/conf/agent_conf.py [EPC Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-EPCInstantiationAgent/Agents/EnergyPerformanceCertificateAgent 
-[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [OntoRegionalAnalysis]: http://www.theworldavatar.com/ontology/ontoregionalanlysis/OntoRegionalAnalysis.owl [HM Land Registry Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-PropertySalesInstantiationAgent/Agents/HMLandRegistryAgent -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#spinning-up-a-stack -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#spinning-up-a-stack +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [The World Avatar]: https://github.com/cambridge-cares/TheWorldAvatar [Average Square Metre Price Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AverageSquareMetrePriceAgent diff --git a/Agents/ZeoliteAgent/README.md b/Agents/ZeoliteAgent/README.md index aec6025cb87..0b97df35fed 100644 --- a/Agents/ZeoliteAgent/README.md +++ b/Agents/ZeoliteAgent/README.md @@ -66,7 +66,7 @@ will install an earlier version of the package. `() $ pip install pyuploader` -More details at the [TWA web-site](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_uploader). +More details at the [TWA web-site](https://github.com/TheWorldAvatar/baselib/tree/main/python_uploader). ### Prepare and run the code Copy the code and bat scripts to the current directory. 
diff --git a/Agents/_DerivationPaper/README.md b/Agents/_DerivationPaper/README.md index 3760aee9960..f3845135588 100644 --- a/Agents/_DerivationPaper/README.md +++ b/Agents/_DerivationPaper/README.md @@ -230,7 +230,7 @@ Jiaru Bai (jb2197@cam.ac.uk), December 2022 [Docker environment]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Environment [CMCL Docker image registry]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry [DTVF]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/TWA-Visualisations -[example Mapbox visualisation]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/web/digital-twin-vis-framework/example-mapbox-vis +[example Mapbox visualisation]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/web/twa-vis-framework/example [wait-for-it]: https://github.com/vishnubob/wait-for-it diff --git a/Agents/utils/chemistry_and_robots/README.md b/Agents/utils/chemistry_and_robots/README.md index 0c17bafefe4..eac357e003d 100644 --- a/Agents/utils/chemistry_and_robots/README.md +++ b/Agents/utils/chemistry_and_robots/README.md @@ -1,8 +1,8 @@ # Description # The `chemistry_and_robots` package provides a collection of dataclasses and SPARQL query/update functions that are used by a series of agents capable of conducting automated reaction experiments as part of [TheWorldAvatar](https://github.com/cambridge-cares/TheWorldAvatar) project. `chemistry_and_robots` uses `pyderivationagent>=1.1.0` to access `PySparqlClient` provided in `pyderivationagent.kg_operations` to form its SPARQL query/update utilities. 
For technical details, below are a few useful links: -- [`pyderivationagent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent) - python wrapper for derivation agent -- [`py4jps`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_wrapper) - python wrapper for jps-base-lib +- [`pyderivationagent`](https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent) - python wrapper for derivation agent +- [`py4jps`](https://github.com/TheWorldAvatar/baselib/tree/main/python_wrapper) - python wrapper for jps-base-lib # Installation # For development and testing reasons, follow below instructions to get a copy of the project up and running on your local system. @@ -50,7 +50,7 @@ This package provides ontological data models from five main ontologies, namely: All of the concepts are directly or indirectly inherited from the `BaseOntology` class which itself is inherited from `pydantic.BaseModel`. The design of these data model classes serve as a persistence layer between the agent operations in chemistry_and_robots and the data stored in the knowledge graph. Additionally, all TBox IRIs involved in the chemistry_and_robots as part of The World Avatar project are provided in the `chemistry_and_robots.data_model.iris.py`. Developer can import this module to make use of the concepts and relationships. ## SPARQL client -A SPARQL client class `chemistry_and_robots.kg_operations.sparql_client.ChemistryAndRobotsSparqlClient` is provided as part of this package. It provides a few SPARQL query and update functions that are helpful in handling data instantiated using the above ontology data models. 
These functions have been used to develope a few python agents, for more details, please refer to: [`DoEAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/DoEAgent), [`VapourtecExecutionAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecExecutionAgent), [`HPLCPostProAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCPostProAgent), [`VapourtecAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecAgent), and [`HPLCAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCAgent). +A SPARQL client class `chemistry_and_robots.kg_operations.sparql_client.ChemistryAndRobotsSparqlClient` is provided as part of this package. It provides a few SPARQL query and update functions that are helpful in handling data instantiated using the above ontology data models. These functions have been used to develop a few python agents, for more details, please refer to: [`DoEAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/DoEAgent), [`VapourtecScheduleAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecScheduleAgent), [`HPLCPostProAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCPostProAgent), [`VapourtecAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecAgent), and [`HPLCAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCAgent). ## Test Unit and integration tests are written for this package. The tests should pass if you already correctly setup the [Docker Environment](https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Environment) and obtained access to [Docker Image Registry](https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry).
To run tests, please execute below commands (remember to replace the `` with actual path): diff --git a/Agents/utils/python-utils/README.md b/Agents/utils/python-utils/README.md index 7ec904a1e20..70692b96cb2 100644 --- a/Agents/utils/python-utils/README.md +++ b/Agents/utils/python-utils/README.md @@ -1,6 +1,6 @@ # TheWorldAvatar - Python Utils -This Python package contains a number of logging utilities that may be useful to any Python-based project within The World Avatar (TWA) ecosystem. At the time of writing, this project builds an isolated package named `agentlogging` that users can import in their own code. In the future, this package may be bundled with the Python wrapper for the JPS Base Library so that only one dependency is required. **Deprecation Warning: `agentlogging` is packaged with `py4jps` as of version [1.0.29](https://pypi.org/project/py4jps/1.0.29/). Please do NOT use or develop this isolated package further. Instead, please use and continue develop [`TheWorldAvatar/JPS_BASE_LIB/python_wrapper/twa/agentlogging`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_wrapper/twa/agentlogging).** +This Python package contains a number of logging utilities that may be useful to any Python-based project within The World Avatar (TWA) ecosystem. At the time of writing, this project builds an isolated package named `agentlogging` that users can import in their own code. In the future, this package may be bundled with the Python wrapper for the JPS Base Library so that only one dependency is required. **Deprecation Warning: `agentlogging` is packaged with `py4jps` as of version [1.0.29](https://pypi.org/project/py4jps/1.0.29/). Please do NOT use or develop this isolated package further. 
Instead, please use and continue develop [`TheWorldAvatar/JPS_BASE_LIB/python_wrapper/twa/agentlogging`](https://github.com/TheWorldAvatar/baselib/tree/main/python_wrapper/twa/agentlogging).** ## Functions diff --git a/Deploy/stacks/AI4PublicHealth/Stack_Deployment/README.md b/Deploy/stacks/AI4PublicHealth/Stack_Deployment/README.md index b3f051c8873..5767d744232 100644 --- a/Deploy/stacks/AI4PublicHealth/Stack_Deployment/README.md +++ b/Deploy/stacks/AI4PublicHealth/Stack_Deployment/README.md @@ -140,7 +140,7 @@ Jiying Chen (jc2341@cam.ac.uk), Nov 2024 [Remote - SSH]: https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-ssh [Docker]: https://code.visualstudio.com/docs/containers/overview [REST Client]: https://marketplace.visualstudio.com/items?itemName=humao.rest-client -[Stack Manager README]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md +[Stack Manager README]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md [OntoFHRS]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-ai4ph-ontologies/JPS_Ontology/ontology/ontofhrs [OntoPOI]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-ai4ph-ontologies/JPS_Ontology/ontology/ontopoi [OntoGreenspace]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-ai4ph-ontologies/JPS_Ontology/ontology/ontogreenspace @@ -153,9 +153,9 @@ Jiying Chen (jc2341@cam.ac.uk), Nov 2024 [Mapbox visualisation guidance]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/web/twa-vis-framework/docs/mapbox.md -[common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Stack Data Uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md +[common stack scripts]: 
https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Stack Data Uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md [AccessAgent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent @@ -164,8 +164,8 @@ Jiying Chen (jc2341@cam.ac.uk), Nov 2024 [AirQuality Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AirQualityAgent -[xml_converter]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-AI-for-Healthcare/Deploy/stacks/AI4PublicHealth/Common_Script/xml_converter -[FoodHygieneRating]: ./stack-data-uploader/obda_mappings/FoodHygieneRating.obda -[here]: https://github.com/cambridge-cares/TheWorldAvatar/blob/dev-AI-for-Healthcare/Agents/FenlandTrajectoryAgent/README.md +[xml_converter]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/AI4PublicHealth/Common_Script/xml_converter +[FoodHygieneRating]: ./stack-data-uploader/data/FoodHygiene/FoodHygieneRating.obda +[here]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/FenlandTrajectoryAgent/README.md [OntoDevice]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-ai4ph-ontologies/JPS_Ontology/ontology/ontodevice -[mapping folder]: ./stack-data-uploader/obda_mappings +[mapping folder]: ./stack-data-uploader/data diff --git a/Deploy/stacks/Chile/README.md b/Deploy/stacks/Chile/README.md index 70758fd35fa..9012bfd8c63 100644 --- a/Deploy/stacks/Chile/README.md +++ b/Deploy/stacks/Chile/README.md @@ -151,11 +151,10 @@ Use (ST_Dump(...)).* to expand all of the columns, this should work for all func * https://postgis.net/docs/RT_reference.html * https://epsg.io/32719 * https://postgis.net/docs/reference.html -* https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -* 
https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-data-uploader/README.md +* https://github.com/TheWorldAvatar/stack/blob/main/stack-manager/README.md +* https://github.com/TheWorldAvatar/stack/blob/main/stack-data-uploader/README.md * https://github.com/cambridge-cares/TheWorldAvatar/blob/main/web/twa-vis-framework/docs/mapbox.md * https://slideplayer.com/slide/7417666/ -* https://manifold.net/doc/mfd9/sql_example__custom_contour_intervals.html * https://nronnei.github.io/blog/2017/03/creating-rasters-from-scratch-in-postgis-pt3/ * https://www.w3schools.com/SQL/sql_update.asp * https://docs.oracle.com/en/database/oracle/oracle-database/18/geors/raster-algebra-and-analytics.html#GUID-C75744C9-FA04-4391-96F2-59EF2EA212FF diff --git a/Deploy/stacks/KingsLynn/StackDeployment/README.md b/Deploy/stacks/KingsLynn/StackDeployment/README.md index 26a1bc2820f..cb5f5261592 100644 --- a/Deploy/stacks/KingsLynn/StackDeployment/README.md +++ b/Deploy/stacks/KingsLynn/StackDeployment/README.md @@ -480,15 +480,15 @@ HAVING(?streets > 1) [MetOffice My Account]: https://register.metoffice.gov.uk/MyAccountClient/account/view -[common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Stack data uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[Stack manager]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md +[common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Stack data uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[Stack manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md [AccessAgent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent [CityImportAgent]: https://github.com/cambridge-cares/CitiesKG/tree/develop/agents [TSDAgent]: 
https://github.com/cambridge-cares/CitiesKG/tree/develop/agents -[UPRN Agent]: https://github.com/cambridge-cares/CitiesKG/tree/uprn-agent +[UPRN Agent]: https://github.com/cambridge-cares/CitiesKG/tree/develop/agents [Building Matching Readme]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/BuildingMatchingAgent/README.md [EPC Agent]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/EnergyPerformanceCertificateAgent/README.md [Average Square Metre Price Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AverageSquareMetrePriceAgent/README.md @@ -503,13 +503,13 @@ HAVING(?streets > 1) [AirQuality Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AirQualityAgent -[data.json]: /StackDeployment/inputs/stack-manager/inputs/data/visualisation/data.json -[Agent docker-compose file folder]: /StackDeployment/inputs/docker_compose_files -[resources]: /StackDeployment/resources -[river_level_agent input folder]: /StackDeployment/inputs/river_level_agent +[data.json]: inputs/stack-manager/inputs/data/visualisation/data.json +[Agent docker-compose file folder]: inputs/docker_compose_files +[resources]: resources +[river_level_agent input folder]: inputs/river_level_agent [UPRN Agent in batches]: ../Utilities/uprn_agent/run_uprn_agent_in_chunks.py [Utilities]: ../Utilities -[routing.json]: /StackDeployment/inputs/access_agent/routing.json +[routing.json]: inputs/access_agent/routing.json [CKG config.properties]: https://github.com/cambridge-cares/CitiesKG/blob/develop/agents/src/main/resources/config.properties \ No newline at end of file diff --git a/Deploy/stacks/Pirmasens/README.md b/Deploy/stacks/Pirmasens/README.md index f6de2fb533d..e18577d3363 100644 --- a/Deploy/stacks/Pirmasens/README.md +++ b/Deploy/stacks/Pirmasens/README.md @@ -154,8 +154,11 @@ Kok Foong Lee (kokfoong.lee@cares.cam.ac.uk), November 2023 [grafana-prep readme]: ./stack-manager/inputs/data/grafana-prep/readme.txt 
[chained derivations]: https://lucid.app/publicSegments/view/8dfdf102-bb7d-47de-bb52-c22d86a50bcf/image.jpeg + + [timeseries.properties]: https://github.com/cambridge-cares/pirmasens/blob/main/districtheating/resources/timeseries.properties [dataproperties.py]: https://github.com/cambridge-cares/pirmasens/blob/main/districtheating/resources/dataproperties.py + [Forecasting Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/ForecastingAgent @@ -163,4 +166,4 @@ Kok Foong Lee (kokfoong.lee@cares.cam.ac.uk), November 2023 [DH Emission Estimation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/DistrictHeatingEmissionEstimationAgent [Aermod Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_VIRTUALSENSOR/AermodAgent [DH Optimisation Trigger Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/DistrictHeatingOptimisationTriggerAgent -[Stack manager]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md +[Stack manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md diff --git a/Deploy/stacks/UK-building-retrofit/StackDeployment/README.md b/Deploy/stacks/UK-building-retrofit/StackDeployment/README.md index 8ed82b9858d..4f8f15eee68 100644 --- a/Deploy/stacks/UK-building-retrofit/StackDeployment/README.md +++ b/Deploy/stacks/UK-building-retrofit/StackDeployment/README.md @@ -135,7 +135,7 @@ The [Feature Info Agent] serves as an access point for the visualisation, enabli [Feature Info Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent -[common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts +[common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts [Stack data uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader [Stack manager]: 
https://github.com/TheWorldAvatar/stack/blob/main/stack-manager/README.md [fia_queries]: ./Stack-manager/inputs/data/fia-queries diff --git a/web/twa-vis-framework/example/README.md b/web/twa-vis-framework/example/README.md index c8954780db8..f9a89acc594 100644 --- a/web/twa-vis-framework/example/README.md +++ b/web/twa-vis-framework/example/README.md @@ -2,7 +2,7 @@ This directory contains the sample data, configuration, and miscellaneous resources needed to spin up a TWA Stack with some example visualisations. -Please note that this example is aimed as showing new users what the visualisation framework is capable of, and to create an experimentation space in which data formats, styling, and other visualisation functionality can be played with. The generation of the sample data, and its stack configuration files are not explained here; for more details on this, see the [TWA Stack](../../../Deploy/stacks/dynamic/stack-manager) documentation. +Please note that this example is aimed as showing new users what the visualisation framework is capable of, and to create an experimentation space in which data formats, styling, and other visualisation functionality can be played with. The generation of the sample data, and its stack configuration files are not explained here; for more details on this, see the [TWA Stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) documentation. 
## Mapbox From a46e9d96bbf3f1fd344d02e136a8993f8eac206f Mon Sep 17 00:00:00 2001 From: Myles MacDonald Date: Sat, 12 Apr 2025 16:35:20 -0400 Subject: [PATCH 06/30] fix broken links and configure github action --- .github/workflows/twa-md-push.json | 96 +++++++++++++++++++ .github/workflows/twa-md-push.yml | 2 +- .mlc_config.json | 48 ---------- .../README.md | 2 +- Agents/EquipmentBookingAgent/README.md | 12 +-- Agents/OSMAgent/README.md | 4 +- .../osmagent_pirmasens/ontobuiltenv/README.md | 2 +- .../README.md | 6 +- Agents/SeaLevelImpactAgent/README.md | 2 +- .../vendor/erusev/parsedown-extra/README.md | 2 +- .../site/vendor/erusev/parsedown/README.md | 2 +- 11 files changed, 113 insertions(+), 65 deletions(-) create mode 100644 .github/workflows/twa-md-push.json delete mode 100644 .mlc_config.json diff --git a/.github/workflows/twa-md-push.json b/.github/workflows/twa-md-push.json new file mode 100644 index 00000000000..716e99fccd0 --- /dev/null +++ b/.github/workflows/twa-md-push.json @@ -0,0 +1,96 @@ +{ + "ignorePatterns": [ + { + "pattern": "http://localhost(:\\d+)?(/[^ ]*)?" + }, + { + "pattern": "http://HOST:PORT?(/[^ ]*)?" + }, + { + "pattern": "https://www.cmegroup.com?(/[^ ]*)?" + }, + { + "pattern": "https?://(www\\.)?theworldavatar.com(/[^ ]*)?" + }, + { + "pattern": "https://abc.xyz?(/[^ ]*)?" + }, + { + "pattern": "https://maven.pkg.github.com/cambridge-cares/TheWorldAvatar/?" + }, + { + "pattern": "https?://twitter.com/?" + }, + { + "pattern": "https?://www.w3.org/?" + }, + { + "pattern": "https?://www.ontology-of-units-of-measure.org/?" + }, + { + "pattern": "https?://kg.cmclinnovations.com/mods-agent(/[^ ]*)?" + }, + { + "pattern": "https?://github.com/[^/]+/[^/]+/issues(/[^ ]*)?" + }, + { + "pattern": "https?://www.dropbox.com/?" + }, + { + "pattern": "https?://docs.unity3d.com/?" 
+ }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoassetmanagement" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobim" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontolab" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotechnicalsystem" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/pirmasens/tree/main/districtheating_stack" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/pirmasens/tree/main/psdt/stack\\-data\\-uploader\\-inputs/data/dlm" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/pirmasens/blob/main/psdt/stack\\-data\\-uploader\\-inputs/config/dlm\\.json" + }, + { + "pattern": "https://caret\\.io\\?ref=parsedown" + }, + { + "pattern": "http://caret\\.io\\?ref=parsedown" + }, + { + "pattern": "tjl47@cam.ac.uk" + }, + { + "pattern": "sh2000@cam.ac.uk" + }, + { + "pattern": "msff2@cam.ac.uk" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuiltenv" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/dev\\-sea\\-level\\-rise\\-singapore/Deploy/stacks/Singapore\\-sea\\-level\\-rise" + } + ], + "aliveStatusCodes": [ + 200, + 403, + 0 + ] +} \ No newline at end of file diff --git a/.github/workflows/twa-md-push.yml b/.github/workflows/twa-md-push.yml index 3c18b0fa1a4..3f93860c5d4 100644 --- a/.github/workflows/twa-md-push.yml +++ b/.github/workflows/twa-md-push.yml @@ -22,5 +22,5 @@ jobs: - name: Markdown links check uses: 
ruzickap/action-my-markdown-link-checker@v1 with: - config_file: .github/workflows/twa-md-push-config.json + config_file: .github/workflows/twa-md-push.json debug: true \ No newline at end of file diff --git a/.mlc_config.json b/.mlc_config.json deleted file mode 100644 index 94f184d79b3..00000000000 --- a/.mlc_config.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "ignorePatterns": [ - { - "pattern": "http://localhost(:\\d+)?(/[^ ]*)?" - }, - { - "pattern": "http://HOST:PORT?(/[^ ]*)?" - }, - { - "pattern": "https://www.cmegroup.com?(/[^ ]*)?" - }, - { - "pattern": "https?://(www\\.)?theworldavatar.com(/[^ ]*)?" - }, - { - "pattern": "https://abc.xyz?(/[^ ]*)?" - }, - { - "pattern": "https://maven.pkg.github.com/cambridge-cares/TheWorldAvatar/?" - }, - { - "pattern": "https?://twitter.com/?" - }, - { - "pattern": "https?://www.w3.org/?" - }, - { - "pattern": "https?://www.ontology-of-units-of-measure.org/?" - }, - { - "pattern": "https?://kg.cmclinnovations.com/mods-agent(/[^ ]*)?" - }, - { - "pattern": "https?://github.com/[^\/]+/[^\/]+/issues(/[^ ]*)?" - }, - { - "pattern": "https?://www.dropbox.com/?" - }, - { - "pattern": "https?://docs.unity3d.com/?" 
- } - ], - "aliveStatusCodes": [ - 200, - 403, - 0 - ] -} \ No newline at end of file diff --git a/Agents/DistrictHeatingOptimisationAgent/README.md b/Agents/DistrictHeatingOptimisationAgent/README.md index c0a451b140c..66eb54198d9 100644 --- a/Agents/DistrictHeatingOptimisationAgent/README.md +++ b/Agents/DistrictHeatingOptimisationAgent/README.md @@ -213,7 +213,7 @@ Markus Hofmeister (mh807@cam.ac.uk), November 2023 [chained derivations markup]: https://lucid.app/publicSegments/view/de4041e1-aee2-44d9-82ca-fffca25f5133/image.png [OntoTimeSeries]: https://miro.com/app/board/uXjVPFaO5As=/ [OntoHeatNet]: https://miro.com/app/board/uXjVOhnB9_4=/ -[stack deployment]: https://github.com/cambridge-cares/pirmasens/tree/main/districtheating_stack +[stack deployment]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/psdt [docker compose file]: ./docker-compose.yml diff --git a/Agents/EquipmentBookingAgent/README.md b/Agents/EquipmentBookingAgent/README.md index 7c1009703a1..3101af42768 100644 --- a/Agents/EquipmentBookingAgent/README.md +++ b/Agents/EquipmentBookingAgent/README.md @@ -67,12 +67,12 @@ Prefix | Namespace --- | --- [bot](https://w3c-lbd-cg.github.io/bot/) | `https://w3id.org/bot#` [fibo](https://github.com/edmcouncil/fibo/tree/master/FND/AgentsAndPeople) | `https://spec.edmcouncil.org/fibo/ontology/FND/AgentsAndPeople/People/` -[ontoam](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoassetmanagement) | `https://www.theworldavatar.com/kg/ontoassetmanagement/` -[ontobim](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobim) | `https://www.theworldavatar.com/kg/ontobim/` -[ontodevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice) | `https://www.theworldavatar.com/kg/ontodevice/` -[ontolab](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontolab) | 
`https://www.theworldavatar.com/kg/ontolab/` -[ontotechsystem](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotechnicalsystem) | `https://www.theworldavatar.com/kg/ontotechnicalsystem/` -[ontotimeseries](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries) | `https://www.theworldavatar.com/kg/ontotimeseries/` +[ontoam](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoassetmanagement) | `https://www.theworldavatar.com/kg/ontoassetmanagement/` +[ontobim](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobim) | `https://www.theworldavatar.com/kg/ontobim/` +[ontodevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice) | `https://www.theworldavatar.com/kg/ontodevice/` +[ontolab](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontolab) | `https://www.theworldavatar.com/kg/ontolab/` +[ontotechsystem](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotechnicalsystem) | `https://www.theworldavatar.com/kg/ontotechnicalsystem/` +[ontotimeseries](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries) | `https://www.theworldavatar.com/kg/ontotimeseries/` [time](https://www.w3.org/TR/owl-time/) | `http://www.w3.org/2006/time#` diff --git a/Agents/OSMAgent/README.md b/Agents/OSMAgent/README.md index 0a000e4a7bc..59dac823dbc 100644 --- a/Agents/OSMAgent/README.md +++ b/Agents/OSMAgent/README.md @@ -69,7 +69,7 @@ Once the OSM data is uploaded, it will appear in PostgreSQL tables. The agent as #### 2.4.2. Digitales Landschaftsmodell (DLM) Land Use Data DLM files can be uploaded via the stack-data-uploader in Pirmasens Digital Twin (PSDT) repository. -The link to the DLM file in PSDT is available [here](https://github.com/cambridge-cares/pirmasens/tree/main/psdt/stack-data-uploader-inputs/data/dlm). 
+The link to the DLM file in PSDT is available [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/psdt/stack-data-uploader-inputs/data/dlm). Please note that PSDT is a private repository, permission may be required. #### 2.4.3. Other Land Use Data @@ -99,7 +99,7 @@ In the [config.properties](osmagent/src/main/resources/config.properties) file, Default value - `postgres` is set to according to the database name specified in [osmagent_data.json](stack-data-uploader-inputs/config/osmagent_data.json). Change `db.name` if [osmagent_data.json](stack-data-uploader-inputs/config/osmagent_data.json) database value is changed. - `osm.schema` - Schema name containing OSM data. Default value - `public` is set to the schema specified in [osmagent_data.json](stack-data-uploader-inputs/config/osmagent_data.json). Change `osm.schema` and [`building_usage.obda`](osmagent/src/main/resources/building_usage.obda) if [osmagent_data.json](stack-data-uploader-inputs/config/osmagent_data.json) schema is changed. -- `landuse.table` - Table name (inclusive of schema) containing land use data. Default value is set to `public.dlmsie02f` as per uploaded via psdt [here](https://github.com/cambridge-cares/pirmasens/blob/main/psdt/stack-data-uploader-inputs/config/dlm.json). Leave empty if there is no land use data available, no land use matching will be run. +- `landuse.table` - Table name (inclusive of schema) containing land use data. Default value is set to `public.dlmsie02f` as per uploaded via psdt [here](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/psdt/stack-data-uploader-inputs/config/dlm.json). Leave empty if there is no land use data available, no land use matching will be run. ## 4. 
Deployment ### 4.1 Retrieving OSMAgent's image diff --git a/Agents/OSMAgent/stack-data-uploader-inputs/data/osmagent_pirmasens/ontobuiltenv/README.md b/Agents/OSMAgent/stack-data-uploader-inputs/data/osmagent_pirmasens/ontobuiltenv/README.md index 117a6184627..2b1694f149f 100644 --- a/Agents/OSMAgent/stack-data-uploader-inputs/data/osmagent_pirmasens/ontobuiltenv/README.md +++ b/Agents/OSMAgent/stack-data-uploader-inputs/data/osmagent_pirmasens/ontobuiltenv/README.md @@ -1 +1 @@ -Add OntoBuiltEnv.owl file here from https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuiltenv \ No newline at end of file +Add OntoBuiltEnv.owl file here from https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobuiltenv \ No newline at end of file diff --git a/Agents/RenewableEnergyAgents/UrbanObservatorySolarSensorAgent/README.md b/Agents/RenewableEnergyAgents/UrbanObservatorySolarSensorAgent/README.md index e89cf0edf6b..c343c7ded02 100644 --- a/Agents/RenewableEnergyAgents/UrbanObservatorySolarSensorAgent/README.md +++ b/Agents/RenewableEnergyAgents/UrbanObservatorySolarSensorAgent/README.md @@ -1,8 +1,8 @@ # An agent to represent and query solar data reported by the Newcastle Urban Observatory. ### Authors -* [Toby Latcham](tjl47@cam.ac.uk) -* [Sophie Hall](sh2000@cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Toby Latcham](mailto:tjl47@cam.ac.uk) +* [Sophie Hall](mailto:sh2000@cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) The agent processes solar radiation data including global and diffuse solar radiation downloaded as a CSV file from the Newcastle Urban Observatory to represent it in the World Avatar Knowledge Graph (KG) to make it accessible and queryable. The data is reported in the context of sensors positioned around Newcastle. 
diff --git a/Agents/SeaLevelImpactAgent/README.md b/Agents/SeaLevelImpactAgent/README.md index 1a6a0639914..63bdc91ca10 100644 --- a/Agents/SeaLevelImpactAgent/README.md +++ b/Agents/SeaLevelImpactAgent/README.md @@ -28,7 +28,7 @@ The agent has been implemented to work in the stack. Follow the instructions in ## 3. Agent Configuration ### 3.1 Config Properties -The [Config.properties](inputs/config.properties) file contain the table name for the different datasets. A default value is set for each parameters following the stack-data-uploader table names specified in [Singapore-sea-level-rise stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-sea-level-rise-singapore/Deploy/stacks/Singapore-sea-level-rise). +The [Config.properties](inputs/config.properties) file contain the table name for the different datasets. A default value is set for each parameters following the stack-data-uploader table names specified in [Singapore-sea-level-rise stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/Singapore-sea-level-rise). 1) `dbName` - Specify the postgresql database 2) `buildingsMatViewName` - Specify the table name for CityDB buildings footprint 3) `heritagetreesTable` - Specify the table name for heritage tree diff --git a/Deploy/stacks/web/website/site/vendor/erusev/parsedown-extra/README.md b/Deploy/stacks/web/website/site/vendor/erusev/parsedown-extra/README.md index b1ae3149788..1ccdf39744d 100644 --- a/Deploy/stacks/web/website/site/vendor/erusev/parsedown-extra/README.md +++ b/Deploy/stacks/web/website/site/vendor/erusev/parsedown-extra/README.md @@ -1,4 +1,4 @@ -> You might also like [Caret](http://caret.io?ref=parsedown) - our Markdown editor for the Desktop. +> You might also like [Caret](http://caret.io?ref=parsedown) - our Markdown editor for the Desktop. 
## Parsedown Extra diff --git a/Deploy/stacks/web/website/site/vendor/erusev/parsedown/README.md b/Deploy/stacks/web/website/site/vendor/erusev/parsedown/README.md index 92691c56f74..8b9aec41d66 100644 --- a/Deploy/stacks/web/website/site/vendor/erusev/parsedown/README.md +++ b/Deploy/stacks/web/website/site/vendor/erusev/parsedown/README.md @@ -1,4 +1,4 @@ -> I also make [Caret](https://caret.io?ref=parsedown) - a Markdown editor for Mac and PC. +> I also make [Caret](https://caret.io?ref=parsedown) - a Markdown editor for Mac and PC. ## Parsedown From 8d3f3d739f21bfa5c5b9b07e752c6cc149bb9033 Mon Sep 17 00:00:00 2001 From: Myles MacDonald Date: Sat, 12 Apr 2025 16:45:08 -0400 Subject: [PATCH 07/30] add on merge for modified files --- .github/workflows/twa-md-merge.yml | 27 +++++++++++++++++++++++++++ .github/workflows/twa-md-push.yml | 6 +++--- 2 files changed, 30 insertions(+), 3 deletions(-) create mode 100644 .github/workflows/twa-md-merge.yml diff --git a/.github/workflows/twa-md-merge.yml b/.github/workflows/twa-md-merge.yml new file mode 100644 index 00000000000..86dafaf48fe --- /dev/null +++ b/.github/workflows/twa-md-merge.yml @@ -0,0 +1,27 @@ +# +# This workflow contains a job to check for broken links within Markdown files in the repository. 
+# +name: TWA Markdown Merge + +# Trigger this workflow during pull requests to the 'main' branch if changes to Markdown files +on: + pull_request: + branches: + - main + paths: + - '**.md' + - '**.MD' + +jobs: + # Check for broken links only within Markdown files changed by the pull request + markdown-link-check: + name: Check markdown files for broken links + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Markdown links check + uses: ruzickap/action-my-markdown-link-checker@v1 + with: + config_file: .github/workflows/twa-md-push.json + check-modified-files-only: 'yes' \ No newline at end of file diff --git a/.github/workflows/twa-md-push.yml b/.github/workflows/twa-md-push.yml index 3f93860c5d4..ede7f3e72d0 100644 --- a/.github/workflows/twa-md-push.yml +++ b/.github/workflows/twa-md-push.yml @@ -10,9 +10,10 @@ on: - main paths: - '**.md' + - '**.MD' jobs: - # Check for broken links within Markdown files + # Check for broken links within all Markdown files markdown-link-check: name: Check markdown files for broken links runs-on: ubuntu-latest @@ -22,5 +23,4 @@ jobs: - name: Markdown links check uses: ruzickap/action-my-markdown-link-checker@v1 with: - config_file: .github/workflows/twa-md-push.json - debug: true \ No newline at end of file + config_file: .github/workflows/twa-md-push.json \ No newline at end of file From cde5a3fb55e2c3c03a5cdb3583ce8f5d5463dc5b Mon Sep 17 00:00:00 2001 From: Myles MacDonald <65673247+mayomatsuda@users.noreply.github.com> Date: Sat, 12 Apr 2025 16:47:08 -0400 Subject: [PATCH 08/30] dev-markdown-link-check: add github workflow to check markdown links. 
Resolves cambridge-cares#464 Fix existing broken links --- .github/workflows/twa-md-merge.yml | 27 ++++++ .github/workflows/twa-md-push.json | 96 +++++++++++++++++++ .github/workflows/twa-md-push.yml | 26 +++++ AR/CARESLab/README.md | 2 +- Agents/APIAgent/README.md | 20 ++-- Agents/AQMeshInputAgent/README.md | 2 +- Agents/AccessAgent/README.md | 4 +- Agents/AirQualityAgent/README.md | 12 +-- Agents/AndroidStatusAgent/README.md | 8 +- Agents/AverageSquareMetrePriceAgent/README.md | 14 +-- Agents/BMSBacnetAgent/README.md | 2 +- Agents/BMSInstantiationAgent/README.MD | 4 +- Agents/BMSQueryAgent/README.md | 8 +- Agents/BMSUpdateAgent/README.md | 12 +-- Agents/BuildingFloorAgent/README.md | 6 +- Agents/BuildingIdentificationAgent/README.md | 6 +- Agents/BuildingMatchingAgent/README.md | 2 +- Agents/CARESWeatherStationAgent/README.md | 4 +- Agents/CEAAgent/README.md | 10 +- Agents/CarparkAgent/README.md | 6 +- Agents/CopCalculationAgent/README.md | 14 +-- Agents/DashboardAgent/README.md | 26 ++--- Agents/DataBridgeAgent/README.md | 12 +-- Agents/DerivationAgentPythonExample/README.md | 12 +-- Agents/DistrictHeatingAgent/README.md | 2 +- .../README.md | 16 ++-- .../README.md | 8 +- .../README.md | 4 +- Agents/ESPHomeAgent/README.md | 8 +- Agents/ESPHomeUpdateAgent/README.md | 4 +- Agents/EmailAgent/README.md | 2 +- .../README.md | 16 ++-- Agents/EquipmentBookingAgent/README.md | 12 +-- Agents/FHSashAndOccupancyAgent/README.MD | 6 +- Agents/FenlandTrajectoryAgent/README.md | 24 ++--- Agents/FilterAgent/README.md | 2 +- Agents/FloodAssessmentAgent/README.md | 18 ++-- Agents/FloodWarningAgent/README.md | 8 +- Agents/ForecastingAgent/README.md | 12 +-- Agents/GFAAgent/README.md | 2 +- Agents/GeoSegmentAgent/README.md | 2 +- Agents/HMLandRegistryAgent/README.md | 8 +- Agents/HeatEmissionAgent/README.md | 2 +- Agents/HistoricalAQMeshAgent/README.md | 2 +- Agents/HistoricalNTUEnergyAgent/README.md | 6 +- Agents/HistoricalNUSDavisAgent/README.md | 2 +- 
.../HistoricalPirmasensStationAgent/README.md | 2 +- Agents/Ifc2OntoBIMAgent/README.md | 2 +- Agents/Ifc2TilesetAgent/README.md | 4 +- Agents/IfcOwlConverterAgent/README.md | 2 +- .../InequalityIndexCalculationAgent/README.md | 18 ++-- Agents/IsochroneAgent/README.md | 14 +-- Agents/LSOAInputAgent/README.md | 12 +-- Agents/MackayCalculatorAgent/readme.md | 4 +- Agents/MackayDataAgent/README.md | 10 +- Agents/MetOfficeAgent/README.md | 14 +-- Agents/NTUDataAgent/README.md | 4 +- Agents/NTUEnergyClusterAgent/README.md | 6 +- Agents/NTUForecastingAgent/README.md | 2 +- Agents/NTUP2PEnergyAgent/README.md | 6 +- Agents/NTUPVLibAgent/README.md | 8 +- Agents/NUSDavisWeatherStationAgent/README.md | 2 +- Agents/NetworkAnalysisAgent/README.md | 8 +- Agents/OPFAgent/README.md | 2 +- Agents/OSMAgent/README.md | 24 ++--- .../osmagent_pirmasens/ontobuiltenv/README.md | 2 +- Agents/OntoMatchAgent/README.md | 4 +- Agents/OpenMeteoAgent/README.md | 2 +- Agents/PIPSRequestAgent/README.md | 2 +- Agents/PIPSTimeSeriesAgent/README.md | 2 +- Agents/PVLibAgent/README.md | 12 +-- Agents/PropertyValueEstimationAgent/README.md | 14 +-- Agents/RFIDQueryAgent/README.MD | 10 +- Agents/RFIDUpdateAgent/README.md | 2 +- .../MetOfficeWindSensorAgent/README.md | 6 +- Agents/RenewableEnergyAgents/README.md | 4 +- .../README.md | 6 +- .../README.md | 16 ++-- Agents/RxnOptGoalAgent/README.md | 4 +- Agents/RxnOptGoalIterAgent/README.md | 4 +- Agents/SeaLevelImpactAgent/README.md | 6 +- Agents/SensorLoggerMobileAppAgent/README.md | 10 +- Agents/SmartMeterAgent/README.md | 4 +- Agents/SolarkatasterAgent/README.md | 2 +- Agents/ThingsBoardAgent/README.md | 2 +- Agents/ThingspeakAgent/README.MD | 2 +- Agents/TimeSeriesExample/README.md | 4 +- Agents/TrafficIncidentAgent/README.md | 6 +- Agents/TrajectoryQueryAgent/README.md | 6 +- Agents/TravellingSalesmanAgent/README.md | 8 +- Agents/UserAgent/README.md | 6 +- Agents/UtilityCostCalculationAgent/README.md | 18 ++-- Agents/VisBackendAgent/README.md | 2 +- 
Agents/ZeoliteAgent/README.md | 2 +- Agents/_DerivationPaper/README.md | 2 +- Agents/utils/chemistry_and_robots/README.md | 6 +- Agents/utils/python-utils/README.md | 2 +- Apps/BMSQueryApp/README.md | 6 +- Apps/Modules/camera/README.md | 2 +- Apps/Modules/login/README.md | 4 +- Apps/PirmasensToiletApp/README.md | 6 +- Apps/PirmasensToiletApp/inputs/data/README.md | 4 +- Apps/SampleApp/feature/todo/README.md | 2 +- .../Stack_Deployment/README.md | 16 ++-- Deploy/stacks/Chile/README.md | 5 +- .../KingsLynn/StackDeployment/README.md | 18 ++-- Deploy/stacks/KingsLynn/Utilities/README.md | 4 +- Deploy/stacks/Pirmasens/README.md | 5 +- .../stacks/Singapore-sea-level-rise/README.md | 6 +- Deploy/stacks/Singapore/README.md | 2 +- .../StackDeployment/README.md | 8 +- .../Utilities/TOPSIS/README.md | 1 + Deploy/stacks/cares-lab/README.md | 2 +- Deploy/stacks/db/fileserver/README.md | 2 +- Deploy/stacks/ontop+geoserver/README.md | 2 +- Deploy/stacks/timeline/readme.md | 11 ++- .../user/pages/02.explore/01.marie/marie.md | 2 +- .../01.power-system/uk_power_system.md | 2 +- .../02.gas-grid/uk_gas_grid.md | 2 +- .../02.digital-twin/03.land-use/land_use.md | 2 +- .../04.flood-risk/flood_risk.md | 2 +- .../02.explore/02.digital-twin/default.md | 2 +- .../02.explore/02.digital-twin/template.md | 2 +- .../website/site/user/plugins/error/README.md | 4 +- .../site/user/plugins/form/CHANGELOG.md | 1 + .../website/site/user/plugins/form/README.md | 2 +- .../form/vendor/google/recaptcha/README.md | 5 +- .../site/user/plugins/page-inject/README.md | 2 +- .../site/user/plugins/problems/README.md | 2 +- .../website/site/user/themes/quark/README.md | 2 +- .../site/vendor/composer/semver/README.md | 2 +- .../site/vendor/doctrine/cache/README.md | 2 +- .../vendor/erusev/parsedown-extra/README.md | 6 +- .../site/vendor/erusev/parsedown/README.md | 3 +- .../gregwar/image/Gregwar/Image/README.md | 3 +- .../site/vendor/guzzlehttp/psr7/README.md | 2 +- .../site/vendor/kodus/psr7-server/README.md | 
1 - .../site/vendor/league/climate/README.md | 1 - .../site/vendor/miljar/php-exif/README.md | 9 +- .../site/vendor/monolog/monolog/README.md | 11 +-- .../website/site/vendor/nyholm/psr7/README.md | 3 +- .../vendor/php-http/message-factory/README.md | 7 +- .../site/vendor/rockettheme/toolbox/README.md | 6 +- .../vendor/willdurand/negotiation/README.md | 4 +- EntityRDFizer/README.md | 2 +- JPS_ARBITRAGE/README.md | 10 +- JPS_BLAZEGRAPH/Readme.md | 2 +- JPS_ESS/README.MD | 2 +- JPS_VIRTUALSENSOR/README.md | 4 +- QuestionAnswering/JPS_Chatbot/README.md | 2 +- .../JPS_LDF/dependencies/README.md | 2 +- .../Training/EntityLinking/readme.md | 4 +- .../MARIE_AND_BERT/Training/readme.md | 2 +- QuestionAnswering/MARIE_AND_BERT/readme.md | 8 +- .../MARIE_SEQ2SEQ/training/README.md | 4 +- .../QA_ICL/data_generation/README.md | 7 +- .../next_app_marie/resources/history.md | 2 +- .../resources/tbox-info/ontocompchem.md | 2 +- .../resources/tbox-info/ontokin.md | 4 +- .../resources/tbox-info/ontomops.md | 4 +- .../resources/tbox-info/ontospecies.md | 2 +- .../resources/tbox-info/ontozeolite.md | 4 +- README.md | 2 +- .../ifcto3Dtilesnext/README.md | 4 +- obsolete/JPS_DES/README.md | 3 +- obsolete/JPS_Version_0/BMS/BMSMap/LICENSE.md | 2 +- obsolete/JPS_Version_0/BMS/BMSMap/README.md | 2 +- ontology-tools/CMCLOntoChemExp/README.md | 10 +- thermo/README.md | 9 +- thermo/obda-thermochemistry/README.md | 12 +-- thermo/spin-thermochemistry/README.md | 4 +- web/augmented-uk/README.md | 4 +- web/augmented-uk/docs/data.md | 16 ++-- .../data/street_light/king's_lynn/README.md | 1 + .../vector/132kv-overhead-lines/README.md | 2 +- .../ukpn/vector/132kv-poles-towers/README.md | 2 +- .../ukpn/vector/33kv-overhead-lines/README.md | 2 +- .../README.md | 2 +- web/docs/README.md | 1 + .../stack-manager-inputs/README.md | 2 +- web/pylon-visualisation/README.md | 2 +- web/twa-vis-framework/docs/cesium.md | 2 +- web/twa-vis-framework/docs/mapbox.md | 4 +- web/twa-vis-framework/docs/overview.md | 10 
+- web/twa-vis-framework/docs/troubleshooting.md | 2 +- web/twa-vis-framework/docs/tutorial-mapbox.md | 8 +- web/twa-vis-framework/docs/tutorials.md | 2 +- web/twa-vis-framework/example/README.md | 2 +- web/twa-vis-framework/example/mapbox.md | 2 +- web/twa-vis-framework/library/README.md | 6 +- 190 files changed, 673 insertions(+), 535 deletions(-) create mode 100644 .github/workflows/twa-md-merge.yml create mode 100644 .github/workflows/twa-md-push.json create mode 100644 .github/workflows/twa-md-push.yml diff --git a/.github/workflows/twa-md-merge.yml b/.github/workflows/twa-md-merge.yml new file mode 100644 index 00000000000..86dafaf48fe --- /dev/null +++ b/.github/workflows/twa-md-merge.yml @@ -0,0 +1,27 @@ +# +# This workflow contains a job to check for broken links within Markdown files in the repository. +# +name: TWA Markdown Merge + +# Trigger this workflow during pull requests to the 'main' branch if changes to Markdown files +on: + pull_request: + branches: + - main + paths: + - '**.md' + - '**.MD' + +jobs: + # Check for broken links only within Markdown files changed by the pull request + markdown-link-check: + name: Check markdown files for broken links + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Markdown links check + uses: ruzickap/action-my-markdown-link-checker@v1 + with: + config_file: .github/workflows/twa-md-push.json + check-modified-files-only: 'yes' \ No newline at end of file diff --git a/.github/workflows/twa-md-push.json b/.github/workflows/twa-md-push.json new file mode 100644 index 00000000000..716e99fccd0 --- /dev/null +++ b/.github/workflows/twa-md-push.json @@ -0,0 +1,96 @@ +{ + "ignorePatterns": [ + { + "pattern": "http://localhost(:\\d+)?(/[^ ]*)?" + }, + { + "pattern": "http://HOST:PORT?(/[^ ]*)?" + }, + { + "pattern": "https://www.cmegroup.com?(/[^ ]*)?" + }, + { + "pattern": "https?://(www\\.)?theworldavatar.com(/[^ ]*)?" + }, + { + "pattern": "https://abc.xyz?(/[^ ]*)?" 
+ }, + { + "pattern": "https://maven.pkg.github.com/cambridge-cares/TheWorldAvatar/?" + }, + { + "pattern": "https?://twitter.com/?" + }, + { + "pattern": "https?://www.w3.org/?" + }, + { + "pattern": "https?://www.ontology-of-units-of-measure.org/?" + }, + { + "pattern": "https?://kg.cmclinnovations.com/mods-agent(/[^ ]*)?" + }, + { + "pattern": "https?://github.com/[^/]+/[^/]+/issues(/[^ ]*)?" + }, + { + "pattern": "https?://www.dropbox.com/?" + }, + { + "pattern": "https?://docs.unity3d.com/?" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoassetmanagement" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobim" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontolab" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotechnicalsystem" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/pirmasens/tree/main/districtheating_stack" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/pirmasens/tree/main/psdt/stack\\-data\\-uploader\\-inputs/data/dlm" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/pirmasens/blob/main/psdt/stack\\-data\\-uploader\\-inputs/config/dlm\\.json" + }, + { + "pattern": "https://caret\\.io\\?ref=parsedown" + }, + { + "pattern": "http://caret\\.io\\?ref=parsedown" + }, + { + "pattern": "tjl47@cam.ac.uk" + }, + { + "pattern": "sh2000@cam.ac.uk" + }, + { + "pattern": "msff2@cam.ac.uk" + }, + { + "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuiltenv" + }, + { + 
"pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/dev\\-sea\\-level\\-rise\\-singapore/Deploy/stacks/Singapore\\-sea\\-level\\-rise" + } + ], + "aliveStatusCodes": [ + 200, + 403, + 0 + ] +} \ No newline at end of file diff --git a/.github/workflows/twa-md-push.yml b/.github/workflows/twa-md-push.yml new file mode 100644 index 00000000000..ede7f3e72d0 --- /dev/null +++ b/.github/workflows/twa-md-push.yml @@ -0,0 +1,26 @@ +# +# This workflow contains a job to check for broken links within Markdown files in the repository. +# +name: TWA Markdown Push + +# Trigger this workflow during pushes to the 'main' branch if changes to Markdown files +on: + push: + branches: + - main + paths: + - '**.md' + - '**.MD' + +jobs: + # Check for broken links within all Markdown files + markdown-link-check: + name: Check markdown files for broken links + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Markdown links check + uses: ruzickap/action-my-markdown-link-checker@v1 + with: + config_file: .github/workflows/twa-md-push.json \ No newline at end of file diff --git a/AR/CARESLab/README.md b/AR/CARESLab/README.md index 7d76cc8860a..d60a3f17528 100644 --- a/AR/CARESLab/README.md +++ b/AR/CARESLab/README.md @@ -105,7 +105,7 @@ A complete guide of setting up **new** MRTK3 project can be found at [here](http Run in Unity is the most easy and convenient way to test during development. Check [here](https://learn.microsoft.com/en-us/windows/mixed-reality/mrtk-unity/mrtk3-input/packages/input/input-simulation#how-to-use-mrtk3-input-simulation-mrtk3-input-simulator-default-controls) for keys to control. 
Other resources: -- [Debug C# code in Unity](https://docs.unity3d.com/Manual/ManagedCodeDebugging.html) +- [Debug C# code in Unity](https://docs.unity3d.com/Manual/managed-code-debugging.html) ### Test and Deploy on Device or Emulator diff --git a/Agents/APIAgent/README.md b/Agents/APIAgent/README.md index d1d60a1c0b5..9c445543319 100644 --- a/Agents/APIAgent/README.md +++ b/Agents/APIAgent/README.md @@ -4,7 +4,7 @@ This `API Agent` can be used to manage the automatic instantiation and updating The agent is integrated with the [Derived Information Framework]'s (DIF) to ensure proper data provenance. API information and API-Data-to-TimeSeries mappings are defined under a meta-data instance in KG. TS data is then considered as the derived quantity of the meta-data. The required meta-data triples to derive an API-downloaded TS instance are described in the [required derivation markup](#12-required-derivation-markup) section below. -Once a API is registered using the [`DerivationClient`](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/derivation), API agent automatically manages a periodical re-downloading of the data from that API. +Once a API is registered using the [`DerivationClient`](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/derivation), API agent automatically manages a periodical re-downloading of the data from that API. Ontology definition of API meta-data relies on [Web of Things (WoT) Hypermedia Controls Ontology]. We also extend [RDF Mapping Language (RML)] for TimeSeries data as `rml4ts Ontology`. @@ -135,25 +135,25 @@ One may notice the agent, once started, creates a property file named `APIAgent. 
[allows you to publish and install packages]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages [py4jps]: https://pypi.org/project/py4jps/#description -[TimeSeriesClient]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries +[TimeSeriesClient]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries [Darts]: https://unit8co.github.io/darts/index.html [Prophet]: https://unit8co.github.io/darts/generated_api/darts.models.forecasting.prophet_model.html [Facebook Prophet]: https://github.com/facebook/prophet [Github container registry]: https://ghcr.io [personal access token]: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens -[Derived Information Framework]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/derivation -[Stack manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[derivation agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent +[Derived Information Framework]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/derivation +[Stack manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[derivation agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent -[OntoTimeSeries]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries -[OntoDerivation]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoderivation -[rml4ts.owl]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/rml4ts/rml4ts.owl +[OntoTimeSeries]: 
https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries +[OntoDerivation]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoderivation +[rml4ts.owl]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/rml4ts/rml4ts.owl [docker compose file]: ./docker-compose.yml [Web of Things (WoT) Hypermedia Controls Ontology]:https://www.w3.org/2019/wot/hypermedia [RDF Mapping Language (RML)]:https://rml.io/specs/rml/ [test_triples]: ./test_triples -[api_temperature.ttl]: ./tbox_dev/test_triples/api_temperature.ttl +[api_temperature.ttl]: ./test_triples/api_temperature.ttl [calculations module]: ./data_classes/calculations.py -[api_pvcapacity.ttl]: ./tbox_dev/test_triples/api_pvcapacity.ttl \ No newline at end of file +[api_pvcapacity.ttl]: ./test_triples/api_pvcapacity.ttl \ No newline at end of file diff --git a/Agents/AQMeshInputAgent/README.md b/Agents/AQMeshInputAgent/README.md index 1a7f2faf908..2232d65efe0 100644 --- a/Agents/AQMeshInputAgent/README.md +++ b/Agents/AQMeshInputAgent/README.md @@ -3,7 +3,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the AQMesh air quality measuring station. It's only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS base lib to interact with both the KG and database. 
Before explaining the usage of the agent, we will briefly summarize the AQMesh API that is diff --git a/Agents/AccessAgent/README.md b/Agents/AccessAgent/README.md index 1f364d5f4c9..490cf089577 100644 --- a/Agents/AccessAgent/README.md +++ b/Agents/AccessAgent/README.md @@ -41,7 +41,7 @@ If building a new version of the image, the new image should be pushed to the Gi docker push ghcr.io/cambridge-cares/access-agent:X.Y.Z ``` -where X.Y.Z is the new version number. Please also ensure that you are logged in to the docker registry. Follow [step 1 of this](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) for clarity. +where X.Y.Z is the new version number. Please also ensure that you are logged in to the docker registry. Follow [step 1 of this](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) for clarity. #### Integration tests @@ -54,7 +54,7 @@ This will test the agent in a production environment including connections to th ## 2. Deployment Instructions -The Access Agent can be deployed in a standalone Docker container or as part of The World Avatar [stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +The Access Agent can be deployed in a standalone Docker container or as part of The World Avatar [stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). 
### 2.1 Standalone Container diff --git a/Agents/AirQualityAgent/README.md b/Agents/AirQualityAgent/README.md index eaccf677d43..7912db90c2a 100644 --- a/Agents/AirQualityAgent/README.md +++ b/Agents/AirQualityAgent/README.md @@ -153,20 +153,20 @@ Markus Hofmeister (mh807@cam.ac.uk), March 2023 [allows you to publish and install packages]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages [CMCL Docker registry]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts [Create SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/create-with-openssh/ [Github container registry]: https://ghcr.io [Github package repository]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Packages [http://localhost:5002/airqualityagent]: http://localhost:5002/airqualityagent [Java Development Kit version >=11]: https://adoptium.net/en-GB/temurin/releases/?version=11 [JDBC driver]: https://jdbc.postgresql.org/download/ -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB -[OntoEMS]: https://raw.githubusercontent.com/cambridge-cares/TheWorldAvatar/main/JPS_Ontology/ontology/ontoems/OntoEMS.owl +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main +[OntoEMS]: https://raw.githubusercontent.com/TheWorldAvatar/ontology/refs/heads/main/ontology/ontoems/OntoEMS.owl [personal access token]: https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token [py4jps]: https://pypi.org/project/py4jps/#description -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[spin up the stack]: 
https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [Upload SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/to-existing-droplet/ [VSCode via SSH]: https://code.visualstudio.com/docs/remote/ssh [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar diff --git a/Agents/AndroidStatusAgent/README.md b/Agents/AndroidStatusAgent/README.md index 56e62d052f8..41c2586039c 100644 --- a/Agents/AndroidStatusAgent/README.md +++ b/Agents/AndroidStatusAgent/README.md @@ -9,9 +9,9 @@ Because DTVF can only send request instead of taking in any, logging the status AndroidStatusAgent in visualisation use case # 1. Setup -This agent is designed to run in stack, which is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +This agent is designed to run in stack, which is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). 
A successful setup will result in 9 containers: -- 8 [default containers](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) +- 8 [default containers](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) - AndroidStatusAgent ## 1.1 Config BMSQueryAgent in Stack @@ -58,10 +58,10 @@ config/ |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ## 1.2 Spin Up Stack -Follow these [steps](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) to spin up the stack. +Follow these [steps](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) to spin up the stack. # 2. Usage This agent support both POST and GET requests. 
diff --git a/Agents/AverageSquareMetrePriceAgent/README.md b/Agents/AverageSquareMetrePriceAgent/README.md index c98ea4502eb..b73dc7fbbfe 100644 --- a/Agents/AverageSquareMetrePriceAgent/README.md +++ b/Agents/AverageSquareMetrePriceAgent/README.md @@ -251,16 +251,16 @@ Markus Hofmeister (mh807@cam.ac.uk), March 2023 [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Derivation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[Derivation Agent configuration]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/python_derivation_agent/pyderivationagent/conf/agent_conf.py +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Derivation Agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[Derivation Agent configuration]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent/pyderivationagent/conf/agent_conf.py [EPC Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EnergyPerformanceCertificateAgent -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [OntoBuiltEnv]: http://www.theworldavatar.com/ontology/ontobuiltenv/OntoBuiltEnv.owl [HM Property Sales Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HMLandRegistryAgent -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#spinning-up-a-stack -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack-Clients]: 
https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#spinning-up-a-stack +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [The World Avatar]: https://github.com/cambridge-cares/TheWorldAvatar diff --git a/Agents/BMSBacnetAgent/README.md b/Agents/BMSBacnetAgent/README.md index 7c42856920b..6fca2674993 100644 --- a/Agents/BMSBacnetAgent/README.md +++ b/Agents/BMSBacnetAgent/README.md @@ -50,7 +50,7 @@ This agent automatically runs the update job periodically once deployed and does [config file]: ./config/config.py -[TimeSeriesClient]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries +[TimeSeriesClient]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries [BMSQueryAgent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BMSQueryAgent diff --git a/Agents/BMSInstantiationAgent/README.MD b/Agents/BMSInstantiationAgent/README.MD index 091031923ce..a41406dbb6c 100644 --- a/Agents/BMSInstantiationAgent/README.MD +++ b/Agents/BMSInstantiationAgent/README.MD @@ -1,8 +1,8 @@ # BMS Instantiation Agent -This agent is designed to instantiate devices found in Building Management System (BMS) based on [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice), [OntoBMS](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobms) and [OntoCAPE](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontocape). 
+This agent is designed to instantiate devices found in Building Management System (BMS) based on [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice), [OntoBMS](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobms) and [OntoCAPE](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontocape). -The agent mainly uses the [remote store client](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) from the JPS_BASE_LIB to interact with the knowledge graph. +The agent mainly uses the [remote store client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) from the JPS_BASE_LIB to interact with the knowledge graph. ## Usage This part of the README describes the usage of the agent. The module itself can be packaged into an executable war, deployed as a web servlet on tomcat. Sending the appropriate request to the correct URL will initiate the agent. Since it uses the remote store client to interact with the knowledge graph , the knowledge graph will be required to be set-up beforehand. diff --git a/Agents/BMSQueryAgent/README.md b/Agents/BMSQueryAgent/README.md index cd0eda626a0..e64cbe71bd8 100644 --- a/Agents/BMSQueryAgent/README.md +++ b/Agents/BMSQueryAgent/README.md @@ -9,7 +9,7 @@ To achieve a balance between response speed and body size, the agent breaks the - Once the room is determined, users can send `retrieve/equipment?RoomIRI=` to get all the equipment in the selected room. # 1. Setup -This agent is designed to run in stack, which is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +This agent is designed to run in stack, which is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). 
A successful setup will result in 9 containers (optional 10): - Default containers - Stack Manager (exits when spins up all other containers) @@ -25,7 +25,7 @@ A successful setup will result in 9 containers (optional 10): For the BMSQueryAgent to return results, it is assumed that there is already knowledge graph in the Blazegraph. -BMSQueryAgent does not depend on [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent), but they are used together to create time series visualisation in the [BMS Query App](https://github.com/cambridge-cares/TheWorldAvatar/tree/1502-android-app-for-data-visualisation/Apps/BMSQueryApp). +BMSQueryAgent does not depend on [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent), but they are used together to create time series visualisation in the [BMS Query App](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Apps/BMSQueryApp). ## 1.1 Config BMSQueryAgent in Stack @@ -54,10 +54,10 @@ config/ |_ ... |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ## 1.2 Spin Up Stack -Follow the [steps](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) to spin up the stack. +Follow the [steps](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) to spin up the stack. # 2. 
Usage The agent accepts three paths: diff --git a/Agents/BMSUpdateAgent/README.md b/Agents/BMSUpdateAgent/README.md index 853cedaa217..9370b2e1f02 100644 --- a/Agents/BMSUpdateAgent/README.md +++ b/Agents/BMSUpdateAgent/README.md @@ -9,7 +9,7 @@ BMSUpdateAgent is an agent designed for multiple functions: 4) It is able to retrieve the present value for a Bacnet object and update the knowledge graph accordingly. More information is available at the [Update Present Value Route](#34-update-present-value-route). # 1. Setup -This agent is designed to run in stack, which is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +This agent is designed to run in stack, which is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). ## 1.1. Build Image The BMSUpdateAgent is set up to use the Maven repository. You'll need to provide your credentials in single-word text files located like this: @@ -53,10 +53,10 @@ config/ |_ ... |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#specifying-custom-containers). ## 1.3. Spin Up Stack -Follow these [steps](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) to spin up the stack. +Follow these [steps](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) to spin up the stack. ## 2. Client Properties File Each of the route requires a client.properties file containing specific keys and values. The client.properties file are then mapped to an environment variable in the docker container. 
Refer to the `ENV` section in `stack-manager-input-config-service/bms-update-agent.json` for more information. @@ -104,13 +104,13 @@ More information can be found in the example property file `updateTriplesClient. # 3. Authorization Setup ## 3.1 Setup Stack and Keycloak -Please refer to [Deploy/stacks/dynamic/examples/services/keycloak](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/examples/services/keycloak) for setup guide. +Please refer to [Deploy/stacks/dynamic/examples/services/keycloak](https://github.com/TheWorldAvatar/stack/tree/main/examples/services/keycloak) for setup guide. ## 3.2 Configure Client After BMSUpdateAgent client has been registered in Keycloak, 1. Download client adapter from keycloak admin console. -2. Copy [stack-manager-input/secets/bms_updateagent_keycloak.json](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/BMSUpdateAgent/stack-manager-input/secrets/bms-update-agent-keycloak.json) to the stack manager's input secrets folder. +2. Copy [stack-manager-input/secets/bms_updateagent_keycloak.json](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/BMSUpdateAgent/stack-manager-input/config/services/bms-update-agent.json) to the stack manager's input secrets folder. 3. Replace `realm`, `resource` and `secret` in the copied secret file 4. Replace `STACK_NAME` in the copied secret file 5. (DEVELOPER) Update src/main/webapp/WEB-INF/web.xml to set up authorization on more endpoints. @@ -159,7 +159,7 @@ In order for the agent to query for `bacnetObjectId` and `bacnetDeviceId`, the [ - `dataIRI` the data IRI that is linked to the `bacnetObjectId` and `bacnetDeviceId`. 
- `value` the value to write to the Bacnet object - The query run by the agent is structured based on [OntoBMS](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontobms/OntoBMS.owl): + The query run by the agent is structured based on [OntoBMS](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontobms/OntoBMS.owl): ``` ontobms:hasBacnetObjectID "bacnetObjectId" ; diff --git a/Agents/BuildingFloorAgent/README.md b/Agents/BuildingFloorAgent/README.md index 0cdbaf86017..2e685ecd70e 100644 --- a/Agents/BuildingFloorAgent/README.md +++ b/Agents/BuildingFloorAgent/README.md @@ -6,7 +6,7 @@ This agent has been developed to improve the number of floors for 3D buildings. 3) Cat. C: the data is estimate calculated by the height of buildilng ### 1.1 Requirements -The agent requires 3D building models based on the CityGML standard. These models must be uploaded through the [stack-data-uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader#citydb-data). +The agent requires 3D building models based on the CityGML standard. These models must be uploaded through the [stack-data-uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader#citydb-data). ## 2. Building the Agent The agent is designed for execution through a Docker container. Other deployment workflows are beyond the scope of this document. Follow the steps below to build and deploy the agent. 
@@ -49,7 +49,9 @@ According to the general standard of Singapore, there are two cases: 2) Domestic building: 1st floor height is 3.6m, the rest floor height is 2.8m ### 2.3 Retrieving BuildingFloorAgent's image -The Building Floor Agent should be pulled automatically with the stack-manager, if not you can pull the latest version from [cambridge_cares package](https://github.com/orgs/cambridge-cares/packages/container/package/buildingflooragent) using `docker pull ghcr.io/cambridge-cares/buildingflooragent:` + + +The Building Floor Agent should be pulled automatically with the stack-manager, if not you can pull the latest version from cambridge_cares package using `docker pull ghcr.io/cambridge-cares/buildingflooragent:` ### 2.4 Starting with the stack-manager The agent has been implemented to work in the stack. To do so, place buildingfloor.json in the [stack-manager config directory]. diff --git a/Agents/BuildingIdentificationAgent/README.md b/Agents/BuildingIdentificationAgent/README.md index 4c6742ed8da..7c26c393b9d 100644 --- a/Agents/BuildingIdentificationAgent/README.md +++ b/Agents/BuildingIdentificationAgent/README.md @@ -22,18 +22,18 @@ which must have a 'scope' that [allows you to publish and install packages](http #### Stack containers -This agent requires the following tools, which **MUST** run on the same stack. The details for setting them up are explained at [stack manager page](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +This agent requires the following tools, which **MUST** run on the same stack. The details for setting them up are explained at [stack manager page](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). (1) PostgreSQL database -The agent is designed to use the stack PostgreSQL. It requires the buildings data to be stored in a schema called 'citydb' in the 'postgres' database. 
The buildings data can be instantiated in the required format by uploading the raw data using the [stack data uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader). The user-specified table must be in the same 'postgres' database that contains the buildings data. +The agent is designed to use the stack PostgreSQL. It requires the buildings data to be stored in a schema called 'citydb' in the 'postgres' database. The buildings data can be instantiated in the required format by uploading the raw data using the [stack data uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader). The user-specified table must be in the same 'postgres' database that contains the buildings data. ### 1.2 Docker deployment - Build this agent's image by executing `docker compose build` within this folder. Do not start the container. - Copy the `buildingidentificationagent.json` file from the `stack-manager-input-config` folder into the `TheWorldAvatar/Deploy/stacks/dynamic/stack-manager/inputs/config/services` folder of the stack manager. -- Start the stack manager as usual following [these instructions](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +- Start the stack manager as usual following [these instructions](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). ## 2. 
Agent Routes diff --git a/Agents/BuildingMatchingAgent/README.md b/Agents/BuildingMatchingAgent/README.md index a10b72ca894..957efbbabe9 100644 --- a/Agents/BuildingMatchingAgent/README.md +++ b/Agents/BuildingMatchingAgent/README.md @@ -1,7 +1,7 @@ # Building Matching Agent ### Introduction -The Building Matching Agent is used to link a building instantiated in [OntoBuiltEnv](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontobuiltenv/OntoBuiltEnv.owl) to its corresponding instance instantiated in [OntoCityGML](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontocitygml/OntoCityGML.owl). The link is created by using UPRNs as the identifiers. +The Building Matching Agent is used to link a building instantiated in [OntoBuiltEnv](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobuiltenv/OntoBuiltEnv.owl) to its corresponding instance instantiated in [OntoCityGML](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontocitygml/OntoCityGML.owl). The link is created by using UPRNs as the identifiers. ### Input The agent accepts 3 input parameters in a JSONObject format with the keys: "ocgml", "obe" and "prefixIRI" where, "ocgml" is the endpoint containing buildings instantiated in OntoCityGML, "obe" is the endpoint containing buildings instantiated in OntoBuiltEnv and "prefixIRI" is the complete IRI of the OntoCityGML namespace (i.e. the IRI string used to prefix all OntoCityGml instances at creation). diff --git a/Agents/CARESWeatherStationAgent/README.md b/Agents/CARESWeatherStationAgent/README.md index cddb47ca8bc..3bcb203c12a 100644 --- a/Agents/CARESWeatherStationAgent/README.md +++ b/Agents/CARESWeatherStationAgent/README.md @@ -2,7 +2,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the weather station located in the vicinity of the CARES Lab. 
Its only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the KG and database. Before explaining the usage of the agent, we will briefly summarize the weather station API that is @@ -39,7 +39,7 @@ taken at a timestamp between the first and third image. ## Usage This part of the README describes the usage of the input agent. The module itself can be packaged into an executable war, deployed as a web servlet on tomcat. Sending the appropriate request to the correct URL will initiate the agent. -The agent instantiates the weather reading retrieved via the API as timeseries in the knowledge graph. In addition, it will check and instantiate the ABoxes for the weather station and the quantities it measures based on these ontologies [ontology-of-units-of-measure](https://github.com/cambridge-cares/OM/tree/master), [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontodevice/OntoDevice.owl), [OntoEMS](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontoems/OntoEMS.owl), , [OntoTimeSeries](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontotimeseries/OntoTimeSeries.owl). An example of the ABox is shown below: +The agent instantiates the weather reading retrieved via the API as timeseries in the knowledge graph. 
In addition, it will check and instantiate the ABoxes for the weather station and the quantities it measures based on these ontologies [ontology-of-units-of-measure](https://github.com/cambridge-cares/OM/tree/master), [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice/OntoDevice.owl), [OntoEMS](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoems/OntoEMS.owl), , [OntoTimeSeries](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries/OntoTimeSeries.owl). An example of the ABox is shown below: ``` rdf:type ontoems:ReportingStation ; ontoems:reports . diff --git a/Agents/CEAAgent/README.md b/Agents/CEAAgent/README.md index f2bb7a644a1..b317ca9a827 100644 --- a/Agents/CEAAgent/README.md +++ b/Agents/CEAAgent/README.md @@ -63,7 +63,7 @@ The agent employs a configuration file - [CEAAgentConfig.properties], the conten 4. `terrain.database`: _Optional_ field for the RDB database name containing the terrain data. 5. `terrain.table`: _Optional_ field for the RDB table containing the terrain data. -The file location of `CEAAgentConfig.properties` can be set in the stack service configuration file at [cea-agent.json]. The default location is set to the [current file location](./cea-agent/src/main/resources/CEAAgentConfig.properties) and need not be modified if you transfer the [cea-agent.json] to the [stack manager's services](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services) directory. +The file location of `CEAAgentConfig.properties` can be set in the stack service configuration file at [cea-agent.json]. The default location is set to the [current file location](./cea-agent/src/main/resources/CEAAgentConfig.properties) and need not be modified if you transfer the [cea-agent.json] to the [stack manager's services](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services) directory. ### 2.2. 
Agent Dependencies @@ -114,7 +114,7 @@ Run `./stack.sh start ` in the [stack-manager] directory to start th ### 2.6. Visualisation -Visualisation of CEA outputs can be achieved with the [TWA Visualisation Framework] and [FeatureInfoAgent]. The [TWA Visualisation Framework] can be deployed according to the [documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#example---including-a-visualisation). +Visualisation of CEA outputs can be achieved with the [TWA Visualisation Framework] and [FeatureInfoAgent]. The [TWA Visualisation Framework] can be deployed according to the [documentation](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#example---including-a-visualisation). For the [FeatureInfoAgent], the `.sparql` and `.json` files for CEA data is available at [feature-info-agent-input](./feature-info-agent-input). This must be placed inside the `stack-manager/input/data/fia-queries` directory following the [FeatureInfoAgent] instructions. 
@@ -383,8 +383,8 @@ The agent will attempt to retrieve terrain data for an area containing the targe [CEAAgentConfig.properties]: ./cea-agent/src/main/resources/CEAAgentConfig.properties [cea-agent.json]: ./stack-manager-input-config/cea-agent.json [cea-agent-debug.json]: ./stack-manager-input-config/cea-agent-debug.json -[stack-data-uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config services]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services +[stack-data-uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config services]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services [TWA Visualisation Framework]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/web/twa-vis-framework [FeatureInfoAgent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent diff --git a/Agents/CarparkAgent/README.md b/Agents/CarparkAgent/README.md index a62e2a1f718..0a4c269e9dc 100644 --- a/Agents/CarparkAgent/README.md +++ b/Agents/CarparkAgent/README.md @@ -2,8 +2,8 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the carparks located in Singapore. Its only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. 
The -agent uses the [Timeseries Client](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and the [Remote Store Client](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) -from the JPS_BASE_LIB to interact with both the KG and database to mantain the KG instances and timeseries. In addition, the agent will instantiate the carpark's geolocation information in postGIS and Geoserver via the [GDAL Client](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-clients/src/main/java/com/cmclinnovations/stack/clients/gdal/GDALClient.java) and [Geoserver Client](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-clients/src/main/java/com/cmclinnovations/stack/clients/geoserver/GeoServerClient.java). The agent is also able to interact with the [Building Identification Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BuildingIdentificationAgent) to match the carparks to their nearest building based on the carpark's geolocation information (latitude, longitude etc). +agent uses the [Timeseries Client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and the [Remote Store Client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) +from the JPS_BASE_LIB to interact with both the KG and database to maintain the KG instances and timeseries.
In addition, the agent will instantiate the carpark's geolocation information in postGIS and Geoserver via the [GDAL Client](https://github.com/TheWorldAvatar/stack/tree/main/stack-clients/src/main/java/com/cmclinnovations/stack/clients/gdal/GDALClient.java) and [Geoserver Client](https://github.com/TheWorldAvatar/stack/tree/main/stack-clients/src/main/java/com/cmclinnovations/stack/clients/geoserver/GeoServerClient.java). The agent is also able to interact with the [Building Identification Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BuildingIdentificationAgent) to match the carparks to their nearest building based on the carpark's geolocation information (latitude, longitude etc). ## Carpark API The carpark information are retrieved via two different APIs. @@ -109,7 +109,7 @@ curl -X POST --header "Content-Type: application/json" -d "{\"delay\":\"0\",\"in ``` #### Create route -This request instantiates the ABoxes for the carparks based on [ontoCarpark](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontocarpark/OntoCarpark.owl) and matches each carpark to the closest building (within 100m) via the [Building Identification Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BuildingIdentificationAgent). The carparks meta data are stored in the sparql endpoints indicated in the [client.properties](#client-properties) while the carpark's geolocation and matched buildings data are stored based on the locations indicated in the `stack-manager-input-config-service/carpark-agent.json` ([Geolocation data configurations](#geolocation-data-configurations)). 
The request has the following format: +This request instantiates the ABoxes for the carparks based on [ontoCarpark](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontocarpark/OntoCarpark.owl) and matches each carpark to the closest building (within 100m) via the [Building Identification Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BuildingIdentificationAgent). The carparks meta data are stored in the sparql endpoints indicated in the [client.properties](#client-properties) while the carpark's geolocation and matched buildings data are stored based on the locations indicated in the `stack-manager-input-config-service/carpark-agent.json` ([Geolocation data configurations](#geolocation-data-configurations)). The request has the following format: ``` curl -X POST http://localhost:3838/carpark-agent/create ``` diff --git a/Agents/CopCalculationAgent/README.md b/Agents/CopCalculationAgent/README.md index 042e96be244..f5e9dc9adaf 100644 --- a/Agents/CopCalculationAgent/README.md +++ b/Agents/CopCalculationAgent/README.md @@ -145,16 +145,16 @@ Jieyang Xu (jx309@cam.ac.uk), May 2023 [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Derivation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[Derivation Agent configuration]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/python_derivation_agent/pyderivationagent/conf/agent_conf.py +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Derivation Agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[Derivation Agent configuration]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent/pyderivationagent/conf/agent_conf.py [EPC 
Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-EPCInstantiationAgent/Agents/EnergyPerformanceCertificateAgent -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [OntoRegionalAnalysis]: http://www.theworldavatar.com/ontology/ontoregionalanlysis/OntoRegionalAnalysis.owl [HM Land Registry Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-PropertySalesInstantiationAgent/Agents/HMLandRegistryAgent -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#spinning-up-a-stack -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#spinning-up-a-stack +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [The World Avatar]: https://github.com/cambridge-cares/TheWorldAvatar [Average Square Metre Price Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AverageSquareMetrePriceAgent diff --git a/Agents/DashboardAgent/README.md b/Agents/DashboardAgent/README.md index 7afe80436a2..535e52db9d4 100644 --- a/Agents/DashboardAgent/README.md +++ b/Agents/DashboardAgent/README.md @@ -1,6 +1,6 @@ # Dashboard Agent The Dashboard Agent is designed to set up and populate dashboards within a stack. These dashboards will require both spatial topological and time series data to be available within the stack. 
-Namely, it will require the concept of buildings, facilities, rooms, elements and connected sensors/devices from at minimal the [OntoBIM](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobim) and [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice) ontologies. +Namely, it will require the concept of buildings, facilities, rooms, elements and connected sensors/devices from at minimal the [OntoBIM](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobim) and [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice) ontologies. ## Instructions Before you can use the Dashboard Agent, there are some requirements you need to meet. Follow the steps below to ensure you have everything you need to successfully run the agent. @@ -21,12 +21,12 @@ repo_username.txt should contain your Github username. repo_password.txt should which must have a 'scope' that [allows you to publish and install packages](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages). ##### Stack containers -This agent requires the following tools, which **MUST** run on the same stack. Please read more from the [stack manager page](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) for setting these containers up. +This agent requires the following tools, which **MUST** run on the same stack. Please read more from the [stack manager page](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) for setting these containers up. 
(1) [Grafana](https://grafana.com/docs/grafana/latest/) dashboard - Requires the deployment of the built-in optional `grafana` service on the stack to configure and set up dashboards - For deploying the service, - - include `grafana` as a service following the [stack-manager configuration file](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#stack-configuration) + - include `grafana` as a service following the [stack-manager configuration file](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#stack-configuration) - include a `grafana_password` with your password as a Docker Secret in the `stack-manager/inputs/secrets` directory. - Once deployed, the service can be accessed at the `/analytics` route with a default username of admin. - The following Plugin must be installed: @@ -42,7 +42,7 @@ This agent requires the following tools, which **MUST** run on the same stack. P - Contains triples linking time series to facilities and/or assets - Mandatory structure: - A name must be appended to all buildings, facilities, rooms, assets, sensors, and measures/dataIRIs through the `Instance rdfs:label "name"^^xsd:string` triple. - - All sensor measures are attached according to the [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice) ontology. + - All sensor measures are attached according to the [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice) ontology. - Units can be included into the dashboard through the `MeasureInstance om:hasUnit UnitInstance. UnitInstance om:symbol "symbols"^^xsd:string.` triples but are **OPTIONAL**. - Only temperature and relative humidity can be currently retrieved for any room measures. Do note to include a min and max threshold triples for the facility holding these rooms. - ABox samples are documented [here](#3-data-model-requirements). 
@@ -57,8 +57,8 @@ docker compose -f "./docker/docker-compose.test.yml" up -d --build **PRODUCTION ENVIRONMENT** - Build this agent's image by issuing `docker compose build` within this folder. Do not start the container. - Copy the `dashboard-agent.json` file from the `stack-manager-input-config` folder into the `inputs/config/services` folder of the stack manager, adjusting the absolute path of the bind mount as required. -Please review the [different routes](#2-agent-route) to understand the purpose of these bind mounts. See [sample bind mounts](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#bind-mounts) for the configuration syntax. -- Start the stack manager as usual following [these instructions](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +Please review the [different routes](#2-agent-route) to understand the purpose of these bind mounts. See [sample bind mounts](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#bind-mounts) for the configuration syntax. +- Start the stack manager as usual following [these instructions](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). ### 2. Agent Route The agent currently offers two API routes: @@ -95,7 +95,7 @@ If the agent ran successfully, a JSON Object would be returned as follows. ``` ### 3. Data model requirements -This agent can retrieve the measures and their time series associated with a facility from the knowledge graph. Please ensure that the measure and time series triples conform to [TWA's time series standards](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-cities-ontologies/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and the standard `OM` model as illustrated in Figure 1. +This agent can retrieve the measures and their time series associated with a facility from the knowledge graph. 
Please ensure that the measure and time series triples conform to [TWA's time series standards](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and the standard `OM` model as illustrated in Figure 1. *Figure 1. Standard Time Series Measure ABox* ```mermaid @@ -113,12 +113,12 @@ The legend for the prefix-namespace is available below. Prefix | Namespace --- | --- [bot](https://w3c-lbd-cg.github.io/bot/) | `https://w3id.org/bot#` -[ontoam](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoassetmanagement) | `https://www.theworldavatar.com/kg/ontoassetmanagement/` -[ontobim](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobim) | `https://www.theworldavatar.com/kg/ontobim/` -[ontodevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice) | `https://www.theworldavatar.com/kg/ontodevice/` -[ontotechsystem](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotechnicalsystem) | `https://www.theworldavatar.com/kg/ontotechnicalsystem/` -[ontotimeseries](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries) | `https://www.theworldavatar.com/kg/ontotimeseries/` -[ontoubemmp](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoubemmp) | `https://www.theworldavatar.com/kg/ontoubemmp/` +[ontoam](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoassetmanagement) | `https://www.theworldavatar.com/kg/ontoassetmanagement/` +[ontobim](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobim) | `https://www.theworldavatar.com/kg/ontobim/` +[ontodevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice) | `https://www.theworldavatar.com/kg/ontodevice/` +[ontotechsystem](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotechnicalsystem) | 
`https://www.theworldavatar.com/kg/ontotechnicalsystem/` +[ontotimeseries](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries) | `https://www.theworldavatar.com/kg/ontotimeseries/` +[ontoubemmp](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoubemmp) | `https://www.theworldavatar.com/kg/ontoubemmp/` [om](https://github.com/HajoRijgersberg/OM) | `http://www.ontology-of-units-of-measure.org/resource/om-2/` [omgCD](https://www.omg.org/spec/COMMONS/Designators) | `https://www.omg.org/spec/Commons/Designators/` [saref](https://saref.etsi.org/core/) | `https://saref.etsi.org/core/` diff --git a/Agents/DataBridgeAgent/README.md b/Agents/DataBridgeAgent/README.md index fd0be1592f4..0850ecddba6 100644 --- a/Agents/DataBridgeAgent/README.md +++ b/Agents/DataBridgeAgent/README.md @@ -44,12 +44,12 @@ docker-compose up -d ``` docker build -t data-bridge-agent:versionNo . ``` - 2) Add the `/docker/data-bridge-agent.json` to the [`stack-manager/inputs/config/services`](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services) directory + 2) Add the `/docker/data-bridge-agent.json` to the [`stack-manager/inputs/config/services`](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services) directory - Please ensure the version numbers are targeted at the right image. If you are building it, please update the version number accordingly. 
3) Modify the absolute path of the agent's `config` folder to your absolute file path - For Windows users using WSL on Docker, the file path should start with `/mnt/c/`, which is equivalent to `C://` 4) Include this agent service into the stack configuration file at `stack-manager/inputs/config/.json` - - Read more in the [Stack Configuration](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) section + - Read more in the [Stack Configuration](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) section 5) Start the stack as per normal If the agent is successfully started, the endpoint at `http://localhost:3838/data-bridge-agent/status` should return the following message. @@ -69,7 +69,7 @@ There are currently four routes available: - `source`: The source SPARQL endpoint containing the triples to be transferred - `target`: The target SPARQL endpoint intended to store the transferred triples - Sample SPARQL endpoints for Blazegraph are [listed here](#4-sample-blazegraph-endpoints) - - **WARNING**: The transfer of triples may fail for large (>1 million) numbers of triples as it is dependent on the available RAM. The [JPS Base library's cloning tool](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/tools/cloning/CloningTool.java) can be used in that situation. + - **WARNING**: The transfer of triples may fail for large (>1 million) numbers of triples as it is dependent on the available RAM. The [JPS Base library's cloning tool](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/tools/cloning/CloningTool.java) can be used in that situation. - A sample `POST` request using curl on a CLI: ``` curl -X POST --header 'Content-Type: application/json' -d '{ @@ -99,13 +99,13 @@ curl -X GET 'localhost:3838/data-bridge-agent/sql?srcDbName=db' ``` 4. 
`/timeseries` route: - - Execute the agent's task through an HTTP `POST` request using the [time series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries). This route will instantiate the time series inputs sent in the request into the stack's knowledge graph. + - Execute the agent's task through an HTTP `POST` request using the [time series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries). This route will instantiate the time series inputs sent in the request into the stack's knowledge graph. - The request will require the following parameters: - - `timeClass` : Refers to the time series classes as written in the [time series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries#instantiation-in-kg). + - `timeClass` : Refers to the time series classes as written in the [time series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries#instantiation-in-kg). - `timestamp` : A JSONArray containing the time stamp as strings in the format of `YYYY-MM-DD'T'HH:MM:SS`. - `values` : A JSONObject containing the time series values. A data IRI is inserted as the key and paired with their values as a JSONArray. For example: `{"dataIRI": [1, 2, 3]}`. - `namespace`: Specifies the SPARQL endpoint to store the instantiated time series data. See [Sample Blazegraph endpoints](#4-sample-blazegraph-endpoints) - - `database` : Specifies the database name within the same stack. Do note that this agent is not intended to instantiate data for non-stack databases. If required, please use the [Timeseries Client](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) in your agent instead. 
+ - `database` : Specifies the database name within the same stack. Do note that this agent is not intended to instantiate data for non-stack databases. If required, please use the [Timeseries Client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) in your agent instead. - A sample `POST` request using curl on a CLI: ``` curl -X POST --header 'Content-Type: application/json' -d '{ diff --git a/Agents/DerivationAgentPythonExample/README.md b/Agents/DerivationAgentPythonExample/README.md index 13f1547ee97..40d6e891781 100644 --- a/Agents/DerivationAgentPythonExample/README.md +++ b/Agents/DerivationAgentPythonExample/README.md @@ -3,7 +3,7 @@ ## Purpose This agent folder is intended as a template that one can copy and adapt to turn their own Python code into a derivation agent. The example requires [`pyderivationagent`](https://pypi.org/project/pyderivationagent/)>=1.4.1. -This document covers four stages: development, test, package & publish, and deployment. For each stage, a step-by-step instruction is provided. Before continuing with this tutorial, it is recommended to read the [documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent) of `pyderivationagent` and all the relevant links. +This document covers four stages: development, test, package & publish, and deployment. For each stage, a step-by-step instruction is provided. Before continuing with this tutorial, it is recommended to read the [documentation](https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent) of `pyderivationagent` and all the relevant links. If you identified anything that can be improved to make it easier for newcomers, please feel free to open a [pull request](https://github.com/cambridge-cares/TheWorldAvatar/pulls) or get in touch with the maintainer of the package. @@ -54,7 +54,7 @@ The following command can be used to install all required packages. 
() $ python -m pip install -e .[dev] ``` -As `pyderivationagent` library relies on the `py4jps` package, Java 11 is required. For Windows, it is recommended to obtain OpenJDK 11 from [here](https://developers.redhat.com/products/openjdk/download) and follow the [instructions](https://access.redhat.com/documentation/en-us/openjdk/11/html-single/installing_and_using_openjdk_11_for_windows/index). For linux environment, one can install via: +As `pyderivationagent` library relies on the `py4jps` package, Java 11 is required. For Windows, it is recommended to obtain OpenJDK 11 from [here](https://developers.redhat.com/products/openjdk/download) and follow the [instructions](https://docs.redhat.com/en/documentation/red_hat_build_of_openjdk/11/html/installing_and_using_red_hat_build_of_openjdk_11_for_windows/index). For linux environment, one can install via: `(Linux)` ```sh @@ -102,7 +102,7 @@ The derivation agent modifies the knowledge graph automatically, it is therefore ### Local agent integration test This example is provided in `docker-compose-testcontainers.yml` file. Other relevant files are provided in the `tests` folder. -1. `dummy_services_secrets` folder: credential for blazegraph container used in test, and potentially auth json file for email services (for more information on this, please refer to the official documentation of [`pyderivationagent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent)) +1. `dummy_services_secrets` folder: credential for blazegraph container used in test, and potentially auth json file for email services (for more information on this, please refer to the official documentation of [`pyderivationagent`](https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent)) 2. `test_triples` folder: test triples for derivation inputs (example ABox), and example TBox where relevant concepts and relationships are defined 3. `agent.env.test` file: agent configuration parameters 4. 
`conftest.py` for pytest: all pytest fixtures and other utility functions @@ -140,7 +140,7 @@ For developers new to `Run and Debug` configurations, please refer to these offi - [Use Docker Compose: Debug](https://code.visualstudio.com/docs/containers/docker-compose#_debug) - [Debugpy](https://github.com/microsoft/debugpy) -For developers interested to see more example of possible configurations, including those relevant to the usage of [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager), please refer to [`PropertyValueEstimationAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/PropertyValueEstimationAgent). +For developers interested to see more example of possible configurations, including those relevant to the usage of [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager), please refer to [`PropertyValueEstimationAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/PropertyValueEstimationAgent). ### Develop tests for new agents @@ -224,7 +224,7 @@ To release your agent, you may want to update information in `Dockerfile` and `d   ## Deployment -Example of configurations for the agent are provided in `agent.env.example` file. The knowledge graph endpoints used by this agent are specified using `SPARQL_QUERY_ENDPOINT` and `SPARQL_UPDATE_ENDPOINT`, with the credentials specified using `KG_USERNAME` and `KG_PASSWORD`. To avoid commit these information to git at deployment, developer may make a copy of this example file as `agent.env`. As `*.env` entry already exist in `.gitignore`, this new created file will be omitted. Any credentials encoded are safe. The `OntoAgent:Service` IRI of the agent is specified using `ONTOAGENT_SERVICE_IRI`. The periodically time interval to monitor asynchronous derivation is specified by `DERIVATION_PERIODIC_TIMESCALE`. 
One may also provide `DERIVATION_INSTANCE_BASE_URL` to be used by DerivationClient when creating derivations related instances. `ONTOAGENT_OPERATION_HTTP_URL` can be used to specify the URL of the agent that listens the request for updating synchronous derivations. To help monitoring the agent running status, an email notification feature is also provided and can be set up via `EMAIL_RECIPIENT`, `EMAIL_SUBJECT_PREFIX`, `EMAIL_USERNAME`, `EMAIL_AUTH_JSON_PATH` and `EMAIL_START_END_ASYNC_DERIVATIONS`. More details are provided in the [documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent) of the `pyderivationagent` package. Developers needs to ensure that this file is correctly updated before deploying the Docker Image. +Example of configurations for the agent are provided in `agent.env.example` file. The knowledge graph endpoints used by this agent are specified using `SPARQL_QUERY_ENDPOINT` and `SPARQL_UPDATE_ENDPOINT`, with the credentials specified using `KG_USERNAME` and `KG_PASSWORD`. To avoid commit these information to git at deployment, developer may make a copy of this example file as `agent.env`. As `*.env` entry already exist in `.gitignore`, this new created file will be omitted. Any credentials encoded are safe. The `OntoAgent:Service` IRI of the agent is specified using `ONTOAGENT_SERVICE_IRI`. The periodically time interval to monitor asynchronous derivation is specified by `DERIVATION_PERIODIC_TIMESCALE`. One may also provide `DERIVATION_INSTANCE_BASE_URL` to be used by DerivationClient when creating derivations related instances. `ONTOAGENT_OPERATION_HTTP_URL` can be used to specify the URL of the agent that listens the request for updating synchronous derivations. 
To help monitoring the agent running status, an email notification feature is also provided and can be set up via `EMAIL_RECIPIENT`, `EMAIL_SUBJECT_PREFIX`, `EMAIL_USERNAME`, `EMAIL_AUTH_JSON_PATH` and `EMAIL_START_END_ASYNC_DERIVATIONS`. More details are provided in the [documentation](https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent) of the `pyderivationagent` package. Developers needs to ensure that this file is correctly updated before deploying the Docker Image. Once the env file is prepared, the docker image can be deployed via: @@ -238,7 +238,7 @@ docker run --env-file --name derivation_agent_python_example ghc ## Adapt agent to work with stack > **NOTE** This agent example will be updated to incorporate Stack in the next iteration. -This agent example has been adapted to work with a Docker stack spun up by the [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) for a real use-case. For more information, please refer to [`PropertyValueEstimationAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/PropertyValueEstimationAgent). +This agent example has been adapted to work with a Docker stack spun up by the [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) for a real use-case. For more information, please refer to [`PropertyValueEstimationAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/PropertyValueEstimationAgent).   diff --git a/Agents/DistrictHeatingAgent/README.md b/Agents/DistrictHeatingAgent/README.md index 27f30af3ae3..663db27f947 100644 --- a/Agents/DistrictHeatingAgent/README.md +++ b/Agents/DistrictHeatingAgent/README.md @@ -1,6 +1,6 @@ # District Heating Agent -This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the district heating network located in a midsize town in Germany. 
Its purpose is to instantiate instances of the district heating network. The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the district heating network located in a midsize town in Germany. Its purpose is to instantiate instances of the district heating network. The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the KG and database. ## Usage diff --git a/Agents/DistrictHeatingEmissionEstimationAgent/README.md b/Agents/DistrictHeatingEmissionEstimationAgent/README.md index d2809b6c33a..5c344b295df 100644 --- a/Agents/DistrictHeatingEmissionEstimationAgent/README.md +++ b/Agents/DistrictHeatingEmissionEstimationAgent/README.md @@ -165,14 +165,14 @@ Markus Hofmeister (mh807@cam.ac.uk), August 2023 [py4jps]: https://pypi.org/project/py4jps/#description [Github container registry]: https://ghcr.io [personal access token]: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens -[Derived Information Framework]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/derivation -[Stack manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[derivation agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent - -[OntoTimeSeries]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries -[OntoHeatNetwork]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoheatnetwork -[OntoDispersion]: 
https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodispersion -[OntoDerivation]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoderivation +[Derived Information Framework]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/derivation +[Stack manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[derivation agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent + +[OntoTimeSeries]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries +[OntoHeatNetwork]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoheatnetwork +[OntoDispersion]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodispersion +[OntoDerivation]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoderivation [example triples]: ./tests/test_triples/example_abox.ttl diff --git a/Agents/DistrictHeatingOptimisationAgent/README.md b/Agents/DistrictHeatingOptimisationAgent/README.md index 06cc76651a2..66eb54198d9 100644 --- a/Agents/DistrictHeatingOptimisationAgent/README.md +++ b/Agents/DistrictHeatingOptimisationAgent/README.md @@ -205,15 +205,15 @@ Markus Hofmeister (mh807@cam.ac.uk), November 2023 [py4jps]: https://pypi.org/project/py4jps/#description [Github container registry]: https://ghcr.io [personal access token]: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens -[Derived Information Framework]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/derivation -[Stack manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[derivation agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent +[Derived Information Framework]: 
https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/derivation +[Stack manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[derivation agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent [preprint 275]: https://como.ceb.cam.ac.uk/preprints/275/ [chained derivations]: https://lucid.app/publicSegments/view/a00b553e-d9d1-4845-97b7-f480e980898e/image.png [chained derivations markup]: https://lucid.app/publicSegments/view/de4041e1-aee2-44d9-82ca-fffca25f5133/image.png [OntoTimeSeries]: https://miro.com/app/board/uXjVPFaO5As=/ [OntoHeatNet]: https://miro.com/app/board/uXjVOhnB9_4=/ -[stack deployment]: https://github.com/cambridge-cares/pirmasens/tree/main/districtheating_stack +[stack deployment]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/psdt [docker compose file]: ./docker-compose.yml diff --git a/Agents/DistrictHeatingOptimisationTriggerAgent/README.md b/Agents/DistrictHeatingOptimisationTriggerAgent/README.md index 48d5ccde2bb..6869ed066b5 100644 --- a/Agents/DistrictHeatingOptimisationTriggerAgent/README.md +++ b/Agents/DistrictHeatingOptimisationTriggerAgent/README.md @@ -13,7 +13,7 @@ The dockerised agent can be deployed as standalone version (i.e., outside a larg Before starting the agent, the `disp:hasOntoCityGMLCityObject` range instances in the [static_point_sources.ttl] file need to be populated manually with the corresponding exhaust outlets/chimneys, as there is currently no way to extract these CityObject IRIs programmatically. The agent will not start in case syntactically invalid IRIs are provided. As the entire [resources] folder is mounted into the container, no rebuilding is required after changing the triples to upload; a simple restart shall be sufficient. -The published agent image assumes the stack name to be `dhstack`. 
This is because this agent is an integral part of a larger stack and requires another service to be finished before it can start up. To determine when this is the case that service is curled via `dhstack-dh-instantiation`. Further details can be found [here](https://github.com/cambridge-cares/pirmasens/tree/main/districtheating_stack).
+The published agent image assumes the stack name to be `dhstack`. This is because this agent is an integral part of a larger stack and requires another service to be finished before it can start up. To determine when this is the case that service is curled via `dhstack-dh-instantiation`. Further details can be found [here](https://github.com/cambridge-cares/pirmasens/tree/main/districtheating_stack).
To deploy this agent to another stack, please adjust the stack name in the [delayed startup script] prior to re-building the image. @@ -120,7 +120,7 @@ Markus Hofmeister (mh807@cam.ac.uk), July 2023 [OntoTimeSeries (Miro board)]: https://miro.com/app/board/uXjVPFaO5As=/ [OntoHeatNet (Miro board)]: https://miro.com/app/board/uXjVOhnB9_4=/ [chained derivations]: https://lucid.app/publicSegments/view/a00b553e-d9d1-4845-97b7-f480e980898e/image.png -[specifying custom containers]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#specifying-custom-containers +[specifying custom containers]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#specifying-custom-containers [flaskapp init]: ./agent/flaskapp/__init__.py diff --git a/Agents/ESPHomeAgent/README.md b/Agents/ESPHomeAgent/README.md index ebf4882858c..d290f245a45 100644 --- a/Agents/ESPHomeAgent/README.md +++ b/Agents/ESPHomeAgent/README.md @@ -4,7 +4,7 @@ This agent is designed to query for the relevant IRIs and information from the k The agent will toggle the status of a component that is being controlled via the ESPHome web server based on the latest timeseries data value, latest status of the component and the setpoint value that it queries from the knowledge graph.(e.g. controlling the on off state of a cooling fan based on the latest measured temperature and temperature setpoint queried from the knowledge graph.) -The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and [Access Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_ACCESS_AGENT) to interact with both the KG and database. 
+The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and [Access Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent) to interact with both the KG and database. Before explaining the usage of the agent, we will briefly summarize the ESPHome API that is contacted by one of the classes in this package to retrieve data. @@ -51,9 +51,9 @@ http://///turn_off 1) It is required to have access to a knowledge graph SPARQL endpoint and Postgres database. These can run on the same machine or need to be accessible from the host machine via a fixed URL. This can be either in form of a Docker container or natively running on a machine. It is not in the scope of this README to explain the set-up of a knowledge graph triple store or Postgres database. -2) An Access Agent instance should be set up and working properly. The necessary routing information should be stored in a triple store such as Blazegraph. Check [Uploading-routing-information](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_ACCESS_AGENT#Uploading-routing-information) for more information. +2) An Access Agent instance should be set up and working properly. The necessary routing information should be stored in a triple store such as Blazegraph. Check [Uploading-routing-information](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent#Uploading-routing-information) for more information. -3) It is necessary to have the component and the setpoint that determines when to toggle the component instantiated in the knowledge graph based on [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice). 
It is necessary to have the timeseries of the status of the component instantiated in the knowledge graph, one such agent that does this is [ESPHome Update Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/ESPHomeUpdateAgent). An example of such an instantiation is shown below: +3) It is necessary to have the component and the setpoint that determines when to toggle the component instantiated in the knowledge graph based on [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice). It is necessary to have the timeseries of the status of the component instantiated in the knowledge graph, one such agent that does this is [ESPHome Update Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/ESPHomeUpdateAgent). An example of such an instantiation is shown below: ``` rdf:type ; ; @@ -149,7 +149,7 @@ If you want to spin up this agent as part of a stack, instead of `docker-compose - Replace the contents of `config/client.properties` with `config/client.properties_stack` and the contents of `config/esphome-client.properties` with `config/esphome-client.properties_stack`, inserting the name of your stack. - Build the image via `docker-compose build`. Do not start the container. - Copy the `json` file from the `stack-manager-input-config` folder into `TheWorldAvatar/Deploy/dynamic/stack-manager/inputs/config/services/`. -- Go to the stack manager folder by following this route: `TheWorldAvatar/Deploy/stacks/dynamic/stack-manager/`, check whether there is a `.json` under the sub folder `/inputs/config/` and create one if it doesn't exist. If it exists already, append the agent to the json file. (Read [Stack configuration](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#stack-configuration) for more information.) 
+- Go to the stack manager folder by following this route: `TheWorldAvatar/Deploy/stacks/dynamic/stack-manager/`, check whether there is a `.json` under the sub folder `/inputs/config/` and create one if it doesn't exist. If it exists already, append the agent to the json file. (Read [Stack configuration](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#stack-configuration) for more information.) - Start the stack manager as usual. This should start the container. diff --git a/Agents/ESPHomeUpdateAgent/README.md b/Agents/ESPHomeUpdateAgent/README.md index f89186636e2..2bb01f54873 100644 --- a/Agents/ESPHomeUpdateAgent/README.md +++ b/Agents/ESPHomeUpdateAgent/README.md @@ -3,7 +3,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding status of components that are controllable via ESPHome. It's only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS base lib to interact with both the KG and database. Before explaining the usage of the agent, we will briefly summarize the ESPHome API that is contacted by one of the classes in this package to retrieve data. @@ -147,7 +147,7 @@ If you want to spin up this agent as part of a stack, instead of `docker-compose - Replace the contents of `config/client.properties` with `config/client.properties_stack`, inserting the name of your stack. - Build the image via `docker-compose build`. Do not start the container. 
- Copy the `json` file from the `stack-manager-input-config` folder into `TheWorldAvatar/Deploy/dynamic/stack-manager/inputs/config/services/`. -- Go to the stack manager folder by following this route: `TheWorldAvatar/Deploy/stacks/dynamic/stack-manager/`, check whether there is a `.json` under the sub folder `/inputs/config/` and create one if it doesn't exist. If it exists already, append the agent to the json file. (Read [Stack configuration](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#stack-configuration) for more information.) +- Go to the stack manager folder by following this route: `TheWorldAvatar/Deploy/stacks/dynamic/stack-manager/`, check whether there is a `.json` under the sub folder `/inputs/config/` and create one if it doesn't exist. If it exists already, append the agent to the json file. (Read [Stack configuration](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#stack-configuration) for more information.) - Start the stack manager as usual. This should start the container. diff --git a/Agents/EmailAgent/README.md b/Agents/EmailAgent/README.md index 05b6e4d544a..cb225258fe6 100644 --- a/Agents/EmailAgent/README.md +++ b/Agents/EmailAgent/README.md @@ -13,7 +13,7 @@ Please note that it is not the intention for this EmailAgent to be used to send ## Building the Image -The agent directory contains the required files to build a Docker Image for the EmailAgent service; the `Dockerfile` file contains the instructions to build an Image; before making any changes to it, please consult the application's developer or the system administrators at CMCL. Files have also been added to ensure the agent is compatible for deplyment in a [stack environment](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md). 
+The agent directory contains the required files to build a Docker Image for the EmailAgent service; the `Dockerfile` file contains the instructions to build an Image; before making any changes to it, please consult the application's developer or the system administrators at CMCL. Files have also been added to ensure the agent is compatible for deployment in a [stack environment](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md). Please note the caveats below before attempting to build the service using Docker: diff --git a/Agents/EnergyPerformanceCertificateAgent/README.md b/Agents/EnergyPerformanceCertificateAgent/README.md index f88759dc3eb..dd82f03f2b1 100644 --- a/Agents/EnergyPerformanceCertificateAgent/README.md +++ b/Agents/EnergyPerformanceCertificateAgent/README.md @@ -233,7 +233,7 @@ Markus Hofmeister (mh807@cam.ac.uk), February 2023 [http://localhost:5007/epcagent]: http://localhost:5007/epcagent [Java Runtime Environment version >=11]: https://adoptopenjdk.net/?variant=openjdk8&jvmVariant=hotspot [JDBC driver]: https://jdbc.postgresql.org/download/ -[OntoBuiltEnv]: https://raw.githubusercontent.com/cambridge-cares/TheWorldAvatar/main/JPS_Ontology/ontology/ontobuiltenv/OntoBuiltEnv.owl +[OntoBuiltEnv]: https://raw.githubusercontent.com/TheWorldAvatar/ontology/refs/heads/main/ontology/ontobuiltenv/OntoBuiltEnv.owl [personal access token]: https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token [py4jps]: https://pypi.org/project/py4jps/#description [Upload SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/to-existing-droplet/ @@ -241,13 +241,13 @@ Markus Hofmeister (mh807@cam.ac.uk), February 2023 [Building Matching Readme]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/BuildingMatchingAgent/README.md -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[credentials]:
https://github.com/cambridge-cares/TheWorldAvatar/tree/1376-dev-building-matching-agent/Agents/BuildingMatchingAgent/credentials -[Digital Twin Visualisation Framework]:https://github.com/cambridge-cares/TheWorldAvatar/tree/main/web/digital-twin-vis-framework -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -[Stack Data Uploader]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-data-uploader/README.md -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[credentials]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BuildingMatchingAgent/credentials +[Digital Twin Visualisation Framework]:https://github.com/cambridge-cares/TheWorldAvatar/tree/main/web/twa-vis-framework +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md +[Stack Data Uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader/README.md +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar [CMCL Docker registry]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry diff --git a/Agents/EquipmentBookingAgent/README.md b/Agents/EquipmentBookingAgent/README.md index 7c1009703a1..3101af42768 100644 --- a/Agents/EquipmentBookingAgent/README.md +++ b/Agents/EquipmentBookingAgent/README.md @@ -67,12 +67,12 @@ Prefix | Namespace --- | --- [bot](https://w3c-lbd-cg.github.io/bot/) | `https://w3id.org/bot#` [fibo](https://github.com/edmcouncil/fibo/tree/master/FND/AgentsAndPeople) | 
`https://spec.edmcouncil.org/fibo/ontology/FND/AgentsAndPeople/People/` -[ontoam](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoassetmanagement) | `https://www.theworldavatar.com/kg/ontoassetmanagement/` -[ontobim](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobim) | `https://www.theworldavatar.com/kg/ontobim/` -[ontodevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice) | `https://www.theworldavatar.com/kg/ontodevice/` -[ontolab](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontolab) | `https://www.theworldavatar.com/kg/ontolab/` -[ontotechsystem](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotechnicalsystem) | `https://www.theworldavatar.com/kg/ontotechnicalsystem/` -[ontotimeseries](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries) | `https://www.theworldavatar.com/kg/ontotimeseries/` +[ontoam](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoassetmanagement) | `https://www.theworldavatar.com/kg/ontoassetmanagement/` +[ontobim](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobim) | `https://www.theworldavatar.com/kg/ontobim/` +[ontodevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice) | `https://www.theworldavatar.com/kg/ontodevice/` +[ontolab](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontolab) | `https://www.theworldavatar.com/kg/ontolab/` +[ontotechsystem](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotechnicalsystem) | `https://www.theworldavatar.com/kg/ontotechnicalsystem/` +[ontotimeseries](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries) | `https://www.theworldavatar.com/kg/ontotimeseries/` [time](https://www.w3.org/TR/owl-time/) | `http://www.w3.org/2006/time#` diff --git 
a/Agents/FHSashAndOccupancyAgent/README.MD b/Agents/FHSashAndOccupancyAgent/README.MD index 3d89bef4cdc..9479346c0f0 100644 --- a/Agents/FHSashAndOccupancyAgent/README.MD +++ b/Agents/FHSashAndOccupancyAgent/README.MD @@ -1,6 +1,6 @@ # Fumehood Sash And Occupancy Agent -This agent is designed to query for the sash opening percentage and occupied state of fumehoods and walkin-fumehoods, if any of the fumehoods and walkin-fumehoods is unoccupied and has a sash opening percentage higher than a certain threshold, an email will be sent to the relevant personnel via the [EmailSender class in the JPS Base Lib](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/email/EmailSender.java) and the [Email Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EmailAgent). The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and [remote store client](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) from the JPS_BASE_LIB to interact with both the KG and database. +This agent is designed to query for the sash opening percentage and occupied state of fumehoods and walkin-fumehoods, if any of the fumehoods and walkin-fumehoods is unoccupied and has a sash opening percentage higher than a certain threshold, an email will be sent to the relevant personnel via the [EmailSender class in the JPS Base Lib](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/email/EmailSender.java) and the [Email Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EmailAgent). 
The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and [remote store client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) from the JPS_BASE_LIB to interact with both the KG and database. ## Usage This part of the README describes the usage of the agent. The module itself can be packaged into an executable war, deployed as a web servlet on tomcat. Sending the appropriate request to the correct URL will initiate the agent. Since it uses the time-series client and remote store client to interact with the KG and database, the KG and database will be required to be set-up beforehand. @@ -12,11 +12,11 @@ The [next section](#requirements) will explain the requirements to run the agent 2) It is required to have the [Email Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EmailAgent) set up beforehand. -3) It is required to have the fumehoods and walkin-fumehoods devices already instantiated in the knowledge graph based on [ontoBMS](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobms) and [ontoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice). +3) It is required to have the fumehoods and walkin-fumehoods devices already instantiated in the knowledge graph based on [ontoBMS](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobms) and [ontoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice). 4) In order for the agent to work properly, it is also necessary for the fumehoods and walkin-fumehoods instances to have occupied states and sash opening percentages. 
-5) The occupied states and sash opening percentages should be instantiated as timeseries in the knowledge graph via the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries). +5) The occupied states and sash opening percentages should be instantiated as timeseries in the knowledge graph via the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries). An example of the instance can be found below: ``` diff --git a/Agents/FenlandTrajectoryAgent/README.md b/Agents/FenlandTrajectoryAgent/README.md index 41b2d2efa8b..ba1b8806826 100644 --- a/Agents/FenlandTrajectoryAgent/README.md +++ b/Agents/FenlandTrajectoryAgent/README.md @@ -110,16 +110,16 @@ Jiying Chen (jc2341@cam.ac.uk), May 2024 [OntoDevice]: https://www.theworldavatar.com/kg/ontodevice [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar -[TimeSeriesClient]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack Manager README]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -[JPS_Document]: https://github.com/cambridge-cares/TheWorldAvatar/blob/dev-pydantic-rdflib/JPS_BASE_LIB/python_wrapper/docs/examples/additional_java_lib.md +[TimeSeriesClient]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack Manager README]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md +[JPS_Document]: https://github.com/TheWorldAvatar/baselib/tree/main/python_wrapper/docs/examples/additional_java_lib.md [twa]: https://pypi.org/project/twa/ [virtual 
environment]: https://docs.python.org/3/tutorial/venv.html -[Python wrapper]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_wrapper#installing-additional-java-resources +[Python wrapper]: https://github.com/TheWorldAvatar/baselib/tree/main/python_wrapper#installing-additional-java-resources [allows you to publish and install packages]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts [Create SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/create-with-openssh/ [DataPoint]: https://www.metoffice.gov.uk/services/data/datapoint/about [Github container registry]: https://ghcr.io @@ -127,12 +127,12 @@ Jiying Chen (jc2341@cam.ac.uk), May 2024 [http://localhost:5000/]: http://localhost:5000/ [Java Development Kit version >=11]: https://adoptium.net/en-GB/temurin/releases/?version=11 [JDBC driver]: https://jdbc.postgresql.org/download/ -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [personal access token]: https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token [py4jps]: https://pypi.org/project/py4jps/#description -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -[Stack-Clients]: 
https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-MetOfficeAgent-withinStack/Deploy/stacks/dynamic/stack-clients +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar [Upload SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/to-existing-droplet/ [virtual environment]: https://docs.python.org/3/tutorial/venv.html @@ -148,8 +148,8 @@ Jiying Chen (jc2341@cam.ac.uk), May 2024 [resources]: ./resources [README]: ./README.md [FenlandTrajectoryAgent.json]: ./stack-manager-input-config-service/ -[stack manager configuration service directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services -[stack manager configuration directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/ +[stack manager configuration service directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services +[stack manager configuration directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/ [CURL commands folder]: ./example-requests/curl [SendHTTP]: ./example-requests/SendHTTP [preprocess]: ./example-requests/SendHTTP/gps_preprocess.http diff --git a/Agents/FilterAgent/README.md b/Agents/FilterAgent/README.md index 90c5be746aa..02de752515f 100644 --- a/Agents/FilterAgent/README.md +++ b/Agents/FilterAgent/README.md @@ -20,7 +20,7 @@ A stack-manager config similar to the following will deploy this agent: } ``` -Spin up a stack as per the [documented workflow](../../Deploy/stacks/dynamic/stack-manager/README.md). 
+Spin up a stack as per the [documented workflow](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md). The filter agent should be spun up along with the entire stack. There is no need to build or deploy the filter agent and its image separately. diff --git a/Agents/FloodAssessmentAgent/README.md b/Agents/FloodAssessmentAgent/README.md index 02bc7ba03a2..4ba798b8115 100644 --- a/Agents/FloodAssessmentAgent/README.md +++ b/Agents/FloodAssessmentAgent/README.md @@ -217,18 +217,18 @@ Markus Hofmeister (mh807@cam.ac.uk), November 2022 [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Derivation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[Derivation Agent configuration]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/python_derivation_agent/pyderivationagent/conf/agent_conf.py +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Derivation Agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[Derivation Agent configuration]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent/pyderivationagent/conf/agent_conf.py [EPC Instantiation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EnergyPerformanceCertificateAgent [Flood Warning Instantiation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FloodWarningAgent -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB -[OntoBuiltEnv]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuiltenv -[OntoFlood]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoflood +[JPS_BASE_LIB]: 
https://github.com/TheWorldAvatar/baselib/tree/main +[OntoBuiltEnv]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobuiltenv +[OntoFlood]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoflood [Property Value Estimation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/PropertyValueEstimationAgent -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#spinning-up-a-stack -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#spinning-up-a-stack +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [The World Avatar]: https://github.com/cambridge-cares/TheWorldAvatar diff --git a/Agents/FloodWarningAgent/README.md b/Agents/FloodWarningAgent/README.md index 865a1b5ec90..3b51e992f0b 100644 --- a/Agents/FloodWarningAgent/README.md +++ b/Agents/FloodWarningAgent/README.md @@ -121,15 +121,15 @@ Markus Hofmeister (mh807@cam.ac.uk), February 2023 [allows you to publish and install packages]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages [Github package repository]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Packages -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [personal access token]: https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token [py4jps]: https://pypi.org/project/py4jps/#description [Environment 
Agency Real Time flood-monitoring API]: https://environment.data.gov.uk/flood-monitoring/doc/reference#flood-warnings [FloodAssessmentAgent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FloodAssessmentAgent -[OntoFlood]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoflood -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager +[OntoFlood]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoflood +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar diff --git a/Agents/ForecastingAgent/README.md b/Agents/ForecastingAgent/README.md index 1bb6a0de318..a4a4ad3bd3a 100644 --- a/Agents/ForecastingAgent/README.md +++ b/Agents/ForecastingAgent/README.md @@ -300,18 +300,18 @@ Magnus Mueller (mm2692@cam.ac.uk), November 2022 [allows you to publish and install packages]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages [py4jps]: https://pypi.org/project/py4jps/#description -[TimeSeriesClient]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries +[TimeSeriesClient]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries [Darts]: https://unit8co.github.io/darts/index.html [Prophet]: https://unit8co.github.io/darts/generated_api/darts.models.forecasting.prophet_model.html [Facebook Prophet]: https://github.com/facebook/prophet [Github container registry]: https://ghcr.io [personal access token]: 
https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens -[Derived Information Framework]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/derivation -[Stack manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[derivation agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent +[Derived Information Framework]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/derivation +[Stack manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[derivation agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent -[OntoTimeSeries]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries -[OntoDerivation]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoderivation +[OntoTimeSeries]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries +[OntoDerivation]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoderivation [HTTP forecast error request]: ./resources/HTTP_evaluate_errors.http diff --git a/Agents/GFAAgent/README.md b/Agents/GFAAgent/README.md index 32198884694..d98d2ed81d6 100644 --- a/Agents/GFAAgent/README.md +++ b/Agents/GFAAgent/README.md @@ -5,7 +5,7 @@ This agent has been developed to compute the Gross Floor Area (GFA) and GFA cost 2) Calculate the GFA cost of buildings by GFA and standard unit price of GFA cost. ### 1.1 Requirements -1) The agent requires 3D building models based on the CityGML standard. These models must be uploaded through the [stack-data-uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader#citydb-data). 
+1) The agent requires 3D building models based on the CityGML standard. These models must be uploaded through the [stack-data-uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader#citydb-data). 2) The agent requires number of floors data. Therefore, the Building Floor Agent should be run firstly. 3) GFA cost standard unit price: The latest standard GFA cost should be integrated in csv file named as [ais_cost.csv](https://github.com/cambridge-cares/TheWorldAvatar/blob/4a5b2b6eaf60be88f95e1561da24b043943fec83/Agents/GFAAgent/stack-data-uploader-input-config/ais_cost.csv), which need to upload to postgresql by stack-data-uploader first. 4) Building usage matching: The mathcing of building cost standard and OntobBuiltEnv, which stores in the [cost_ontobuiltenv.csv](https://github.com/cambridge-cares/TheWorldAvatar/blob/4a5b2b6eaf60be88f95e1561da24b043943fec83/Agents/GFAAgent/src/main/resources/cost_ontobuiltenv.csv). diff --git a/Agents/GeoSegmentAgent/README.md b/Agents/GeoSegmentAgent/README.md index cc5c4d360fa..f2dcd829039 100644 --- a/Agents/GeoSegmentAgent/README.md +++ b/Agents/GeoSegmentAgent/README.md @@ -24,7 +24,7 @@ docker build -t geosegment_agent:1.0.0 . This will use the Dockerfile to build an image named `geosegment_agent` tagged with `1.0.0`. ### Note: The GeoSegment Agent can only run within a Docker stack. -The stack is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager), which is beyond the scope of this README. +The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager), which is beyond the scope of this README. 
A successful setup will result in 9 containers (or more): - Default containers diff --git a/Agents/HMLandRegistryAgent/README.md b/Agents/HMLandRegistryAgent/README.md index 505e32e8131..02a4d6efa3e 100644 --- a/Agents/HMLandRegistryAgent/README.md +++ b/Agents/HMLandRegistryAgent/README.md @@ -171,10 +171,10 @@ Markus Hofmeister (mh807@cam.ac.uk), March 2023 [HM Land Registry SPARQL endpoint]: http://landregistry.data.gov.uk/landregistry/query -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar [EPC Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EnergyPerformanceCertificateAgent [CMCL Docker Registry]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry diff --git a/Agents/HeatEmissionAgent/README.md b/Agents/HeatEmissionAgent/README.md index d6ab88595f5..2db84136326 100644 --- a/Agents/HeatEmissionAgent/README.md +++ b/Agents/HeatEmissionAgent/README.md @@ -44,7 +44,7 @@ The data needed to estimate the heat emissions of various types of industrial fa ##### Stack containers -If the agent is being run as part of a stack, the user can opt to use a namespace located in the stack blazegraph. 
The procedure for spinning up the stack is described at [stack manager page](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +If the agent is being run as part of a stack, the user can opt to use a namespace located in the stack blazegraph. The procedure for spinning up the stack is described at [stack manager page](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). #### 1.2 Docker Deployment diff --git a/Agents/HistoricalAQMeshAgent/README.md b/Agents/HistoricalAQMeshAgent/README.md index 79f88ad744b..1ea5495e597 100644 --- a/Agents/HistoricalAQMeshAgent/README.md +++ b/Agents/HistoricalAQMeshAgent/README.md @@ -3,7 +3,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the AQMesh air quality measuring station. It's only purpose is to retrieve new/historical data from excel files and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS base lib to interact with both the KG and database. ### Data retrieval and pre-processing diff --git a/Agents/HistoricalNTUEnergyAgent/README.md b/Agents/HistoricalNTUEnergyAgent/README.md index 0bf02cdb29e..82eb8fc78e1 100644 --- a/Agents/HistoricalNTUEnergyAgent/README.md +++ b/Agents/HistoricalNTUEnergyAgent/README.md @@ -146,7 +146,7 @@ The Dockerfile will automatically copy all properties files and mapping folder a ### [Option 2] Run in a Docker Stack **Note: Please follow instructions in Option 1 to build the agent first before proceeding with Option 2. 
** -Running this agent in a docker stack is a more advanced option as it facilitate interactions between other agents for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +Running this agent in a docker stack is a more advanced option as it facilitates interactions between other agents for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). A successful setup will result in 9 containers (optional 10): - Default containers - Stack Manager (exits when spins up all other containers) @@ -187,10 +187,10 @@ config/ |_ ... |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ##### Spin Up Stack -Follow the [steps](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) to spin up the stack. +Follow the [steps](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) to spin up the stack. # 3. Run diff --git a/Agents/HistoricalNUSDavisAgent/README.md b/Agents/HistoricalNUSDavisAgent/README.md index 4fe7a77fe9b..e611dd64f7a 100644 --- a/Agents/HistoricalNUSDavisAgent/README.md +++ b/Agents/HistoricalNUSDavisAgent/README.md @@ -2,7 +2,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the NUS Davis weather station.
It's only purpose is to retrieve new/historical data from excel files and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. -The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the KG and database. ### Data retrieval and pre-processing diff --git a/Agents/HistoricalPirmasensStationAgent/README.md b/Agents/HistoricalPirmasensStationAgent/README.md index 234baac7a3c..f2b2dd794bd 100644 --- a/Agents/HistoricalPirmasensStationAgent/README.md +++ b/Agents/HistoricalPirmasensStationAgent/README.md @@ -2,7 +2,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the Pirmasens weather station. It's only purpose is to retrieve new/historical data from csv files and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. -The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the KG and database. 
### Data retrieval and pre-processing diff --git a/Agents/Ifc2OntoBIMAgent/README.md b/Agents/Ifc2OntoBIMAgent/README.md index 276d4ccdb51..8a89c3e2506 100644 --- a/Agents/Ifc2OntoBIMAgent/README.md +++ b/Agents/Ifc2OntoBIMAgent/README.md @@ -123,7 +123,7 @@ docker-compose up -d If you want to spin up both this agent and the IfcOwlConverterAgent as part of a stack, do the following: - Copy the contents of `config/client.properties_stack` into `config/client.properties`, inserting the name of your stack and the desired namespaces. - Build both images by issuing `docker compose build` in this folder. There is no need to build the IfcOwlConverterAgent separately. Do not start the containers. -- Copy the `json` file from the `stack-manager-input-config` folder of both agents into the `inputs/config/services` folder of the stack manager, adjusting the absolute path of the bind mounts as required. The `data` bind mount for both agents **MUST** be the same, ideally in the `.../stack-manager/inputs/data` directory. See [sample bind mounts](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#bind-mounts) for the configuration syntax. +- Copy the `json` file from the `stack-manager-input-config` folder of both agents into the `inputs/config/services` folder of the stack manager, adjusting the absolute path of the bind mounts as required. The `data` bind mount for both agents **MUST** be the same, ideally in the `.../stack-manager/inputs/data` directory. See [sample bind mounts](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#bind-mounts) for the configuration syntax. - Start the stack manager as usual. This should start both containers. 
#### 2.3 Running the Agent diff --git a/Agents/Ifc2TilesetAgent/README.md b/Agents/Ifc2TilesetAgent/README.md index f3f519efe70..c9479591e34 100644 --- a/Agents/Ifc2TilesetAgent/README.md +++ b/Agents/Ifc2TilesetAgent/README.md @@ -46,12 +46,12 @@ The agent is designed for deployment on [Docker](#12-docker-deployment). Althoug These dependencies have been added to the Dockerfile. But in the event there is a need to update their links, please read the steps below on how to find and extract the dependencies. 1. **IfcOpenShell** - Required to load and parse IFC files - - Download required version from https://blenderbim.org/docs-python/ifcopenshell-python/installation.html + - Download required version from https://docs.ifcopenshell.org/ifcopenshell-python/installation.html - Extract and place the `ifcopenshell` from `blenderbim/libs/site/packages/` to the `\Lib\site-packages` of either your temporary python environment `venv` or the python program - Delete the remaining extracted content 2. **IfcConvert.exe** - Required to convert IFC to glb format - - Download IfcConvert.exe from: https://blenderbim.org/docs-python/ifcconvert/installation.html + - Download IfcConvert.exe from: https://docs.ifcopenshell.org/ifcconvert/installation.html - Extract it to `` ### 1.2 Docker Deployment: diff --git a/Agents/IfcOwlConverterAgent/README.md b/Agents/IfcOwlConverterAgent/README.md index 0fda1fb3093..8aaf038a3c6 100644 --- a/Agents/IfcOwlConverterAgent/README.md +++ b/Agents/IfcOwlConverterAgent/README.md @@ -68,7 +68,7 @@ docker-compose up -d If you want to spin up this agent as part of a stack, do the following: - Build the image via `docker compose build`. Do not start the container. -- Copy the `json` file from the `stack-manager-input-config` folder into the `inputs/config/services` folder of the stack manager, adjusting the absolute path of the bind mount as required. 
If you are using this agent with the Ifc2OntoBIM agent, the `data` bind mount for both agents **MUST** be the same, ideally in the `.../stack-manager/inputs/data` directory. See [sample bind mounts](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#bind-mounts) for the configuration syntax. +- Copy the `json` file from the `stack-manager-input-config` folder into the `inputs/config/services` folder of the stack manager, adjusting the absolute path of the bind mount as required. If you are using this agent with the Ifc2OntoBIM agent, the `data` bind mount for both agents **MUST** be the same, ideally in the `.../stack-manager/inputs/data` directory. See [sample bind mounts](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#bind-mounts) for the configuration syntax. - Start the stack manager as usual. This should start the container. #### 2.3 Running the Agent diff --git a/Agents/InequalityIndexCalculationAgent/README.md b/Agents/InequalityIndexCalculationAgent/README.md index f889fa8bc5f..293f9d9e98d 100644 --- a/Agents/InequalityIndexCalculationAgent/README.md +++ b/Agents/InequalityIndexCalculationAgent/README.md @@ -144,7 +144,7 @@ Once the Agent is deployed, it periodically (defined by `DERIVATION_PERIODIC_TIM Details about how to use the agent please see the [home page] of this agent ## Upper level instances instatiation -If you started from an empty namespace, or have not instantiate upper level instances such as `country` or `assumption`, the result would not be able to be associated with them. Therefore it is required to run the [upper_level_ontology_update.py](./inequalityindexcalculationagent/upper_level_ontology_update.py), simply run this command in the powershell terminal: +If you started from an empty namespace, or have not instantiate upper level instances such as `country` or `assumption`, the result would not be able to be associated with them. 
Therefore it is required to run the [upper_level_ontology_update.py](./inequalityindexagent/upper_level_ontology_update.py), simply run this command in the powershell terminal: ```bash py ./inequalityindexcalculationagent/upper_level_ontology_update.py @@ -168,7 +168,7 @@ Details can be refered to the individual [README.md](./python_scripts/README.md) # Authors Jieyang Xu (jx309@cam.ac.uk), May 2023 -[markup.py]:./inequalityindexcalculationagent/markup.py +[markup.py]:./inequalityindexagent/markup.py [home page]:https://htmlpreview.github.io/?https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/InequalityIndexCalculationAgent/index.html [CopCalculationAgent]:https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/CopCalculationAgent [OntoCAPE]:http://theworldavatar.com/ontology/ontocape/ @@ -187,16 +187,16 @@ Jieyang Xu (jx309@cam.ac.uk), May 2023 [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Derivation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[Derivation Agent configuration]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/python_derivation_agent/pyderivationagent/conf/agent_conf.py +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Derivation Agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[Derivation Agent configuration]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent/pyderivationagent/conf/agent_conf.py [EPC Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-EPCInstantiationAgent/Agents/EnergyPerformanceCertificateAgent -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: 
https://github.com/TheWorldAvatar/baselib/tree/main [OntoRegionalAnalysis]: http://www.theworldavatar.com/ontology/ontoregionalanlysis/OntoRegionalAnalysis.owl [HM Land Registry Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-PropertySalesInstantiationAgent/Agents/HMLandRegistryAgent -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#spinning-up-a-stack -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#spinning-up-a-stack +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [The World Avatar]: https://github.com/cambridge-cares/TheWorldAvatar [Average Square Metre Price Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AverageSquareMetrePriceAgent diff --git a/Agents/IsochroneAgent/README.md b/Agents/IsochroneAgent/README.md index 98d6db4ede0..2575b3b421f 100644 --- a/Agents/IsochroneAgent/README.md +++ b/Agents/IsochroneAgent/README.md @@ -12,7 +12,7 @@ The agent has been implemented to work in the stack. Follow the instructions in ### 2.2. Uploading OSM Data via stack-data-uploader 1) Download desired bounding box from [BBBike.org](https://extract.bbbike.org/) or [GeoFabrik](https://download.geofabrik.de/) in `.pbf` format. -2) `.pbf` uploaded via [stack-data-uploader] in [osm2pgrouting](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader#osm-data) data type. +2) `.pbf` uploaded via [stack-data-uploader] in [osm2pgrouting](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader#osm-data) data type. 
### 2.3. Uploading population data via stack-data-uploader 1) Download High Resolution Population Density Maps from [HDX - Meta Data For Good](https://data.humdata.org/dataset/germany-high-resolution-population-density-maps-demographic-estimates?). @@ -45,7 +45,7 @@ To use the example created in [15MSC in Pirmasens](inputs/15MSC/POIqueries/), re [EdgeTable](https://docs.pgrouting.org/2.5/en/pgRouting-concepts.html#description-of-the-edges-sql-query-for-dijkstra-like-functions) describes the characteristic of the road networks. It is used to define the transport mode and road conditions during the calculation of isochrone. EdgeTableSQL follows the following format `TransportMode_RoadConditions.sql`. -1) `TransportMode` and `RoadConditions` refers to the ontology classes developed in [OntoIsochrone](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontoisochrone/OntoIsochrone.owl). +1) `TransportMode` and `RoadConditions` refers to the ontology classes developed in [OntoIsochrone](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoisochrone/OntoIsochrone.owl). 2) The SQL statement content refers to the cost table used for routing calculations. EdgeTableSQL are created for [15MSC in Pirmasens](inputs/15MSC/edgesSQLTable/) and [UR in King's Lynn](inputs/UR/edgesSQLTable/) use cases. @@ -111,13 +111,13 @@ The debugger port will be available at 5005. ### 7.1 Feature Info Agent The isochrones is designed to be compatible with TWA-VF and queryable via FeatureInfoAgent. 
-1) In the directory [stack-manager-config/data/webspace/](stack-manager-config/data/webspace/), contains the TWA-VF `data.json` prepared for the different scnearios that is meant to be placed inside [`stack-manager/inputs/data/webspace`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/data), following instruction [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#example---including-a-visualisation). -2) In the directory [stack-manager-config/data/fia-queries/](stack-manager-config/data/fia-queries/), contains `SPARQL queries` and `fia-config.json` to be used with the agent [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent#configuration). Place the `fia-config.json` and `isochrone.sparql` inside `stack-manager/inputs/data/queries` as according the bind mount path specified in the stack-manager config's [`feature-info-agent.json`](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/FeatureInfoAgent/sample/feature-info-agent.json). +1) In the directory [stack-manager-config/data/webspace/](stack-manager-config/data/webspace/), contains the TWA-VF `data.json` prepared for the different scenarios that is meant to be placed inside [`stack-manager/inputs/data/webspace`](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/data), following instruction [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#example---including-a-visualisation). +2) In the directory [stack-manager-config/data/fia-queries/](stack-manager-config/data/fia-queries/), contains `SPARQL queries` and `fia-config.json` to be used with the agent [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent#configuration).
Place the `fia-config.json` and `isochrone.sparql` inside `stack-manager/inputs/data/queries` as according the bind mount path specified in the stack-manager config's [`feature-info-agent.json`](https://github.com/TheWorldAvatar/Feature-Info-Agent/blob/main/sample/feature-info-agent.json). -[stack-data-uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services +[stack-data-uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services [inputs]: stack-data-uploader-inputs/ diff --git a/Agents/LSOAInputAgent/README.md b/Agents/LSOAInputAgent/README.md index fb24027571a..00d3bae2bf7 100644 --- a/Agents/LSOAInputAgent/README.md +++ b/Agents/LSOAInputAgent/README.md @@ -4,7 +4,7 @@ The `LSOAInput agent` is dedicated to process data around the UK Lower-layer Sup This agent extract data and turn it into Knowledge Graph. The data, such as electricity consumption, gas consumption, fuel poverty, climate (temperature) and geometric shape are used to perform the use case of analysing the deployment of heat pump. The data instatiated in the knowledge graph follows [Ontoclimate](http://www.theworldavatar.com/ontology/ontogasgrid/ontoclimate.owl), [Ontofuelpoverty](http://www.theworldavatar.com/ontology/ontofuelpoverty/ontofuelpoverty.owl) and [Ontogasgrid](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/GasGrid) ontologies in the [TheWorldAvatar](https://github.com/cambridge-cares/TheWorldAvatar). 
-The agent is implemented as Docker container to be deployed to a Docker stack spun up by the [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +The agent is implemented as Docker container to be deployed to a Docker stack spun up by the [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). Please note that the use of derive informtaion framework for heat pump analysis only need to run part of this agent in advance (to upload temperature data), other functions serve as an supplementary functions to uploaded triples to the blazegraph. @@ -154,7 +154,7 @@ Jieyang Xu (jx309@cam.ac.uk), Feroz Farazi (msff2@cam.ac.uk) Dec 2022 [allows you to publish and install packages]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts [Create SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/create-with-openssh/ [DataPoint]: https://www.metoffice.gov.uk/services/data/datapoint/about [Github container registry]: https://ghcr.io @@ -162,12 +162,12 @@ Jieyang Xu (jx309@cam.ac.uk), Feroz Farazi (msff2@cam.ac.uk) Dec 2022 [http://localhost:5000/]: http://localhost:5000/ [Java Development Kit version >=11]: https://adoptium.net/en-GB/temurin/releases/?version=11 [JDBC driver]: https://jdbc.postgresql.org/download/ -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [personal access token]: 
https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token [py4jps]: https://pypi.org/project/py4jps/#description -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-MetOfficeAgent-withinStack/Deploy/stacks/dynamic/stack-clients +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar [Upload SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/to-existing-droplet/ [virtual environment]: https://docs.python.org/3/tutorial/venv.html diff --git a/Agents/MackayCalculatorAgent/readme.md b/Agents/MackayCalculatorAgent/readme.md index 87de7fdd1eb..7e44d19785a 100644 --- a/Agents/MackayCalculatorAgent/readme.md +++ b/Agents/MackayCalculatorAgent/readme.md @@ -102,5 +102,5 @@ For example, to start it on your local machine and on port 5000: flask run --host=0.0.0.0 --port=5000 ``` [MacKay Data Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/MackayDataAgent -[Derived Information Framework]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/derivation -[OntoTimeSeries]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries \ No newline at end of file +[Derived Information Framework]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/derivation +[OntoTimeSeries]: 
https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries \ No newline at end of file diff --git a/Agents/MackayDataAgent/README.md b/Agents/MackayDataAgent/README.md index 34d9ffc978c..60ff0b5d78d 100644 --- a/Agents/MackayDataAgent/README.md +++ b/Agents/MackayDataAgent/README.md @@ -119,13 +119,13 @@ We map three Mackay inputs data to quantity instances in TWA KG. A proposed vers -[TimeSeriesClient]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries +[TimeSeriesClient]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries [Github container registry]: https://ghcr.io [personal access token]: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens -[Derived Information Framework]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/derivation -[derivation agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[OntoTimeSeries]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries -[OntoDerivation]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoderivation +[Derived Information Framework]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/derivation +[derivation agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[OntoTimeSeries]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontotimeseries +[OntoDerivation]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoderivation [API Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/APIAgent [Forecasting Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/ForecastingAgent diff --git 
a/Agents/MetOfficeAgent/README.md b/Agents/MetOfficeAgent/README.md index 71b0f68815b..cc31baaaff8 100755 --- a/Agents/MetOfficeAgent/README.md +++ b/Agents/MetOfficeAgent/README.md @@ -209,7 +209,7 @@ Markus Hofmeister (mh807@cam.ac.uk), January 2022 [allows you to publish and install packages]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-apache-maven-registry#authenticating-to-github-packages [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts [Create SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/create-with-openssh/ [DataPoint]: https://www.metoffice.gov.uk/services/data/datapoint/about [Github container registry]: https://ghcr.io @@ -217,18 +217,18 @@ Markus Hofmeister (mh807@cam.ac.uk), January 2022 [http://localhost:5000/]: http://localhost:5000/ [Java Development Kit version >=11]: https://adoptium.net/en-GB/temurin/releases/?version=11 [JDBC driver]: https://jdbc.postgresql.org/download/ -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB -[OntoEMS]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoems +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main +[OntoEMS]: https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoems [personal access token]: https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token [py4jps]: https://pypi.org/project/py4jps/#description -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[spin up the stack]: 
https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-MetOfficeAgent-withinStack/Deploy/stacks/dynamic/stack-clients +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar [Upload SSH key]: https://docs.digitalocean.com/products/droplets/how-to/add-ssh-keys/to-existing-droplet/ [virtual environment]: https://docs.python.org/3/tutorial/venv.html [VSCode via SSH]: https://code.visualstudio.com/docs/remote/ssh -[StackClients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[StackClients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [CMCL Docker registry]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry diff --git a/Agents/NTUDataAgent/README.md b/Agents/NTUDataAgent/README.md index 80783021731..141614f90d9 100644 --- a/Agents/NTUDataAgent/README.md +++ b/Agents/NTUDataAgent/README.md @@ -64,13 +64,13 @@ config/ |_ ... |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ### 4. 
Spin up a Docker Stack **Note: The docker container must run within the same stack as the HistoricalNTUEnergyAgent to get access and query the NTU Power Network Knowledge Graph for calculation.** -Running this agent in a docker stack can facilitate interactions between other agents and endpoints (Postgres, Blazegraph, etc,.) for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +Running this agent in a docker stack can facilitate interactions between other agents and endpoints (Postgres, Blazegraph, etc,.) for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). ## Run Once the stack is up and running, the agent can be activated by sending a Curl request as shown below with the paramters stack=true and the desired date: diff --git a/Agents/NTUEnergyClusterAgent/README.md b/Agents/NTUEnergyClusterAgent/README.md index 64d00e1a5fe..8531f4ff88e 100644 --- a/Agents/NTUEnergyClusterAgent/README.md +++ b/Agents/NTUEnergyClusterAgent/README.md @@ -11,7 +11,7 @@ For the agent to process opf results a power system must be instantiated by the #### NTU Power System Knowledge Graph - For details to instantiate the NTU Power System Knowledge Graph, pleaes refer to the [HistoricalNTUEnergyAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HistoricalNTUEnergyAgent). -- Both NTUEnergyClusterAgent and HistoricalNTUEnergyAgent should run in the same stack in which they interact with the same Blazegraph and Postgres endpoints. For details to spin up a stack, please refer to the [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +- Both NTUEnergyClusterAgent and HistoricalNTUEnergyAgent should run in the same stack in which they interact with the same Blazegraph and Postgres endpoints. 
For details to spin up a stack, please refer to the [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). # 2. Build & Run This part of the README explain the instruction to build the agent. @@ -59,13 +59,13 @@ config/ |_ ... |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ### [Step 3] Spin up a Docker Stack **Note: The docker container must run within the same stack as the [HistoricalNTUEnergyAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HistoricalNTUEnergyAgent) to get access and query the NTU Power Network Knowledge Graph for calculation.** -Running this agent in a docker stack can facilitate interactions between other agents and endpoints (Postgres, Blazegraph, etc,.) for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +Running this agent in a docker stack can facilitate interactions between other agents and endpoints (Postgres, Blazegraph, etc,.) for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). 
A successful setup will result in 10 containers (optional 11): - Default containers - Stack Manager (exits when spins up all other containers) diff --git a/Agents/NTUForecastingAgent/README.md b/Agents/NTUForecastingAgent/README.md index 3fdc9fd31f0..5eac55dc9a3 100644 --- a/Agents/NTUForecastingAgent/README.md +++ b/Agents/NTUForecastingAgent/README.md @@ -63,7 +63,7 @@ The default namespace of the forecasting agent in ```forecasting-agent.json``` i #### Spinning up -Now, follow the [steps](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) to spin up the stack. The stack manager should bring up 12 containers in total. Activate the [Historical NTUEnergy Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HistoricalNTUEnergyAgent) by sending the specified curl request for the agent to instantiate the knowledge graph of the NTU power network. +Now, follow the [steps](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) to spin up the stack. The stack manager should bring up 12 containers in total. Activate the [Historical NTUEnergy Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HistoricalNTUEnergyAgent) by sending the specified curl request for the agent to instantiate the knowledge graph of the NTU power network. # 2. Usage of the agent diff --git a/Agents/NTUP2PEnergyAgent/README.md b/Agents/NTUP2PEnergyAgent/README.md index a50db8fcdd4..5199d3e923f 100644 --- a/Agents/NTUP2PEnergyAgent/README.md +++ b/Agents/NTUP2PEnergyAgent/README.md @@ -53,13 +53,13 @@ config/ |_ ... |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). 
+More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ### [Step 3] Spin up a Docker Stack -**Note: The docker container must run within the same stack as the [HistoricalNTUEnergyAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/1496-dev-instantiate-historic-ntuenergyconsumptiondata-2/Agents/HistoricalNTUEnergyAgent) to get access and query the NTU Power Network Knowledge Graph for calculation.** +**Note: The docker container must run within the same stack as the [HistoricalNTUEnergyAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HistoricalNTUEnergyAgent) to get access and query the NTU Power Network Knowledge Graph for calculation.** -Running this agent in a docker stack can facilitate interactions between other agents and endpoints (Postgres, Blazegraph, etc,.) for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +Running this agent in a docker stack can facilitate interactions between other agents and endpoints (Postgres, Blazegraph, etc,.) for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). A successful setup will result in 10 containers (optional 11): - Default containers - Stack Manager (exits when spins up all other containers) diff --git a/Agents/NTUPVLibAgent/README.md b/Agents/NTUPVLibAgent/README.md index 678a0a99f3e..8536e2c5c38 100644 --- a/Agents/NTUPVLibAgent/README.md +++ b/Agents/NTUPVLibAgent/README.md @@ -2,7 +2,7 @@ The NTUPVLib Agent is a modified version of the [PVLibAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/PVLibAgent) developed to work with the PV data available for the NTU use case. 
The agent assumes a standard PV for the purpose of running PVLib and then scales the output by the PV area for the building. An additional scale factor is included to scale the data to a magnitude appropriate for the 15-bus NTU test system. -This agent is designed to calculate AC and DC Power output from Photovaltaic Panels based on values provided in the properties files or values queried from the knowledge graph. It will then initialise the AC and DC Power as timeseries in the knowledge graph. The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the knowledge graph and database and uses [PvLib](https://pvlib-python.readthedocs.io/en/stable/) for it's AC and DC Power calculations. +This agent is designed to calculate AC and DC Power output from Photovaltaic Panels based on values provided in the properties files or values queried from the knowledge graph. It will then initialise the AC and DC Power as timeseries in the knowledge graph. The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the knowledge graph and database and uses [PvLib](https://pvlib-python.readthedocs.io/en/stable/) for it's AC and DC Power calculations. ## 1. Property files @@ -105,7 +105,7 @@ NTUPVLib is intended for deployment in a stack (option 2). For others, refer to #### [Option 2] As a stacked docker container -Running this agent in a docker stack is a more advanced option as it facilitate interactions between other agents for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). 
+Running this agent in a docker stack is a more advanced option as it facilitate interactions between other agents for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). A successful setup will result in 9 containers (optional 10): - Default containers @@ -155,10 +155,10 @@ config/ |_ ... |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). +More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ##### Spin Up Stack -Follow the [steps](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) to spin up the stack. +Follow the [steps](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) to spin up the stack. ##### Run the agent diff --git a/Agents/NUSDavisWeatherStationAgent/README.md b/Agents/NUSDavisWeatherStationAgent/README.md index 9298c44b6a3..092e0080094 100644 --- a/Agents/NUSDavisWeatherStationAgent/README.md +++ b/Agents/NUSDavisWeatherStationAgent/README.md @@ -2,7 +2,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the NUS Davis weather station. Its only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. 
The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the KG and database. Before explaining the usage of the agent, we will briefly summarize the weather station API that is diff --git a/Agents/NetworkAnalysisAgent/README.md b/Agents/NetworkAnalysisAgent/README.md index 8e183b5537d..ddcd9453521 100644 --- a/Agents/NetworkAnalysisAgent/README.md +++ b/Agents/NetworkAnalysisAgent/README.md @@ -70,10 +70,10 @@ Spin up with `./stack.sh start ` in the [stack-manager]'s main folde The debugger port will be available at 5005. ## 7. TWA-VF Visualisation -1) In the directory [stack-manager-config/data/webspace/](stack-manager-config/data/webspace/), contains the TWA-VF `data.json` prepared for the different scnearios that is meant to be placed inside [`stack-manager/inputs/data/webspace`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/data), following instruction [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#example---including-a-visualisation). +1) In the directory [stack-manager-config/data/webspace/](stack-manager-config/data/webspace/), contains the TWA-VF `data.json` prepared for the different scnearios that is meant to be placed inside [`stack-manager/inputs/data/webspace`](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/data), following instruction [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#example---including-a-visualisation). 
-[stack-data-uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services +[stack-data-uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services [inputs]: stack-data-uploader-inputs/ diff --git a/Agents/OPFAgent/README.md b/Agents/OPFAgent/README.md index 4304a276cd9..9cc84de8047 100644 --- a/Agents/OPFAgent/README.md +++ b/Agents/OPFAgent/README.md @@ -4,7 +4,7 @@ The purpose of OPFAgent is to handle HTTP requests to perform Optimal Power Flow (OPF) analysis on a power network instantiated in the knowledge graph. Information about the power network will be retrieved from the triple store and time series data will be extracted from a relational database. After running the simulation, OPF results will be stored back into the relational database as time series data. ## Requirements -- In order to run OPFAgent, a local version (or if you are running in a stack, a stack version) of (TripleStore)AccessAgent needs to be deployed. Refer to [AccessAgent README](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_ACCESS_AGENT/README.md) for Access Agent setup. If running in a stack, create a new namespace in your stack blazegraph called 'storerouter' to store the routing information. Please note that routing information of the target blazegraph should be uploaded accordingly before calling OPFAgent. 
+- In order to run OPFAgent, a local version (or if you are running in a stack, a stack version) of (TripleStore)AccessAgent needs to be deployed. Refer to [AccessAgent README](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent/README.md) for Access Agent setup. If running in a stack, create a new namespace in your stack blazegraph called 'storerouter' to store the routing information. Please note that routing information of the target blazegraph should be uploaded accordingly before calling OPFAgent. - As OPFAgent interacts with time series data stored in a relational database, URL, username and password of the database are required. Refer to the [time series client properties](#time-series-client-properties) section below for more details. diff --git a/Agents/OSMAgent/README.md b/Agents/OSMAgent/README.md index 153052f258d..59dac823dbc 100644 --- a/Agents/OSMAgent/README.md +++ b/Agents/OSMAgent/README.md @@ -2,7 +2,7 @@ ## 1. Description The OSMAgent is an agent that works with OpenStreetMap (OSM) data to link them to existing building IRI and instantiate the semantic representation of building usage information from OSM data. The workflow of the agent can be broadly outlined in the following steps: -1) Categorize OSM tags according to [OntoBuiltEnvironment](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuiltenv) concept. +1) Categorize OSM tags according to [OntoBuiltEnvironment](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobuiltenv) concept. 2) Identify and match OSM data with the 3D buildings uploaded as CityGML data and LoD0 footprint. This is performed by assigning building IRI to OSM data through matching the geometry of the OSM data to the 3D buildings' footprint. 3) Calculate building usage share for all OSM data with tagged building IRI and non-null usage information. 
4) If land use data is available, for 3D buildings without tagged OSM usage, the agent will tag it with the corresponding land use. @@ -13,11 +13,11 @@ After running the OSMAgent, the results can be retrieved through: - `building_iri` - Refers to the IRI of the building. - `propertyusage_iri` - Refers to the IRI of the propertyusage. -- `ontobuilt` - Refers to the [OntoBuiltEnvironment](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuiltenv) usage category of the building. +- `ontobuilt` - Refers to the [OntoBuiltEnvironment](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobuiltenv) usage category of the building. - `usageshare` - Refers to the usage proportion of each `ontobuilt` in a building. - `name` - Refers to the name of the building derived from OSM data. -2) A geoserver layer with the workspace name `twa` and layer name `building_usage`, this layer contains all the information to display the buildings with [TWA-VF](https://github.com/cambridge-cares/TheWorldAvatar/tree/1671-dev-update-osmagent-to-new-building-workflow/web/twa-vis-framework) using the [data.json](stack-manager-config/data/webspace/data.json). +2) A geoserver layer with the workspace name `twa` and layer name `building_usage`, this layer contains all the information to display the buildings with [TWA-VF](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/web/twa-vis-framework) using the [data.json](stack-manager-config/data/webspace/data.json). ## 2. Prerequisites @@ -30,7 +30,7 @@ In the [resource folder](osmagent/src/main/resources/), there are two CSV files The agent has been implemented to work in the stack. Follow the instructions in the [stack-manager]'s README to set up the stack. ### 2.3. 
CityDb -The agent works with 3D buildings uploaded from CityGML data, follow the instructions in the [stack-data-uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader#citydb-data)'s README. +The agent works with 3D buildings uploaded from CityGML data, follow the instructions in the [stack-data-uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader#citydb-data)'s README. ### 2.4. Uploading Raw Data #### 2.4.1. OSM Data @@ -69,7 +69,7 @@ Once the OSM data is uploaded, it will appear in PostgreSQL tables. The agent as #### 2.4.2. Digitales Landschaftsmodell (DLM) Land Use Data DLM files can be uploaded via the stack-data-uploader in Pirmasens Digital Twin (PSDT) repository. -The link to the DLM file in PSDT is available [here](https://github.com/cambridge-cares/pirmasens/tree/main/psdt/stack-data-uploader-inputs/data/dlm). +The link to the DLM file in PSDT is available [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/psdt/stack-data-uploader-inputs/data/dlm). Please note that PSDT is a private repository, permission may be required. #### 2.4.3. Other Land Use Data @@ -99,7 +99,7 @@ In the [config.properties](osmagent/src/main/resources/config.properties) file, Default value - `postgres` is set to according to the database name specified in [osmagent_data.json](stack-data-uploader-inputs/config/osmagent_data.json). Change `db.name` if [osmagent_data.json](stack-data-uploader-inputs/config/osmagent_data.json) database value is changed. - `osm.schema` - Schema name containing OSM data. Default value - `public` is set to the schema specified in [osmagent_data.json](stack-data-uploader-inputs/config/osmagent_data.json). Change `osm.schema` and [`building_usage.obda`](osmagent/src/main/resources/building_usage.obda) if [osmagent_data.json](stack-data-uploader-inputs/config/osmagent_data.json) schema is changed. 
-- `landuse.table` - Table name (inclusive of schema) containing land use data. Default value is set to `public.dlmsie02f` as per uploaded via psdt [here](https://github.com/cambridge-cares/pirmasens/blob/main/psdt/stack-data-uploader-inputs/config/dlm.json). Leave empty if there is no land use data available, no land use matching will be run. +- `landuse.table` - Table name (inclusive of schema) containing land use data. Default value is set to `public.dlmsie02f` as per uploaded via psdt [here](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/psdt/stack-data-uploader-inputs/config/dlm.json). Leave empty if there is no land use data available, no land use matching will be run. ## 4. Deployment ### 4.1 Retrieving OSMAgent's image @@ -142,14 +142,14 @@ The debugger port will be available at 5005. The result of OSMAgent - Building Usages is designed to be compatible with TWA-VF and queryable via FeatureInfoAgent. #### Setting up FIAgent -1) Place [`building_usage.sparql`](stack-manager-config/data/fia-queries/queries/building_usage.sparql) and [`fia-config.json`](stack-manager-config/data/fia-queries/queries/fia-config.json) inside [`stack-manager/inputs/data/queries`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/data) as according the volume path specified in the stack-manager config's [`feature-info-agent.json`](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/FeatureInfoAgent/sample/feature-info-agent.json). -2) Spin FeatureInfoAgent up along with the [stack-manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-the-feature-info-agent). 
+1) Place [`building_usage.sparql`](stack-manager-config/data/fia-queries/queries/building_usage.sparql) and [`fia-config.json`](stack-manager-config/data/fia-queries/queries/fia-config.json) inside [`stack-manager/inputs/data/queries`](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/data) as according the volume path specified in the stack-manager config's [`feature-info-agent.json`](https://github.com/TheWorldAvatar/Feature-Info-Agent/blob/main/sample/feature-info-agent.json). +2) Spin FeatureInfoAgent up along with the [stack-manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-the-feature-info-agent). #### Setting up TWA-VF -1) Place [`data.json`](stack-manager-config/data/webspace/data.json) inside [`stack-manager/inputs/data/webspace`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/data), following instruction [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#example---including-a-visualisation) in the stack-manager. +1) Place [`data.json`](stack-manager-config/data/webspace/data.json) inside [`stack-manager/inputs/data/webspace`](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/data), following instruction [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#example---including-a-visualisation) in the stack-manager. 
-[stack-data-uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services +[stack-data-uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services [stack-data-uploader-inputs]: stack-data-uploader-inputs/ diff --git a/Agents/OSMAgent/stack-data-uploader-inputs/data/osmagent_pirmasens/ontobuiltenv/README.md b/Agents/OSMAgent/stack-data-uploader-inputs/data/osmagent_pirmasens/ontobuiltenv/README.md index 117a6184627..2b1694f149f 100644 --- a/Agents/OSMAgent/stack-data-uploader-inputs/data/osmagent_pirmasens/ontobuiltenv/README.md +++ b/Agents/OSMAgent/stack-data-uploader-inputs/data/osmagent_pirmasens/ontobuiltenv/README.md @@ -1 +1 @@ -Add OntoBuiltEnv.owl file here from https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuiltenv \ No newline at end of file +Add OntoBuiltEnv.owl file here from https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontobuiltenv \ No newline at end of file diff --git a/Agents/OntoMatchAgent/README.md b/Agents/OntoMatchAgent/README.md index 0a4a2c01597..aee8e05ca80 100644 --- a/Agents/OntoMatchAgent/README.md +++ b/Agents/OntoMatchAgent/README.md @@ -112,7 +112,7 @@ The Python script ``coordinator.py`` is called with the following parameters: --config the JSON config file ``` -The config file is a JSON file containing several blocks for configuration. 
In the following, we will explain each block for the config file [conf_power_plant_DEU_auto_5_geo_http_link.json](https://github.com/cambridge-cares/TheWorldAvatar/blob/develop/Agents/OntoMatchAgent/conf/power_plant_DEU/conf_power_plant_DEU_auto_5_geo_http_link.json) used above. +The config file is a JSON file containing several blocks for configuration. In the following, we will explain each block for the config file [conf_power_plant_DEU_auto_5_geo_http_link.json](https://github.com/cambridge-cares/TheWorldAvatar/blob/develop/Agents/OntoMatchAgent/tests/conf/conf_power_plant_DEU_auto_geo.json) used above. The first block only specifies the random seed: @@ -199,4 +199,4 @@ Andreas Eibeck, Shaocong Zhang * Preprint [A Simple and Effective Approach to Unsupervised Instance Matching and its Application to Linked Data of Power Plants](https://como.ceb.cam.ac.uk/preprints/293/) * OntoMatchAgent is part of [The World Avatar](http://theworldavatar.com/) at [CARES](https://www.cares.cam.ac.uk/) * [Knowledge graphs](https://como.ceb.cam.ac.uk/research/cps/) at the [Computational Modelling Group](https://como.ceb.cam.ac.uk/) -* [Universal Digital Twin and Knowledge graphs](https://cmclinnovations.com/digitalisation/knowledge-graphs/) at [CMCL](https://cmclinnovations.com/) +* [CMCL](https://cmclinnovations.com/) diff --git a/Agents/OpenMeteoAgent/README.md b/Agents/OpenMeteoAgent/README.md index e1392bb49bf..fe394b83d6b 100644 --- a/Agents/OpenMeteoAgent/README.md +++ b/Agents/OpenMeteoAgent/README.md @@ -31,7 +31,7 @@ The docker image uses TheWorldAvatar maven repository (https://maven.pkg.github. ``` ### 2.2. Stack Set Up -The agent is designed to run in the stack. To start the stack, spin up the [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager). +The agent is designed to run in the stack. To start the stack, spin up the [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). ### 2.3. 
Blazegraph Set Up The agent is designed to use the stack Blazegraph. Please ensure that the Blazegraph namespace corresponding to ```route.label``` in ```./openmeteo-agent/src/main/resources/config.properties```, is set up in the stack Blazegraph with geospatial capabilities. diff --git a/Agents/PIPSRequestAgent/README.md b/Agents/PIPSRequestAgent/README.md index 7e6fc60ffee..127a9c3b088 100644 --- a/Agents/PIPSRequestAgent/README.md +++ b/Agents/PIPSRequestAgent/README.md @@ -6,7 +6,7 @@ The agent also allows for the option of including a client certificate (P12 form # Prerequisite 1. It is necessary to have Keycloak set up properly. Refer to the official [Keycloak guides](https://www.keycloak.org/guides#getting-started) for how to get started. -2. The TWA (TheWorldAvatar) stack can also be used to set up the Keycloak service along with a variety of other services. Refer to [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) for more information. +2. The TWA (TheWorldAvatar) stack can also be used to set up the Keycloak service along with a variety of other services. Refer to [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) for more information. 3) It is necessary to have the PIPSTimeSeriesAgent set up properly. Refer to [PIPSTimeSeriesAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/PIPSTimeSeriesAgent). diff --git a/Agents/PIPSTimeSeriesAgent/README.md b/Agents/PIPSTimeSeriesAgent/README.md index 4d91caf4c2d..2edf660c835 100644 --- a/Agents/PIPSTimeSeriesAgent/README.md +++ b/Agents/PIPSTimeSeriesAgent/README.md @@ -4,7 +4,7 @@ This agent is designed to receive an access token, carry out verification with K # Prerequisite 1. It is necessary to have Keycloak set up properly. Refer to the official [Keycloak guides](https://www.keycloak.org/guides#getting-started) for how to get started. -2. 
The TWA (TheWorldAvatar) stack can also be used to set up the Keycloak service along with a variety of other services. Refer to [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) for more information. +2. The TWA (TheWorldAvatar) stack can also be used to set up the Keycloak service along with a variety of other services. Refer to [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) for more information. 3. It is necessary to have a PostgreSQL database set up properly. The tables and columns should have a structure similar to how the [OPCUAAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/OPCUAAgent) construct its tables and columns. This agent is originally designed to work with the [OPCUAAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/OPCUAAgent) but it is possible to reuse the agent for other databases as long as they have a similar structure. diff --git a/Agents/PVLibAgent/README.md b/Agents/PVLibAgent/README.md index 495ddad3543..ed23329f873 100644 --- a/Agents/PVLibAgent/README.md +++ b/Agents/PVLibAgent/README.md @@ -1,6 +1,6 @@ # PVLib Agent -This agent is designed to calculate AC and DC Power output from Photovaltaic Panels based on values provided in the properties files or values queried from the knowledge graph. It will then initialise the AC and DC Power as timeseries in the knowledge graph. The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the knowledge graph and database and uses [PvLib](https://pvlib-python.readthedocs.io/en/stable/) for it's AC and DC Power calculations. +This agent is designed to calculate AC and DC Power output from Photovoltaic Panels based on values provided in the properties files or values queried from the knowledge graph. 
It will then initialise the AC and DC Power as timeseries in the knowledge graph. The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the knowledge graph and database and uses [PvLib](https://pvlib-python.readthedocs.io/en/stable/) for its AC and DC Power calculations. For the agent to read data, three property files are required: - One [property file for DC and AC Power instantiation](#dataIRIs-properties) defining the IRIs for each of the keys. @@ -49,7 +49,7 @@ The model_parameters properties contains the parameters required to create a sol - `strings_per_inverter` the number of strings per inverter #### [Option 2] Read Photovoltaic Model Specs from Knowledge Graph -Alternatively, the parameters required to create a solar PV Model can be read from a knowledge graph. This requires an instantiation agent to create a Knowledge Graph filled with PV model parameter values. The [HistoricalNTUEnergy Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/1496-dev-instantiate-historic-ntuenergyconsumptiondata-2/Agents/HistoricalNTUEnergyAgent) provides an example to instantiate a knowledge graph which includes PV model parameters. +Alternatively, the parameters required to create a solar PV Model can be read from a knowledge graph. This requires an instantiation agent to create a Knowledge Graph filled with PV model parameter values. The [HistoricalNTUEnergy Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HistoricalNTUEnergyAgent) provides an example to instantiate a knowledge graph which includes PV model parameters. ## 2. Weather Data Preparation The agent is designed to work with data from one of three sources: weather stations, irradiance sensors, and the OpenMeteo API. It is necessary to have one of the above data retrieved and instantiated on the knowledge graph before running the agent. 
@@ -62,7 +62,7 @@ In the event that the weather data is retrieved from the weather station, the re rdf:type ontoems:AirTemperature . om:hasValue . ``` -see [OntoEMS ontology](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoems) for more information. The [NUSDavisWeatherStation Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/NUSDavisWeatherStationAgent) provides an example of the instantiation. +see [OntoEMS ontology](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoems) for more information. The [NUSDavisWeatherStation Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/NUSDavisWeatherStationAgent) provides an example of the instantiation. The PVLib Agent will query for the latest air temperature, wind speed and global horizontal irradiance values from the knowledge graph. @@ -146,7 +146,7 @@ If the agent runs successfully, you should see a returned Object that is similar #### [Option 2] As a stacked docker container -Running this agent in a docker stack is a more advanced option as it facilitate interactions between other agents for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +Running this agent in a docker stack is a more advanced option as it facilitates interactions between other agents for deployment and visualization. The stack is spun up by [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). A successful setup will result in 9 containers (optional 10): - Default containers @@ -196,10 +196,10 @@ config/ |_ ... |_ .json ``` -More information about adding custom containers to the stack can be found [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#adding-custom-containers). 
+More information about adding custom containers to the stack can be found [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#adding-custom-containers). ##### Spin Up Stack -Follow the [steps](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#spinning-up-a-stack) to spin up the stack. +Follow the [steps](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#spinning-up-a-stack) to spin up the stack. ##### Run the agent Select from one of the following to read weather data: diff --git a/Agents/PropertyValueEstimationAgent/README.md b/Agents/PropertyValueEstimationAgent/README.md index 6f7c56e7bc6..094edaf7c37 100644 --- a/Agents/PropertyValueEstimationAgent/README.md +++ b/Agents/PropertyValueEstimationAgent/README.md @@ -275,16 +275,16 @@ Markus Hofmeister (mh807@cam.ac.uk), March 2022 [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Derivation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[Derivation Agent configuration]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/python_derivation_agent/pyderivationagent/conf/agent_conf.py +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Derivation Agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[Derivation Agent configuration]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent/pyderivationagent/conf/agent_conf.py [EPC Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EnergyPerformanceCertificateAgent -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: 
https://github.com/TheWorldAvatar/baselib/tree/main [OntoBuiltEnv]: http://www.theworldavatar.com/ontology/ontobuiltenv/OntoBuiltEnv.owl [HM Land Registry Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HMLandRegistryAgent -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#spinning-up-a-stack -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#spinning-up-a-stack +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [The World Avatar]: https://github.com/cambridge-cares/TheWorldAvatar [Average Square Metre Price Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AverageSquareMetrePriceAgent diff --git a/Agents/RFIDQueryAgent/README.MD b/Agents/RFIDQueryAgent/README.MD index 2097816c569..14dfdb977d3 100644 --- a/Agents/RFIDQueryAgent/README.MD +++ b/Agents/RFIDQueryAgent/README.MD @@ -2,13 +2,13 @@ This agent is able to execute the following routes: 1) Check route. Query for the latest status of the RFID tags (In/Out) and determine whether the tagged object has been "Out" for longer -than allowed. If the tagged object has been "Out" for longer than allowed, an email will be sent to the relevant personnel via the [EmailSender class in the JPS Base Lib](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/email/EmailSender.java). 
The agent is able to intuitively determine whether the tagged object is a chemical container that contains a chemical species and retrieve the information of the chemical species if possible. More information can be found at the [Check route](#51-check-route) section. +than allowed. If the tagged object has been "Out" for longer than allowed, an email will be sent to the relevant personnel via the [EmailSender class in the JPS Base Lib](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/email/EmailSender.java). The agent is able to intuitively determine whether the tagged object is a chemical container that contains a chemical species and retrieve the information of the chemical species if possible. More information can be found at the [Check route](#51-check-route) section. 2) Retrieve data route. Retrieve information relating to a tagged object IRI and return them in the form of a JSONObject. The agent is able to intuitively determine whether the tagged object is a chemical container that contains a chemical species and retrieve the information of the chemical species if possible. More information can be found at the [Retrieve data route](#52-retrieve-data-route) section. -3) Send notification route. Upon receiving the latest status of a tag, the agent will query for all the meta data relevant to the tag and it's tagged object, send out an email containing these information to the relevant personnels via the [EmailSender class in the JPS Base Lib](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/email/EmailSender.java). More information can be found at the [Send notification route](#53-send-notification-route) section. +3) Send notification route. 
Upon receiving the latest status of a tag, the agent will query for all the meta data relevant to the tag and its tagged object, send out an email containing this information to the relevant personnel via the [EmailSender class in the JPS Base Lib](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/email/EmailSender.java). More information can be found at the [Send notification route](#53-send-notification-route) section. -The agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and [remote store client](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) from the JPS_BASE_LIB to interact with both the KG and database. +The agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) and [remote store client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/query/RemoteStoreClient.java) from the JPS_BASE_LIB to interact with both the KG and database. ## 1. Usage @@ -21,9 +21,9 @@ The [next section](#2-requirements) will explain the requirements to run the age 2) It is required to have the [Email Agent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/EmailAgent) set up beforehand. -3) It is required to have some timeseries data that contains the status of the RFID tags (In/Out) already instantiated in the knowledge graph via the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries). 
+3) It is required to have some timeseries data that contains the status of the RFID tags (In/Out) already instantiated in the knowledge graph via the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries). -4) If the tagged object is a chemical container that contain some chemicals; the tag, chemical container, chemical it contains, chemical species label and the species's GHS Hazard Statements(if the species does have GHS Hazard Statements) should be instantiated in the knowledge graph based on several ontologies: [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice), [OntoLab](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontolab), [OntoSpecies](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontospecies), [OntoCAPE](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontocape), [OntoReaction](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoreaction) . An example of the instance can be found below: +4) If the tagged object is a chemical container that contain some chemicals; the tag, chemical container, chemical it contains, chemical species label and the species's GHS Hazard Statements(if the species does have GHS Hazard Statements) should be instantiated in the knowledge graph based on several ontologies: [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice), [OntoLab](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontolab), [OntoSpecies](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontospecies), [OntoCAPE](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontocape), [OntoReaction](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoreaction) . 
An example of the instance can be found below: ``` a ontodevice:RFIDSensor; ontodevice:observes . diff --git a/Agents/RFIDUpdateAgent/README.md b/Agents/RFIDUpdateAgent/README.md index bbaa182fc1f..5a030affd11 100644 --- a/Agents/RFIDUpdateAgent/README.md +++ b/Agents/RFIDUpdateAgent/README.md @@ -3,7 +3,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding RFID tag data being sent to a RFID servlet. It's only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS base lib to interact with both the KG and database. Before explaining the usage of the agent, we will briefly summarize the RFID API that is contacted by one of the classes in this package to retrieve data. diff --git a/Agents/RenewableEnergyAgents/MetOfficeWindSensorAgent/README.md b/Agents/RenewableEnergyAgents/MetOfficeWindSensorAgent/README.md index 4998b5224cd..d3e1f771b23 100644 --- a/Agents/RenewableEnergyAgents/MetOfficeWindSensorAgent/README.md +++ b/Agents/RenewableEnergyAgents/MetOfficeWindSensorAgent/README.md @@ -1,8 +1,8 @@ # An agent to represent and query UK wind data reported by the Met Office. 
### Authors -* [Toby Latcham](tjl47@cam.ac.uk) -* [Sophie Hall](sh2000@cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Toby Latcham](mailto:tjl47@cam.ac.uk) +* [Sophie Hall](mailto:sh2000@cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) The agent processes UK Mean Wind Data downloaded as a CSV file from Met Office Integrated Data Archive System (MIDAS) to represent it in the World Avatar Knowledge Graph (KG) to make it accessible and queryable. The data describes the mean wind speed and direction and maximum gust speed, direction and time reported in the context of sensors installed in different locations all over the UK. diff --git a/Agents/RenewableEnergyAgents/README.md b/Agents/RenewableEnergyAgents/README.md index c2c63529408..18dc9f5ab10 100644 --- a/Agents/RenewableEnergyAgents/README.md +++ b/Agents/RenewableEnergyAgents/README.md @@ -100,8 +100,8 @@ In Docker Desktop: [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar [DTVF]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/TWA-Visualisations -[TimeSeriesClient]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries -[py4jps]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_wrapper +[TimeSeriesClient]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries +[py4jps]: https://github.com/TheWorldAvatar/baselib/tree/main/python_wrapper [properties file]: resources/renewable_energy_agents.properties [Create a Mapbox API access token]: https://account.mapbox.com/access-tokens/ [data folder]: https://www.dropbox.com/sh/2dgpwmedboumkkt/AAAPUxMSa5BTw10iPVkReBGaa/Codes/Research%20project%20code?dl=0&subfolder_nav_tracking=1 \ No newline at end of file diff --git a/Agents/RenewableEnergyAgents/UrbanObservatorySolarSensorAgent/README.md b/Agents/RenewableEnergyAgents/UrbanObservatorySolarSensorAgent/README.md index e89cf0edf6b..c343c7ded02 
100644 --- a/Agents/RenewableEnergyAgents/UrbanObservatorySolarSensorAgent/README.md +++ b/Agents/RenewableEnergyAgents/UrbanObservatorySolarSensorAgent/README.md @@ -1,8 +1,8 @@ # An agent to represent and query solar data reported by the Newcastle Urban Observatory. ### Authors -* [Toby Latcham](tjl47@cam.ac.uk) -* [Sophie Hall](sh2000@cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Toby Latcham](mailto:tjl47@cam.ac.uk) +* [Sophie Hall](mailto:sh2000@cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) The agent processes solar radiation data including global and diffuse solar radiation downloaded as a CSV file from the Newcastle Urban Observatory to represent it in the World Avatar Knowledge Graph (KG) to make it accessible and queryable. The data is reported in the context of sensors positioned around Newcastle. diff --git a/Agents/ResultedConsumptionCalculationAgent/README.md b/Agents/ResultedConsumptionCalculationAgent/README.md index 888d32a8dc2..dcc5722c95e 100644 --- a/Agents/ResultedConsumptionCalculationAgent/README.md +++ b/Agents/ResultedConsumptionCalculationAgent/README.md @@ -168,7 +168,7 @@ If you started from an empty namespace, or have not instantiate upper level inst Please check if you have created a namespace in the blazegraph, and entered the correct environmental variables in the [agent.env.example](./agent.env.example). 
-Afterwards, run the [upper_level_ontology_update.py](./copcalculationagent/upper_level_ontology_update.py), simply run this command in the powershell terminal: +Afterwards, run the [upper_level_ontology_update.py](./resultedconsumptioncalculationagent/upper_level_ontology_update.py), simply run this command in the powershell terminal: ```bash py ./resultedconsumptioncalculationagent/upper_level_ontology_update.py @@ -201,16 +201,16 @@ Jieyang Xu (jx309@cam.ac.uk), May 2023 [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Derivation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[Derivation Agent configuration]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/python_derivation_agent/pyderivationagent/conf/agent_conf.py +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Derivation Agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[Derivation Agent configuration]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent/pyderivationagent/conf/agent_conf.py [EPC Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-EPCInstantiationAgent/Agents/EnergyPerformanceCertificateAgent -[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [OntoRegionalAnalysis]: http://www.theworldavatar.com/ontology/ontoregionalanlysis/OntoRegionalAnalysis.owl [HM Land Registry Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-PropertySalesInstantiationAgent/Agents/HMLandRegistryAgent -[spin up the stack]: 
https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#spinning-up-a-stack -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#spinning-up-a-stack +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [The World Avatar]: https://github.com/cambridge-cares/TheWorldAvatar [Average Square Metre Price Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AverageSquareMetrePriceAgent diff --git a/Agents/RxnOptGoalAgent/README.md b/Agents/RxnOptGoalAgent/README.md index a73cfe109fc..10df55e1cbc 100644 --- a/Agents/RxnOptGoalAgent/README.md +++ b/Agents/RxnOptGoalAgent/README.md @@ -4,7 +4,7 @@ The folder contains the source, resource, and Docker setup files for the Reactio   ## 1. Purpose -The Reaction Optimisation Goal (ROG) Agent is designed to take goal requests, monitor the progress in goal iterations, make decisions based on the latest results, visualise progress in goal iterations, and notify users about the status change throughout the process. It does so by translating the goal request to actionable ontological representations based on concepts defined in [`OntoGoal`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontogoal). These expressions will then be picked up by [`RxnOptGoalIterAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/RxnOptGoalIterAgent) to orchestrate the actual performance of the reaction experiment. 
+The Reaction Optimisation Goal (ROG) Agent is designed to take goal requests, monitor the progress in goal iterations, make decisions based on the latest results, visualise progress in goal iterations, and notify users about the status change throughout the process. It does so by translating the goal request to actionable ontological representations based on concepts defined in [`OntoGoal`](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontogoal). These expressions will then be picked up by [`RxnOptGoalIterAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/RxnOptGoalIterAgent) to orchestrate the actual performance of the reaction experiment.   @@ -227,7 +227,7 @@ pytest tests/test_rxn_opt_dockerised.py --docker-compose=./docker-compose-test-d ### 4.3 Physical test #### 4.3.1 Local test The local integration test using physical equipment is provided in `test_rxn_lab_physical.py`. To run physical test in the lab, please follow below steps: -1. (**ONLY IF** you would like to receive email notifications about the agents operations) Set up email configuration in relevant `tests/env_files/*.env.test`, for details, see [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent#set-up-email-notification-for-exceptions) +1. (**ONLY IF** you would like to receive email notifications about the agents operations) Set up email configuration in relevant `tests/env_files/*.env.test`, for details, see [here](https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent#set-up-email-notification-for-exceptions) 2. Manually spin up docker containers in `tests/docker-compose.test.kg.yml` (this design prevents the test triples being deleted by the teardown function) 3. Open FlowCommander in Windows host machine, load the correct experiment file (`.fcexp`) - you may contact the maintainer of this repo to get it 4. 
Open HPLC software in Windows host machine, load the correct HPLC method, turn on the hardware, queue the analysis sequence, obtain the report folder path diff --git a/Agents/RxnOptGoalIterAgent/README.md b/Agents/RxnOptGoalIterAgent/README.md index aa769ceda35..6ca5f564aee 100644 --- a/Agents/RxnOptGoalIterAgent/README.md +++ b/Agents/RxnOptGoalIterAgent/README.md @@ -3,7 +3,7 @@ The folder contains the source, resource, and Docker setup files for the Reactio ## Purpose -The Reaction Optimisation Goal Iteration (ROGI) Agent is designed to perform iterations of reaction experiment as part of goal-driven reaction optimisation exercise. It operates based on concepts defined in [`OntoGoal`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontogoal) and orchestrates [`DoE Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/DoEAgent), [`VapourtecSchedule Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecScheduleAgent), [`Vapourtec Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecAgent), [`HPLC Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCAgent), and [`HPLCPostPro Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCPostProAgent) to complete one iteration. +The Reaction Optimisation Goal Iteration (ROGI) Agent is designed to perform iterations of reaction experiment as part of goal-driven reaction optimisation exercise. 
It operates based on concepts defined in [`OntoGoal`](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontogoal) and orchestrates [`DoE Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/DoEAgent), [`VapourtecSchedule Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecScheduleAgent), [`Vapourtec Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecAgent), [`HPLC Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCAgent), and [`HPLCPostPro Agent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCPostProAgent) to complete one iteration. ## Building the Docker image @@ -65,7 +65,7 @@ The following command can be used to install all required packages. () $ python -m pip install -e .[dev] ``` -As `pyderivationagent` library relies on the `py4jps` package, Java 11 is required. For Windows, it is recommended to obtain OpenJDK 11 from [here](https://developers.redhat.com/products/openjdk/download) and follow the [instructions](https://access.redhat.com/documentation/en-us/openjdk/11/html-single/installing_and_using_openjdk_11_for_windows/index). For linux environment, one can install via: +As `pyderivationagent` library relies on the `py4jps` package, Java 11 is required. For Windows, it is recommended to obtain OpenJDK 11 from [here](https://developers.redhat.com/products/openjdk/download) and follow the [instructions](https://docs.redhat.com/en/documentation/red_hat_build_of_openjdk/11/html/installing_and_using_red_hat_build_of_openjdk_11_for_windows/index). 
For linux environment, one can install via: `(Linux)` ```sh diff --git a/Agents/SeaLevelImpactAgent/README.md b/Agents/SeaLevelImpactAgent/README.md index 57af926ee0e..63bdc91ca10 100644 --- a/Agents/SeaLevelImpactAgent/README.md +++ b/Agents/SeaLevelImpactAgent/README.md @@ -19,16 +19,16 @@ The SeaLevelImpactAgent is an agent that 3) Create geoserver layer for each sealevelprojections ## 2. Prerequisites -This agent is developed as part of the [Singapore-sea-level-rise stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-sea-level-rise-singapore/Deploy/stacks/Singapore-sea-level-rise). +This agent is developed as part of the [Singapore-sea-level-rise stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/Singapore-sea-level-rise). -Data in the [Singapore-sea-level-rise stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-sea-level-rise-singapore/Deploy/stacks/Singapore-sea-level-rise) needs to be uploaded by stack-data-uploader before running this agent. +Data in the [Singapore-sea-level-rise stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/Singapore-sea-level-rise) needs to be uploaded by stack-data-uploader before running this agent. ### 2.1. Stack Set Up The agent has been implemented to work in the stack. Follow the instructions in the [stack-manager]'s README to set up the stack. ## 3. Agent Configuration ### 3.1 Config Properties -The [Config.properties](inputs/config.properties) file contain the table name for the different datasets. A default value is set for each parameters following the stack-data-uploader table names specified in [Singapore-sea-level-rise stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-sea-level-rise-singapore/Deploy/stacks/Singapore-sea-level-rise). +The [Config.properties](inputs/config.properties) file contains the table name for the different datasets. 
A default value is set for each parameter following the stack-data-uploader table names specified in [Singapore-sea-level-rise stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/Singapore-sea-level-rise). 1) `dbName` - Specify the postgresql database 2) `buildingsMatViewName` - Specify the table name for CityDB buildings footprint 3) `heritagetreesTable` - Specify the table name for heritage tree diff --git a/Agents/SensorLoggerMobileAppAgent/README.md b/Agents/SensorLoggerMobileAppAgent/README.md index ebed3441197..6b27ece8b55 100644 --- a/Agents/SensorLoggerMobileAppAgent/README.md +++ b/Agents/SensorLoggerMobileAppAgent/README.md @@ -1,11 +1,11 @@ # SensorLoggerMobileAppAgent ## 1. Description -The SensorLoggerMobileAppAgent is an agent which receives HTTP POST requests containing JSON payload sent from the [SensorLogger](https://github.com/tszheichoi/awesome-sensor-logger) mobile application, subsequently instantiate it as time series following the [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice) ontology. The information instantiated from SensorLogger includes: Smartphone device, Acceleration vector, Gravity vector, Magnetic flux density vector, Sound pressure level, Illuminance, Relative brightness, Location. +The SensorLoggerMobileAppAgent is an agent which receives HTTP POST requests containing JSON payload sent from the [SensorLogger](https://github.com/tszheichoi/awesome-sensor-logger) mobile application, subsequently instantiate it as time series following the [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice) ontology. The information instantiated from SensorLogger includes: Smartphone device, Acceleration vector, Gravity vector, Magnetic flux density vector, Sound pressure level, Illuminance, Relative brightness, Location. 
The agent functions as below: 1) The agent receives JSON payload from the SensorLogger and parse the received JSON Array. -2) It downsamples the received timeseries data via the [Downsampling](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/core/downsampling) library, and instantiates the data using the [TimeSeriesClient](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries). -3) The [OntoDevice](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice) triples are instantiated in Ontop. +2) It downsamples the received timeseries data via the [Downsampling](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/core/downsampling) library, and instantiates the data using the [TimeSeriesClient](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries). +3) The [OntoDevice](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontodevice) triples are instantiated in Ontop. ### 1.1 Concurrency Design The agent manages a phone ID to recording task map, where each phone ID will have a corresponding recording task. The recording task is responsible for sensor data processing, knowledge graph instantiation and postgres table initiation and data upload. Each recording task has different types of sensor processors, which are responsible for the sensor IRI query and generation, downsampling and data formulation for individual types. The following class diagram highlight the relations between class and omit some details of some classes for simplicity. @@ -234,5 +234,5 @@ The debugger port will be available at 5005. ### 5.3 Testing resources You may use the [SamplePOST request](sensorloggermobileappagent/src/main/resources/SamplePOST.http) for testing any changes made to the code, this HTTP request contains a sample of the recording for testing purposes.
-[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services \ No newline at end of file +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services \ No newline at end of file diff --git a/Agents/SmartMeterAgent/README.md b/Agents/SmartMeterAgent/README.md index 95f34d09f3c..41f9369c0ba 100644 --- a/Agents/SmartMeterAgent/README.md +++ b/Agents/SmartMeterAgent/README.md @@ -4,7 +4,7 @@ The purpose of Smart Meter Agent is to handle HTTP requests to retrieve latest reading for the current time from a database storing smart meter readings every minute, or retrieve all valid historical readings from a database or a CSV file, and upload the data to instantiated time series in the KG. ## Requirements -- In order to run SmartMeterAgent, a local version (or if you are running in a stack, a stack version) of (TripleStore)AccessAgent needs to be deployed. Refer to [AccessAgent README](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_ACCESS_AGENT/README.md) for Access Agent setup. If running in a stack, create a new namespace in your stack blazegraph called 'storerouter' to store the routing information. Please note that routing information of the target blazegraph should be uploaded accordingly before calling SmartMeterAgent. +- In order to run SmartMeterAgent, a local version (or if you are running in a stack, a stack version) of (TripleStore)AccessAgent needs to be deployed. Refer to [AccessAgent README](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent/README.md) for Access Agent setup. 
If running in a stack, create a new namespace in your stack blazegraph called 'storerouter' to store the routing information. Please note that routing information of the target blazegraph should be uploaded accordingly before calling SmartMeterAgent. - The target blazegraph should contain a power network instantiated according to [OntoPowSys](http://www.theworldavatar.com/ontology/ontopowsys/), and the related time series should be instantiated before calling SmartMeterAgent. @@ -54,7 +54,7 @@ The agent is reachable on localhost port 39998 by default (you can change this i ``` docker build -t "smart-meter-agent:1.0.0" . ``` -- Adjust the `access-agent.json` file in `JPS_ACCESS_AGENT/access-agent-dev-stack` according to [AccessAgent README](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_ACCESS_AGENT/README.md), and copy it into `inputs/config/services` folder of the stack manager. +- Adjust the `access-agent.json` file in `JPS_ACCESS_AGENT/access-agent-dev-stack` according to [AccessAgent README](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent/README.md), and copy it into `inputs/config/services` folder of the stack manager. - Copy the `smart-meter-agent.json` file in `stack-manager-input-config` folder into the `inputs/config/services` folder of the stack manager. - Start the stack manager as usual. This should start an access agent container and a SmartMeterAgent container as part of your stack. 
diff --git a/Agents/SolarkatasterAgent/README.md b/Agents/SolarkatasterAgent/README.md index efd54ed5816..8d9cdb7c275 100644 --- a/Agents/SolarkatasterAgent/README.md +++ b/Agents/SolarkatasterAgent/README.md @@ -36,7 +36,7 @@ curl -X POST --header "Content-Type: application/json" -d "{'table':'stadt_pirma ## Build Instructions ### Stack set up -The agent has been implemented to work with stack, which requires the stack to be [set up](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) and for the Solarkataster data to be [uploaded to stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader), before building and running the agent. +The agent has been implemented to work with stack, which requires the stack to be [set up](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) and for the Solarkataster data to be [uploaded to stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader), before building and running the agent. Before building, change the placeholder `` in `./solarkataster_agent/src/main/resources/config.properties` and `./docker-compose.yml` to the name of your stack. diff --git a/Agents/ThingsBoardAgent/README.md b/Agents/ThingsBoardAgent/README.md index 2852f30c8b2..f7b0a7bd418 100644 --- a/Agents/ThingsBoardAgent/README.md +++ b/Agents/ThingsBoardAgent/README.md @@ -3,7 +3,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding sensor measurements sent to a ThingsBoard server. Its only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time.
The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS base lib to interact with both the KG and database. Before explaining the usage of the agent, we will briefly summarize the ThingsBoard API that is contacted by one of the classes in this package to retrieve data. diff --git a/Agents/ThingspeakAgent/README.MD b/Agents/ThingspeakAgent/README.MD index a3b79d4ecd2..a535c3f09e3 100644 --- a/Agents/ThingspeakAgent/README.MD +++ b/Agents/ThingspeakAgent/README.MD @@ -2,7 +2,7 @@ This agent is for maintaining data and the corresponding instances in the knowledge graph (KG) regarding the Thingspeak cloud server. Its only purpose is to retrieve new data (if available) from the API and download it into the corresponding database, as well as, instantiating KG instances and connection when called for the first time. The -agent uses the [time-series client](https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries) +agent uses the [time-series client](https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries) from the JPS_BASE_LIB to interact with both the KG and database. 
Before explaining the usage of the agent, we will briefly summarize the Thingspeak API that is diff --git a/Agents/TimeSeriesExample/README.md b/Agents/TimeSeriesExample/README.md index 55ba46a6d50..f3e0c83e2dd 100644 --- a/Agents/TimeSeriesExample/README.md +++ b/Agents/TimeSeriesExample/README.md @@ -119,7 +119,7 @@ In Docker Desktop: [TheWorldAvatar]: https://github.com/cambridge-cares/TheWorldAvatar [DTVF]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/TWA-Visualisations -[TimeSeriesClient]: https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/timeseries -[py4jps]: https://github.com/cambridge-cares/TheWorldAvatar/tree/develop/JPS_BASE_LIB/python_wrapper +[TimeSeriesClient]: https://github.com/TheWorldAvatar/baselib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/timeseries +[py4jps]: https://github.com/TheWorldAvatar/baselib/tree/main/python_wrapper [properties file]: resources/ts_example.properties [http://localhost:65080/]: http://localhost:65080/ diff --git a/Agents/TrafficIncidentAgent/README.md b/Agents/TrafficIncidentAgent/README.md index 2a862ba69df..d3787d53e73 100644 --- a/Agents/TrafficIncidentAgent/README.md +++ b/Agents/TrafficIncidentAgent/README.md @@ -52,6 +52,6 @@ Sun Xin Yu (https://github.com/Echomo-Xinyu)
October 2024 -[stack-data-uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services \ No newline at end of file +[stack-data-uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services \ No newline at end of file diff --git a/Agents/TrajectoryQueryAgent/README.md b/Agents/TrajectoryQueryAgent/README.md index 5dca7087a91..1f7f40c2a9d 100644 --- a/Agents/TrajectoryQueryAgent/README.md +++ b/Agents/TrajectoryQueryAgent/README.md @@ -7,7 +7,7 @@ TrajectoryQueryAgent is an agent that handles trajectory related tasks. 
It curre ## Requirements -Launch [stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) with the default containers and the following additional containers: +Launch [stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) with the default containers and the following additional containers: - information from [SensorLoggerMobileAppAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/SensorLoggerMobileAppAgent) to be instantiated @@ -113,7 +113,7 @@ Response given in the form of {"result":[{"month":1,"year":2024,"days":"{1,2,3}"}],"message":"Succeed"} ``` -[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services [line layer (device id)]: ./trajectoryqueryagent/src/main/resources/line_layer_device_id.sql [line layer (user id)]: ./trajectoryqueryagent/src/main/resources/line_layer_user_id.sql diff --git a/Agents/TravellingSalesmanAgent/README.md b/Agents/TravellingSalesmanAgent/README.md index 0011723c770..31ac8c2a320 100644 --- a/Agents/TravellingSalesmanAgent/README.md +++ b/Agents/TravellingSalesmanAgent/README.md @@ -11,7 +11,7 @@ The TravellingSalesmanAgent is an agent that ### 2.1. Stack Set Up -The agent has been implemented to work in the stack. Follow the instructions in the [stack-manager]'s README to set up the stack. Several pre-configured examples for the different use cases for King's Lynn can be found in [stack-data-uploader-inputs](stack-data-uploader-inputs/). +The agent has been implemented to work in the stack. 
Follow the instructions in the [stack-manager]'s README to set up the stack. Several pre-configured examples for the different use cases for King's Lynn can be found in [inputs](inputs/). ## 3. Agent Configuration @@ -101,7 +101,7 @@ The debugger port will be available at 5005. ### 7.1 Feature Info Agent -1) In the directory [stack-manager-config/data/webspace/](stack-manager-config/data/webspace/), contains the TWA-VF `data.json` prepared for the different scnearios that is meant to be placed inside [`stack-manager/inputs/data/webspace`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/data), following instruction [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#example---including-a-visualisation). +1) In the directory [stack-manager-config/data/webspace/](stack-manager-config/data/webspace/), contains the TWA-VF `data.json` prepared for the different scenarios that is meant to be placed inside [`stack-manager/inputs/data/webspace`](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/data), following instruction [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#example---including-a-visualisation).
-[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services diff --git a/Agents/UserAgent/README.md b/Agents/UserAgent/README.md index 5b9238d6094..70b8024cd83 100644 --- a/Agents/UserAgent/README.md +++ b/Agents/UserAgent/README.md @@ -72,7 +72,7 @@ Check this Keycloak [guide](https://www.keycloak.org/docs/latest/authorization_s ### 4.3 Starting with the stack-manager The agent has been implemented to work in the stack, which requires the UserAgent Docker container to be deployed in the stack. To do so, place [user-agent.json](stack-manager-config/inputs/config/services/user-agent.json) in the [stack-manager config directory]. -Then, run `./stack.sh start ` in the [stack-manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) main folder. This will spin up the agent in the stack. +Then, run `./stack.sh start ` in the [stack-manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) main folder. This will spin up the agent in the stack. ## 5. Build and debug ## 5.1 Credentials @@ -93,5 +93,5 @@ To debug the agent, replace [`user-agent-debug.json`](stack-manager-config/input Spin up with `./stack.sh start ` in the [stack-manager]'s main folder. The debugger port will be available at 5005. 
-[stack-manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[stack-manager config directory]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager/inputs/config/services \ No newline at end of file +[stack-manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[stack-manager config directory]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/inputs/config/services \ No newline at end of file diff --git a/Agents/UtilityCostCalculationAgent/README.md b/Agents/UtilityCostCalculationAgent/README.md index ee2da3c9d84..ad9fd2d5cc8 100644 --- a/Agents/UtilityCostCalculationAgent/README.md +++ b/Agents/UtilityCostCalculationAgent/README.md @@ -131,7 +131,7 @@ If you started from an empty namespace, or have not instantiate upper level inst Please check if you have created a namespace in the blazegraph, and entered the correct environmental variables in the [agent.env.example](./agent.env.example). 
-Afterwards, run the [upper_level_ontology_update.py](./copcalculationagent/upper_level_ontology_update.py), simply run this command in the powershell terminal: +Afterwards, run the [upper_level_ontology_update.py](./utiliycostcalculationagent/upper_level_ontology_update.py), simply run this command in the powershell terminal: ```bash py ./utilitycostcalculationagent/upper_level_ontology_update.py @@ -147,7 +147,7 @@ py ./utilitycostcalculationagent/markup.py # Authors # Jieyang Xu (jx309@cam.ac.uk), May 2023 -[markup.py]:./utilitycostcalculationagent/markup.py +[markup.py]:./utiliycostcalculationagent/markup.py [home page]:https://htmlpreview.github.io/?https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/UtilityCostCalculationAgent/index.html [CopCalculationAgent]:https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/CopCalculationAgent [OntoCAPE]:http://theworldavatar.com/ontology/ontocape/ @@ -166,16 +166,16 @@ Jieyang Xu (jx309@cam.ac.uk), May 2023 [CMCL Docker registry wiki page]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry -[Common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Derivation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent -[Derivation Agent configuration]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_BASE_LIB/python_derivation_agent/pyderivationagent/conf/agent_conf.py +[Common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Derivation Agent]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent +[Derivation Agent configuration]: https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent/pyderivationagent/conf/agent_conf.py [EPC Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-EPCInstantiationAgent/Agents/EnergyPerformanceCertificateAgent 
-[JPS_BASE_LIB]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB +[JPS_BASE_LIB]: https://github.com/TheWorldAvatar/baselib/tree/main [OntoRegionalAnalysis]: http://www.theworldavatar.com/ontology/ontoregionalanlysis/OntoRegionalAnalysis.owl [HM Land Registry Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-PropertySalesInstantiationAgent/Agents/HMLandRegistryAgent -[spin up the stack]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#spinning-up-a-stack -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager -[Stack-Clients]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-clients +[spin up the stack]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#spinning-up-a-stack +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager +[Stack-Clients]: https://github.com/TheWorldAvatar/stack/tree/main/stack-clients [The World Avatar]: https://github.com/cambridge-cares/TheWorldAvatar [Average Square Metre Price Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AverageSquareMetrePriceAgent diff --git a/Agents/VisBackendAgent/README.md b/Agents/VisBackendAgent/README.md index 971cbc105f0..0b58f7822d6 100644 --- a/Agents/VisBackendAgent/README.md +++ b/Agents/VisBackendAgent/README.md @@ -4,4 +4,4 @@ The Vis-Backend Agent has moved. It is now located in the [new TWA Org](https://github.com/TheWorldAvatar/Viz-Backend-Agent). 
-`szp20/1476-feature-handle-public-holidays-in-vis-backend-agent` has been moved as well as the `main` branch \ No newline at end of file +`szp20/1476-feature-handle-public-holidays-in-vis-backend-agent` has been moved as well as the `main` branch diff --git a/Agents/ZeoliteAgent/README.md b/Agents/ZeoliteAgent/README.md index aec6025cb87..0b97df35fed 100644 --- a/Agents/ZeoliteAgent/README.md +++ b/Agents/ZeoliteAgent/README.md @@ -66,7 +66,7 @@ will install an earlier version of the package. `() $ pip install pyuploader` -More details at the [TWA web-site](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_uploader). +More details at the [TWA web-site](https://github.com/TheWorldAvatar/baselib/tree/main/python_uploader). ### Prepare and run the code Copy the code and bat scripts to the current directory. diff --git a/Agents/_DerivationPaper/README.md b/Agents/_DerivationPaper/README.md index 3760aee9960..f3845135588 100644 --- a/Agents/_DerivationPaper/README.md +++ b/Agents/_DerivationPaper/README.md @@ -230,7 +230,7 @@ Jiaru Bai (jb2197@cam.ac.uk), December 2022 [Docker environment]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Environment [CMCL Docker image registry]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry [DTVF]: https://github.com/cambridge-cares/TheWorldAvatar/wiki/TWA-Visualisations -[example Mapbox visualisation]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/web/digital-twin-vis-framework/example-mapbox-vis +[example Mapbox visualisation]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/web/twa-vis-framework/example [wait-for-it]: https://github.com/vishnubob/wait-for-it diff --git a/Agents/utils/chemistry_and_robots/README.md b/Agents/utils/chemistry_and_robots/README.md index 0c17bafefe4..eac357e003d 100644 --- a/Agents/utils/chemistry_and_robots/README.md +++ b/Agents/utils/chemistry_and_robots/README.md @@ -1,8 +1,8 @@ # Description # 
The `chemistry_and_robots` package provides a collection of dataclasses and SPARQL query/update functions that are used by a series of agents capable of conducting automated reaction experiments as part of [TheWorldAvatar](https://github.com/cambridge-cares/TheWorldAvatar) project. `chemistry_and_robots` uses `pyderivationagent>=1.1.0` to access `PySparqlClient` provided in `pyderivationagent.kg_operations` to form its SPARQL query/update utilities. For technical details, below are a few useful links: -- [`pyderivationagent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_derivation_agent) - python wrapper for derivation agent -- [`py4jps`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_wrapper) - python wrapper for jps-base-lib +- [`pyderivationagent`](https://github.com/TheWorldAvatar/baselib/tree/main/python_derivation_agent) - python wrapper for derivation agent +- [`py4jps`](https://github.com/TheWorldAvatar/baselib/tree/main/python_wrapper) - python wrapper for jps-base-lib # Installation # For development and testing reasons, follow below instructions to get a copy of the project up and running on your local system. @@ -50,7 +50,7 @@ This package provides ontological data models from five main ontologies, namely: All of the concepts are directly or indirectly inherited from the `BaseOntology` class which itself is inherited from `pydantic.BaseModel`. The design of these data model classes serve as a persistence layer between the agent operations in chemistry_and_robots and the data stored in the knowledge graph. Additionally, all TBox IRIs involved in the chemistry_and_robots as part of The World Avatar project are provided in the `chemistry_and_robots.data_model.iris.py`. Developer can import this module to make use of the concepts and relationships. 
## SPARQL client -A SPARQL client class `chemistry_and_robots.kg_operations.sparql_client.ChemistryAndRobotsSparqlClient` is provided as part of this package. It provides a few SPARQL query and update functions that are helpful in handling data instantiated using the above ontology data models. These functions have been used to develope a few python agents, for more details, please refer to: [`DoEAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/DoEAgent), [`VapourtecExecutionAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecExecutionAgent), [`HPLCPostProAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCPostProAgent), [`VapourtecAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecAgent), and [`HPLCAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCAgent). +A SPARQL client class `chemistry_and_robots.kg_operations.sparql_client.ChemistryAndRobotsSparqlClient` is provided as part of this package. It provides a few SPARQL query and update functions that are helpful in handling data instantiated using the above ontology data models. These functions have been used to develop a few python agents, for more details, please refer to: [`DoEAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/DoEAgent), [`VapourtecScheduleAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecScheduleAgent), [`HPLCPostProAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCPostProAgent), [`VapourtecAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/VapourtecAgent), and [`HPLCAgent`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/HPLCAgent). ## Test Unit and integration tests are written for this package.
The tests should pass if you already correctly setup the [Docker Environment](https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Environment) and obtained access to [Docker Image Registry](https://github.com/cambridge-cares/TheWorldAvatar/wiki/Docker%3A-Image-registry). To run tests, please execute below commands (remember to replace the `` with actual path): diff --git a/Agents/utils/python-utils/README.md b/Agents/utils/python-utils/README.md index 67896623e5b..70692b96cb2 100644 --- a/Agents/utils/python-utils/README.md +++ b/Agents/utils/python-utils/README.md @@ -1,6 +1,6 @@ # TheWorldAvatar - Python Utils -This Python package contains a number of logging utilities that may be useful to any Python-based project within The World Avatar (TWA) ecosystem. At the time of writing, this project builds an isolated package named `agentlogging` that users can import in their own code. In the future, this package may be bundled with the Python wrapper for the JPS Base Library so that only one dependency is required. **Deprecation Warning: `agentlogging` is packaged with `py4jps` as of version [1.0.29](https://pypi.org/project/py4jps/1.0.29/). Please do NOT use or develop this isolated package further. Instead, please use and continue develop [`TheWorldAvatar/JPS_BASE_LIB/python_wrapper/py4jps/agentlogging`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/python_wrapper/py4jps/agentlogging).** +This Python package contains a number of logging utilities that may be useful to any Python-based project within The World Avatar (TWA) ecosystem. At the time of writing, this project builds an isolated package named `agentlogging` that users can import in their own code. In the future, this package may be bundled with the Python wrapper for the JPS Base Library so that only one dependency is required. **Deprecation Warning: `agentlogging` is packaged with `py4jps` as of version [1.0.29](https://pypi.org/project/py4jps/1.0.29/). 
Please do NOT use or develop this isolated package further. Instead, please use and continue develop [`TheWorldAvatar/JPS_BASE_LIB/python_wrapper/twa/agentlogging`](https://github.com/TheWorldAvatar/baselib/tree/main/python_wrapper/twa/agentlogging).** ## Functions diff --git a/Apps/BMSQueryApp/README.md b/Apps/BMSQueryApp/README.md index c8807454eba..797904ec3da 100644 --- a/Apps/BMSQueryApp/README.md +++ b/Apps/BMSQueryApp/README.md @@ -7,12 +7,12 @@ This is an Android app project to monitor and control lab devices. The minimum a Device Control. ## Development Setup -All agents and database should be deployed in a server with local [stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager), and the app acts as a standalone frontend. This section is aimed for local development setup. +All agents and database should be deployed in a server with local [stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager), and the app acts as a standalone frontend. This section is aimed for local development setup. Agents required: - [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent) - [BMSQueryAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BMSQueryAgent) -- [BMSUpdateAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BMSWriteAgent) +- [BMSUpdateAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/BMSUpdateAgent) ### Feature Info Agent Configuration Before launching Feature Info Agent in the stack, config files need to be copied into the feature info agent folder. Copy all `.sparql` files in `feature-info-agent-queries/` to `TheWorldAvatar/Agent/FeatureInfoAgent/queries`. @@ -27,5 +27,5 @@ Update the endpoints and IRIs in `Apps/BMSQueryApp/app/src/main/res/values/confi ### Authentication This app uses [AppAuth](https://github.com/openid/AppAuth-Android) to communicate with OpenID Connect providers. 
-1. Register app client in Keycloak as OpenID Connect type client. Check [here](https://www.keycloak.org/docs/23.0.4/server_admin/#_oidc_clients) for complete setup guide. The app isn't a resource client, so no authorization is needed to setup. +1. Register app client in Keycloak as OpenID Connect type client. Check [here](https://www.keycloak.org/docs/latest/server_admin/index.html#_oidc_clients) for complete setup guide. The app isn't a resource client, so no authorization is needed to setup. 2. Replace `` with the stack address in Apps/BMSQueryApp/app/src/main/res/raw/auth_config.json \ No newline at end of file diff --git a/Apps/Modules/camera/README.md b/Apps/Modules/camera/README.md index 584041097a8..83ad3d779e1 100644 --- a/Apps/Modules/camera/README.md +++ b/Apps/Modules/camera/README.md @@ -22,5 +22,5 @@ Since multiple feature modules can extend the base camera fragment for different ## Extend Camera Fragment The base camera fragment need to be extended for the capture result to be used for different use cases. Navigating directly to this fragment and click the capture button won't have any effect. Here are some helpful links: -- [QR code scanning example](https://github.com/cambridge-cares/TheWorldAvatar/tree/1584-asset-management-app/Apps/AssetManagementApp/feature/qrscan) +- [QR code scanning example](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Apps/AssetManagementApp/feature/qrscan) - [Google ML Kit](https://developers.google.com/ml-kit): provide various vision APIs such as barcode scanning, face detection and text recognition \ No newline at end of file diff --git a/Apps/Modules/login/README.md b/Apps/Modules/login/README.md index f5b5e727f22..7ff4a829cf2 100644 --- a/Apps/Modules/login/README.md +++ b/Apps/Modules/login/README.md @@ -10,14 +10,14 @@ This module contains two parts: core and feature. It integrates [AppAuth for And > Import :core:login 2. Open the `Modules` tab and click the `+` to add new modules under `core` 3. 
In the pop up `Create New Module` window, click `Import...` -4. Select [/core/login](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Apps/Modules/core/login) as source location +4. Select [/core/login](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Apps/Modules/login/core/login) as source location 5. Set the `Module name` to `:core:login` 1. You may see warning about missing `:core:utils` module. Ignore this warning while importing and fix the error in code after import. > Import :feature:login 6. Click the `+` to add new modules under `feature` 7. In the pop up `Create New Module` window, click `Import...` -8. Select [/feature/login](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Apps/Modules/feature/login) as source location +8. Select [/feature/login](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Apps/Modules/login/feature/login) as source location 9. Set the `Module name` to `:feature:login` 10. Click `Finish` and wait for the module to be imported diff --git a/Apps/PirmasensToiletApp/README.md b/Apps/PirmasensToiletApp/README.md index eeafbffc87d..e58af70165a 100644 --- a/Apps/PirmasensToiletApp/README.md +++ b/Apps/PirmasensToiletApp/README.md @@ -48,17 +48,17 @@ Any data access required should be contacted through someone working on the repo ### 2.1 Backend Services -The app will require a running [The World Avatar stack](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) as the backend. +The app will require a running [The World Avatar stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) as the backend. ### 2.1.1 Stack Data Uploader -Data specified in [this section](#12-data-sources) should be uploaded using the [Stack Data Uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader). 
All the relevant configurations and settings are included in the [`./inputs/data/`](./inputs/data/) directory. +Data specified in [this section](#12-data-sources) should be uploaded using the [Stack Data Uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader). All the relevant configurations and settings are included in the [`./inputs/data/`](./inputs/data/) directory. Briefly, the app will only require the `Wasgau` and `Toilet` datasets, as well as the associated ontologies and OBDA mappings for the base functionality of this application. Extended configuration are for routing purposes. ### 2.1.2 Feature Info Agent -The stack will also require the [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent) service to retrieve metadata. Please read setting up the [built-in service section](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager#built-in-containers) for more details on deploying this. This agent will require the following configuration targeted at the toilet class in `fia-config.json`: +The stack will also require the [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent) service to retrieve metadata. Please read setting up the [built-in service section](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager#built-in-containers) for more details on deploying this. 
This agent will require the following configuration targeted at the toilet class in `fia-config.json`: ```json { diff --git a/Apps/PirmasensToiletApp/inputs/data/README.md b/Apps/PirmasensToiletApp/inputs/data/README.md index 3fbcd73d6b7..e0df644d0e0 100644 --- a/Apps/PirmasensToiletApp/inputs/data/README.md +++ b/Apps/PirmasensToiletApp/inputs/data/README.md @@ -1,6 +1,6 @@ # Stack Data Uploader contents -This directory contains the different data contents for the [`stack-data-uploader`](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader). +This directory contains the different data contents for the [`stack-data-uploader`](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader). ## Table of Contents @@ -32,7 +32,7 @@ Additional datasets required for routing purposes: ## 2. Configuration File -As per the [documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader), the relevant configuration file is as follows. Users are also able to upload icons using the `staticGeoServerData` key. +As per the [documentation](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader), the relevant configuration file is as follows. Users are also able to upload icons using the `staticGeoServerData` key. ```json { diff --git a/Apps/SampleApp/feature/todo/README.md b/Apps/SampleApp/feature/todo/README.md index 7fcdccdd7dc..82592f24c6b 100644 --- a/Apps/SampleApp/feature/todo/README.md +++ b/Apps/SampleApp/feature/todo/README.md @@ -3,7 +3,7 @@ A module demonstrates in module navigation with action, data binding and the complete workflow of retrieving data from internet and displaying in the app. ## 1. Workflow -The workflow of data transfer has been discussed in [SampleApp/README.md](https://github.com/cambridge-cares/TheWorldAvatar/blob/1786-android-documentation/Apps/SampleApp/README.md#22-data-transfer). Please refer to it for more details. 
+The workflow of data transfer has been discussed in [SampleApp/README.md](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Apps/SampleApp/README.md#22-data-transfer). Please refer to it for more details. ## 2. In Module Navigation In module navigation has been discussed in [SampleApp/README.md](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Apps/SampleApp/README.md#213-action). Please refer to it for more details. diff --git a/Deploy/stacks/AI4PublicHealth/Stack_Deployment/README.md b/Deploy/stacks/AI4PublicHealth/Stack_Deployment/README.md index b3f051c8873..5767d744232 100644 --- a/Deploy/stacks/AI4PublicHealth/Stack_Deployment/README.md +++ b/Deploy/stacks/AI4PublicHealth/Stack_Deployment/README.md @@ -140,7 +140,7 @@ Jiying Chen (jc2341@cam.ac.uk), Nov 2024 [Remote - SSH]: https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-ssh [Docker]: https://code.visualstudio.com/docs/containers/overview [REST Client]: https://marketplace.visualstudio.com/items?itemName=humao.rest-client -[Stack Manager README]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md +[Stack Manager README]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md [OntoFHRS]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-ai4ph-ontologies/JPS_Ontology/ontology/ontofhrs [OntoPOI]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-ai4ph-ontologies/JPS_Ontology/ontology/ontopoi [OntoGreenspace]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-ai4ph-ontologies/JPS_Ontology/ontology/ontogreenspace @@ -153,9 +153,9 @@ Jiying Chen (jc2341@cam.ac.uk), Nov 2024 [Mapbox visualisation guidance]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/web/twa-vis-framework/docs/mapbox.md -[common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Stack Data Uploader]: 
https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[Stack Manager]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md +[common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Stack Data Uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[Stack Manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md [AccessAgent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent @@ -164,8 +164,8 @@ Jiying Chen (jc2341@cam.ac.uk), Nov 2024 [AirQuality Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AirQualityAgent -[xml_converter]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-AI-for-Healthcare/Deploy/stacks/AI4PublicHealth/Common_Script/xml_converter -[FoodHygieneRating]: ./stack-data-uploader/obda_mappings/FoodHygieneRating.obda -[here]: https://github.com/cambridge-cares/TheWorldAvatar/blob/dev-AI-for-Healthcare/Agents/FenlandTrajectoryAgent/README.md +[xml_converter]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/AI4PublicHealth/Common_Script/xml_converter +[FoodHygieneRating]: ./stack-data-uploader/data/FoodHygiene/FoodHygieneRating.obda +[here]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/FenlandTrajectoryAgent/README.md [OntoDevice]: https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-ai4ph-ontologies/JPS_Ontology/ontology/ontodevice -[mapping folder]: ./stack-data-uploader/obda_mappings +[mapping folder]: ./stack-data-uploader/data diff --git a/Deploy/stacks/Chile/README.md b/Deploy/stacks/Chile/README.md index 70758fd35fa..9012bfd8c63 100644 --- a/Deploy/stacks/Chile/README.md +++ b/Deploy/stacks/Chile/README.md @@ -151,11 +151,10 @@ Use (ST_Dump(...)).* to expand all of the columns, this should work for all func * 
https://postgis.net/docs/RT_reference.html * https://epsg.io/32719 * https://postgis.net/docs/reference.html -* https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md -* https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-data-uploader/README.md +* https://github.com/TheWorldAvatar/stack/blob/main/stack-manager/README.md +* https://github.com/TheWorldAvatar/stack/blob/main/stack-data-uploader/README.md * https://github.com/cambridge-cares/TheWorldAvatar/blob/main/web/twa-vis-framework/docs/mapbox.md * https://slideplayer.com/slide/7417666/ -* https://manifold.net/doc/mfd9/sql_example__custom_contour_intervals.html * https://nronnei.github.io/blog/2017/03/creating-rasters-from-scratch-in-postgis-pt3/ * https://www.w3schools.com/SQL/sql_update.asp * https://docs.oracle.com/en/database/oracle/oracle-database/18/geors/raster-algebra-and-analytics.html#GUID-C75744C9-FA04-4391-96F2-59EF2EA212FF diff --git a/Deploy/stacks/KingsLynn/StackDeployment/README.md b/Deploy/stacks/KingsLynn/StackDeployment/README.md index 26a1bc2820f..cb5f5261592 100644 --- a/Deploy/stacks/KingsLynn/StackDeployment/README.md +++ b/Deploy/stacks/KingsLynn/StackDeployment/README.md @@ -480,15 +480,15 @@ HAVING(?streets > 1) [MetOffice My Account]: https://register.metoffice.gov.uk/MyAccountClient/account/view -[common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Stack data uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[Stack manager]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md +[common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Stack data uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[Stack manager]: 
https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md [AccessAgent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent [CityImportAgent]: https://github.com/cambridge-cares/CitiesKG/tree/develop/agents [TSDAgent]: https://github.com/cambridge-cares/CitiesKG/tree/develop/agents -[UPRN Agent]: https://github.com/cambridge-cares/CitiesKG/tree/uprn-agent +[UPRN Agent]: https://github.com/cambridge-cares/CitiesKG/tree/develop/agents [Building Matching Readme]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/BuildingMatchingAgent/README.md [EPC Agent]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Agents/EnergyPerformanceCertificateAgent/README.md [Average Square Metre Price Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AverageSquareMetrePriceAgent/README.md @@ -503,13 +503,13 @@ HAVING(?streets > 1) [AirQuality Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AirQualityAgent -[data.json]: /StackDeployment/inputs/stack-manager/inputs/data/visualisation/data.json -[Agent docker-compose file folder]: /StackDeployment/inputs/docker_compose_files -[resources]: /StackDeployment/resources -[river_level_agent input folder]: /StackDeployment/inputs/river_level_agent +[data.json]: inputs/stack-manager/inputs/data/visualisation/data.json +[Agent docker-compose file folder]: inputs/docker_compose_files +[resources]: resources +[river_level_agent input folder]: inputs/river_level_agent [UPRN Agent in batches]: ../Utilities/uprn_agent/run_uprn_agent_in_chunks.py [Utilities]: ../Utilities -[routing.json]: /StackDeployment/inputs/access_agent/routing.json +[routing.json]: inputs/access_agent/routing.json [CKG config.properties]: https://github.com/cambridge-cares/CitiesKG/blob/develop/agents/src/main/resources/config.properties \ No newline at end of file diff --git a/Deploy/stacks/KingsLynn/Utilities/README.md 
b/Deploy/stacks/KingsLynn/Utilities/README.md index a4fedfb726f..2bdc6ca89df 100644 --- a/Deploy/stacks/KingsLynn/Utilities/README.md +++ b/Deploy/stacks/KingsLynn/Utilities/README.md @@ -72,5 +72,5 @@ The scripts within the `kg_utils` subdirectory provide functionality to interact [Semantic 3D City Agents README]: https://github.com/cambridge-cares/CitiesKG/tree/develop/agents -[Access Agent README]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_ACCESS_AGENT -[run_uprn_agent_in_chunks.py]: \uprn_agent\run_uprn_agent_in_chunks.py \ No newline at end of file +[Access Agent README]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/AccessAgent +[run_uprn_agent_in_chunks.py]: uprn_agent/run_uprn_agent_in_chunks.py \ No newline at end of file diff --git a/Deploy/stacks/Pirmasens/README.md b/Deploy/stacks/Pirmasens/README.md index f6de2fb533d..e18577d3363 100644 --- a/Deploy/stacks/Pirmasens/README.md +++ b/Deploy/stacks/Pirmasens/README.md @@ -154,8 +154,11 @@ Kok Foong Lee (kokfoong.lee@cares.cam.ac.uk), November 2023 [grafana-prep readme]: ./stack-manager/inputs/data/grafana-prep/readme.txt [chained derivations]: https://lucid.app/publicSegments/view/8dfdf102-bb7d-47de-bb52-c22d86a50bcf/image.jpeg + + [timeseries.properties]: https://github.com/cambridge-cares/pirmasens/blob/main/districtheating/resources/timeseries.properties [dataproperties.py]: https://github.com/cambridge-cares/pirmasens/blob/main/districtheating/resources/dataproperties.py + [Forecasting Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/ForecastingAgent @@ -163,4 +166,4 @@ Kok Foong Lee (kokfoong.lee@cares.cam.ac.uk), November 2023 [DH Emission Estimation Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/DistrictHeatingEmissionEstimationAgent [Aermod Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_VIRTUALSENSOR/AermodAgent [DH Optimisation Trigger Agent]: 
https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/DistrictHeatingOptimisationTriggerAgent -[Stack manager]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md +[Stack manager]: https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md diff --git a/Deploy/stacks/Singapore-sea-level-rise/README.md b/Deploy/stacks/Singapore-sea-level-rise/README.md index 3df16bea4c8..a4e97cad7eb 100644 --- a/Deploy/stacks/Singapore-sea-level-rise/README.md +++ b/Deploy/stacks/Singapore-sea-level-rise/README.md @@ -1,5 +1,5 @@ # Singapore Sea-Level Rise -This repository contains the instructions, directory structure and configurations required to deploy Singapore stack for Sea-Level-Rise analysis which builds on top of the existing [Augmented Singapore](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-singapore-stack/Deploy/stacks/Singapore). +This repository contains the instructions, directory structure and configurations required to deploy Singapore stack for Sea-Level-Rise analysis which builds on top of the existing [Augmented Singapore](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/Singapore). ## Data Comprehensive data collated can be found in the [CARES dropbox link](https://www.dropbox.com/scl/fo/s4youc2epx7quqapolgw6/AH_IAMDhH9FppOosYpKd3zs?rlkey=4ab335m057bkv64zs7e8xdn20&dl=0). @@ -58,7 +58,7 @@ curl -X POST http://localhost:3838/buildingflooragent/ ``` - Check contents of ```gfa_floors.floors```, the number of rows should equate the number of buildings ### GFAAgent -[GFAAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-sea-level-rise-singapore/Agents/GFAAgent) computes the Gross Floor Area (GFA) and the construction cost of buildings. +[GFAAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/GFAAgent) computes the Gross Floor Area (GFA) and the construction cost of buildings. 
- Prequisites: 1) Floors data added by BuildingFloorAgent @@ -196,7 +196,7 @@ There are two sets of postcode data from running the stack data uploader with di 2) sgpostcode - More comprehensive and updated dataset - - Sourced from [https://github.com/isen-ng/singapore-postal-codes-1] + - Sourced from https://github.com/isen-ng/singapore-postal-codes-1 - Linked to buildings using building identification agent by running the HTTP request [postcode_matching.http] ### data.json diff --git a/Deploy/stacks/Singapore/README.md b/Deploy/stacks/Singapore/README.md index 951aa68f0aa..76f145348e4 100644 --- a/Deploy/stacks/Singapore/README.md +++ b/Deploy/stacks/Singapore/README.md @@ -3,7 +3,7 @@ This repository contains the instructions, directory structure, and configuratio ## 1. Preparations ### Knowledge of the stack tools adopted in The World Avatar -Please read through the [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) and [Stack Data Uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader) to set up your stack accordingly. +Please read through the [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) and [Stack Data Uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader) to set up your stack accordingly. At the moment, a working understanding of these two tools will suffice for the deployment of the Singapore stack. 
diff --git a/Deploy/stacks/UK-building-retrofit/StackDeployment/README.md b/Deploy/stacks/UK-building-retrofit/StackDeployment/README.md index f254cecd06f..4f8f15eee68 100644 --- a/Deploy/stacks/UK-building-retrofit/StackDeployment/README.md +++ b/Deploy/stacks/UK-building-retrofit/StackDeployment/README.md @@ -60,7 +60,7 @@ bash ./stack.sh remove CamElyWis-DT -v # Remove individual service bash ./stack.sh remove CamElyWis-DT ``` -After spinning up the stack, the GUI endpoints to the running containers can be accessed via Browser (i.e. adminer, blazegraph, ontop, geoserver). The exact endpoints and login details can be found in the [Stack Manager README](Deploy/stacks/dynamic/stack-manager/README.md). +After spinning up the stack, the GUI endpoints to the running containers can be accessed via Browser (i.e. adminer, blazegraph, ontop, geoserver). The exact endpoints and login details can be found in the [Stack Manager README](https://github.com/TheWorldAvatar/stack/blob/main/stack-manager/README.md). 
### Spinning Up the Stack Remotely via SSH @@ -135,9 +135,9 @@ The [Feature Info Agent] serves as an access point for the visualisation, enabli [Feature Info Agent]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent -[common stack scripts]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/common-scripts -[Stack data uploader]: https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader -[Stack manager]: https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md +[common stack scripts]: https://github.com/TheWorldAvatar/stack/tree/main/common-scripts +[Stack data uploader]: https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader +[Stack manager]: https://github.com/TheWorldAvatar/stack/blob/main/stack-manager/README.md [fia_queries]: ./Stack-manager/inputs/data/fia-queries diff --git a/Deploy/stacks/UK-building-retrofit/Utilities/TOPSIS/README.md b/Deploy/stacks/UK-building-retrofit/Utilities/TOPSIS/README.md index a294d85a897..56b63f78a58 100644 --- a/Deploy/stacks/UK-building-retrofit/Utilities/TOPSIS/README.md +++ b/Deploy/stacks/UK-building-retrofit/Utilities/TOPSIS/README.md @@ -70,4 +70,5 @@ result/TOPSIS_result.csv - The default weighting method is `"equal_weight"`, but it can be changed to `"entropy_weight_method"` in `criteria_weight.json`. + [entropy-based approach]: https://www.sciencedirect.com/topics/engineering/entropy-method#:~:text=The%20entropy%20method%20is%20an,indicators%20through%20the%20information%20entropy. \ No newline at end of file diff --git a/Deploy/stacks/cares-lab/README.md b/Deploy/stacks/cares-lab/README.md index 4742fee9b2c..02ac9a6703c 100644 --- a/Deploy/stacks/cares-lab/README.md +++ b/Deploy/stacks/cares-lab/README.md @@ -3,7 +3,7 @@ This repository contains the instructions, directory structure, and configuratio ## 1. 
Preparations ### Knowledge of the stack tools adopted in The World Avatar -Please read through the [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) to set up your stack accordingly. +Please read through the [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) to set up your stack accordingly. ## 2. Deployment Workflow ### Stack manager diff --git a/Deploy/stacks/db/fileserver/README.md b/Deploy/stacks/db/fileserver/README.md index 082010a9705..d92e7bf8441 100644 --- a/Deploy/stacks/db/fileserver/README.md +++ b/Deploy/stacks/db/fileserver/README.md @@ -35,7 +35,7 @@ You should now be able to access the fileserver with a username `fs_user` and th ## Integration with the Stack Manager -The fileserver can also be integrated with the stack manager. Please refer to the general stack-manager documentation for more information, especially the "Specifying a custom container" section. ([Link to stack manager README](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md#specifying-custom-containers)) +The fileserver can also be integrated with the stack manager. Please refer to the general stack-manager documentation for more information, especially the "Specifying a custom container" section. ([Link to stack manager README](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md#specifying-custom-containers)) In short, you need to add a service configuration file and include the file server in the stack configuration file. The following example can be used as a starting point for the service configuration file. Make sure to name the secrets file with the stored credentials "file_server_password", otherwise a default password will be used. After spinning up the stack, you should be able to access the file server with the username `fs_user` and the credentials you set. 
diff --git a/Deploy/stacks/ontop+geoserver/README.md b/Deploy/stacks/ontop+geoserver/README.md index a841c9d65ed..514be01d9af 100644 --- a/Deploy/stacks/ontop+geoserver/README.md +++ b/Deploy/stacks/ontop+geoserver/README.md @@ -34,7 +34,7 @@ * [Adminer interface to the PostgreSQL database with default login settings][postgres_web] (the default password can be found in the `postgis/postgres.env` file) * [Ontop web endpoint][ontop_web] * [GeoServer web endpoint][geoserver_web] - + [shapefiles]: https://trac.osgeo.org/gdal/wiki/UserDocs/Shapefiles [postgres_web]: http://localhost:2311/?pgsql=host.docker.internal%3A2317&username=postgres&db=the_world_avatar [ontop_web]: http://localhost:2316/ diff --git a/Deploy/stacks/timeline/readme.md b/Deploy/stacks/timeline/readme.md index 4ed82d4695f..8b4580c608a 100644 --- a/Deploy/stacks/timeline/readme.md +++ b/Deploy/stacks/timeline/readme.md @@ -4,7 +4,7 @@ Stack name should be `timeline-test`. -Prepare these secret files in the [stack secret folder](./stack-manager/test/inputs/secrets) +Prepare these secret files in the [stack secret folder](./test/stack-manager/inputs/secrets) - geoserver_password - postgis_password @@ -53,7 +53,7 @@ then restart stack manager again: Stack name should be `timeline`. -Prepare these secret files in [](./stack-manager/prod/inputs/secrets) +Prepare these secret files in [](./test/stack-manager/inputs/secrets) - geoserver_password - postgis_password @@ -124,15 +124,16 @@ IMPORT FOREIGN SCHEMA public 3. Replace in vis-files/keycloak.json with the URL of the KeyCloak server, needs to be an address that can be accessed from client and the server. 1. The configuration assumes a realm called `timeline` exists and a client called `desktop-vis` is set up correctly with the correct redirect urls. 4. Upload [./shacl/timeline.ttl](./shacl/timeline.ttl) to the kb namespace on Blazegraph -5. 
Download contents of [https://github.com/TheWorldAvatar/viz/tree/main/code/public/images/defaults] into [./test/vis/vis-files/public/images/defaults](./test/vis/vis-files/public/images/defaults). +5. Download contents of https://github.com/TheWorldAvatar/viz/tree/main/code/public/images/defaults into [./test/vis/vis-files/public/images/defaults](./test/vis/vis-files/public/images/defaults). 6. Run `docker compose up -d` in [./test/vis/](./test/vis/). 7. Visualisation will be accessible at port 3000, e.g. http://[IP_ADDRESS]:3000 + ## Oura Ring data Create a namespace called `ouraring` in Blazegraph (default namespace defined in [./test/stack-manager/inputs/config/services/ouraring-agent.json](./test/stack-manager/inputs/config/services/ouraring-agent.json)). Refer to documentation at [../../../Agents/OuraRingAgent](../../../Agents/OuraRingAgent/readme.md) for instructions to instantiate Oura Ring data -[keycloak-test.json]: ./stack-manager/test/inputs/config/services/keycloak-test.json -[keycloak-prod.json]: ./stack-manager/test/inputs/config/services/keycloak-prod.json \ No newline at end of file +[keycloak-test.json]: ./test/stack-manager/inputs/config/services/keycloak-test.json +[keycloak-prod.json]: ./prod/stack-manager/inputs/config/services/keycloak-prod.json diff --git a/Deploy/stacks/web/website/site/user/pages/02.explore/01.marie/marie.md b/Deploy/stacks/web/website/site/user/pages/02.explore/01.marie/marie.md index 4703f1ad3d1..976301f1974 100644 --- a/Deploy/stacks/web/website/site/user/pages/02.explore/01.marie/marie.md +++ b/Deploy/stacks/web/website/site/user/pages/02.explore/01.marie/marie.md @@ -134,4 +134,4 @@ slug: marie
-[plugin:content-inject](/modular/partners) +[plugin:content-inject](../../modular/partners) diff --git a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/01.power-system/uk_power_system.md b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/01.power-system/uk_power_system.md index 7b178b4ee36..04f33e36736 100644 --- a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/01.power-system/uk_power_system.md +++ b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/01.power-system/uk_power_system.md @@ -42,4 +42,4 @@ slug: power-system
-[plugin:content-inject](/modular/partners) \ No newline at end of file +[plugin:content-inject](../../../modular/partners) \ No newline at end of file diff --git a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/02.gas-grid/uk_gas_grid.md b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/02.gas-grid/uk_gas_grid.md index 870e05ebe92..88469b26e19 100644 --- a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/02.gas-grid/uk_gas_grid.md +++ b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/02.gas-grid/uk_gas_grid.md @@ -36,4 +36,4 @@ slug: gas-grid
-[plugin:content-inject](/modular/partners) \ No newline at end of file +[plugin:content-inject](../../../modular/partners) \ No newline at end of file diff --git a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/03.land-use/land_use.md b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/03.land-use/land_use.md index 4023de8e069..46f7717ff16 100644 --- a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/03.land-use/land_use.md +++ b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/03.land-use/land_use.md @@ -38,4 +38,4 @@ slug: land-use
-[plugin:content-inject](/modular/partners) \ No newline at end of file +[plugin:content-inject](../../../modular/partners) \ No newline at end of file diff --git a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/04.flood-risk/flood_risk.md b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/04.flood-risk/flood_risk.md index 1225e40a27b..249bf1d5cbe 100644 --- a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/04.flood-risk/flood_risk.md +++ b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/04.flood-risk/flood_risk.md @@ -35,4 +35,4 @@ slug: flood-risk
-[plugin:content-inject](/modular/partners) \ No newline at end of file +[plugin:content-inject](../../../modular/partners) \ No newline at end of file diff --git a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/default.md b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/default.md index b73f95f966f..61bfe8d2c0f 100644 --- a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/default.md +++ b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/default.md @@ -77,4 +77,4 @@ slug: digital-twin

-[plugin:content-inject](/modular/partners) \ No newline at end of file +[plugin:content-inject](../../modular/partners) \ No newline at end of file diff --git a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/template.md b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/template.md index a8b89ee695c..c26ee16a704 100644 --- a/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/template.md +++ b/Deploy/stacks/web/website/site/user/pages/02.explore/02.digital-twin/template.md @@ -34,4 +34,4 @@ slug: slug
-[plugin:content-inject](/modular/partners) +[plugin:content-inject](../../modular/partners) diff --git a/Deploy/stacks/web/website/site/user/plugins/error/README.md b/Deploy/stacks/web/website/site/user/plugins/error/README.md index ef24726bdd1..9604805fbe7 100644 --- a/Deploy/stacks/web/website/site/user/plugins/error/README.md +++ b/Deploy/stacks/web/website/site/user/plugins/error/README.md @@ -12,7 +12,7 @@ Installing the Error plugin can be done in one of two ways. Our GPM (Grav Packag ## GPM Installation (Preferred) -The simplest way to install this plugin is via the [Grav Package Manager (GPM)](http://learn.getgrav.org/advanced/grav-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: +The simplest way to install this plugin is via the [Grav Package Manager (GPM)](https://learn.getgrav.org/16/cli-console/grav-cli-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: bin/gpm install error @@ -75,7 +75,7 @@ As development for the Error plugin continues, new versions may become available ## GPM Update (Preferred) -The simplest way to update this plugin is via the [Grav Package Manager (GPM)](http://learn.getgrav.org/advanced/grav-gpm). You can do this with this by navigating to the root directory of your Grav install using your system's Terminal (also called command line) and typing the following: +The simplest way to update this plugin is via the [Grav Package Manager (GPM)](https://learn.getgrav.org/16/cli-console/grav-cli-gpm). 
You can do this with this by navigating to the root directory of your Grav install using your system's Terminal (also called command line) and typing the following: bin/gpm update error diff --git a/Deploy/stacks/web/website/site/user/plugins/form/CHANGELOG.md b/Deploy/stacks/web/website/site/user/plugins/form/CHANGELOG.md index 16878729a02..d001fcce591 100644 --- a/Deploy/stacks/web/website/site/user/plugins/form/CHANGELOG.md +++ b/Deploy/stacks/web/website/site/user/plugins/form/CHANGELOG.md @@ -1,3 +1,4 @@ + # v4.3.1 ## 01/31/2021 diff --git a/Deploy/stacks/web/website/site/user/plugins/form/README.md b/Deploy/stacks/web/website/site/user/plugins/form/README.md index d171e9e0a1c..2c22edc2651 100644 --- a/Deploy/stacks/web/website/site/user/plugins/form/README.md +++ b/Deploy/stacks/web/website/site/user/plugins/form/README.md @@ -21,7 +21,7 @@ enabled: true # How to use the Form Plugin The Learn site has two pages describing how to use the Form Plugin: -- [Forms](http://learn.getgrav.org/advanced/forms) +- [Forms](https://learn.getgrav.org/17/forms) - [Add a contact form](http://learn.getgrav.org/forms/forms/example-form) # Using email diff --git a/Deploy/stacks/web/website/site/user/plugins/form/vendor/google/recaptcha/README.md b/Deploy/stacks/web/website/site/user/plugins/form/vendor/google/recaptcha/README.md index 92e8deae7bd..7119f9eab73 100644 --- a/Deploy/stacks/web/website/site/user/plugins/form/vendor/google/recaptcha/README.md +++ b/Deploy/stacks/web/website/site/user/plugins/form/vendor/google/recaptcha/README.md @@ -1,5 +1,5 @@ # reCAPTCHA PHP client library - + [![Build Status](https://travis-ci.org/google/recaptcha.svg)](https://travis-ci.org/google/recaptcha) [![Coverage Status](https://coveralls.io/repos/github/google/recaptcha/badge.svg)](https://coveralls.io/github/google/recaptcha) [![Latest Stable Version](https://poser.pugx.org/google/recaptcha/v/stable.svg)](https://packagist.org/packages/google/recaptcha) @@ -55,8 +55,7 @@ own autoloader 
or require the needed files directly in your code. ## Usage First obtain the appropriate keys for the type of reCAPTCHA you wish to -integrate for v2 at https://www.google.com/recaptcha/admin or v3 at -https://g.co/recaptcha/v3. +integrate for v2 or v3 at https://www.google.com/recaptcha/admin. Then follow the [integration guide on the developer site](https://developers.google.com/recaptcha/intro) to add the reCAPTCHA diff --git a/Deploy/stacks/web/website/site/user/plugins/page-inject/README.md b/Deploy/stacks/web/website/site/user/plugins/page-inject/README.md index abe980499a6..229816db634 100644 --- a/Deploy/stacks/web/website/site/user/plugins/page-inject/README.md +++ b/Deploy/stacks/web/website/site/user/plugins/page-inject/README.md @@ -8,7 +8,7 @@ Installing the Page Inject plugin can be done in one of two ways. Our GPM (Grav ## GPM Installation (Preferred) -The simplest way to install this plugin is via the [Grav Package Manager (GPM)](http://learn.getgrav.org/advanced/grav-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: +The simplest way to install this plugin is via the [Grav Package Manager (GPM)](https://learn.getgrav.org/16/cli-console/grav-cli-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: bin/gpm install page-inject diff --git a/Deploy/stacks/web/website/site/user/plugins/problems/README.md b/Deploy/stacks/web/website/site/user/plugins/problems/README.md index ad7fd6f8eea..7033e181ebe 100644 --- a/Deploy/stacks/web/website/site/user/plugins/problems/README.md +++ b/Deploy/stacks/web/website/site/user/plugins/problems/README.md @@ -14,7 +14,7 @@ Installing the Problems plugin can be done in one of two ways. 
Our GPM (Grav Pac ## GPM Installation (Preferred) -The simplest way to install this plugin is via the [Grav Package Manager (GPM)](http://learn.getgrav.org/advanced/grav-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: +The simplest way to install this plugin is via the [Grav Package Manager (GPM)](https://learn.getgrav.org/17/cli-console/grav-cli-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: bin/gpm install problems diff --git a/Deploy/stacks/web/website/site/user/themes/quark/README.md b/Deploy/stacks/web/website/site/user/themes/quark/README.md index a65d82171bc..66705cb858f 100644 --- a/Deploy/stacks/web/website/site/user/themes/quark/README.md +++ b/Deploy/stacks/web/website/site/user/themes/quark/README.md @@ -33,7 +33,7 @@ The theme by itself is useful, but you may have an easier time getting up and ru ## GPM Installation (Preferred) -The simplest way to install this theme is via the [Grav Package Manager (GPM)](http://learn.getgrav.org/advanced/grav-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: +The simplest way to install this theme is via the [Grav Package Manager (GPM)](https://learn.getgrav.org/16/cli-console/grav-cli-gpm) through your system's Terminal (also called the command line). From the root of your Grav install type: bin/gpm install quark diff --git a/Deploy/stacks/web/website/site/vendor/composer/semver/README.md b/Deploy/stacks/web/website/site/vendor/composer/semver/README.md index 409b9dcbaeb..3721a769388 100644 --- a/Deploy/stacks/web/website/site/vendor/composer/semver/README.md +++ b/Deploy/stacks/web/website/site/vendor/composer/semver/README.md @@ -5,7 +5,7 @@ Semver library that offers utilities, version constraint parsing and validation. 
Originally written as part of [composer/composer](https://github.com/composer/composer), now extracted and made available as a stand-alone library. - + [![Build Status](https://travis-ci.org/composer/semver.svg?branch=master)](https://travis-ci.org/composer/semver) diff --git a/Deploy/stacks/web/website/site/vendor/doctrine/cache/README.md b/Deploy/stacks/web/website/site/vendor/doctrine/cache/README.md index c795a058428..5b12f551539 100644 --- a/Deploy/stacks/web/website/site/vendor/doctrine/cache/README.md +++ b/Deploy/stacks/web/website/site/vendor/doctrine/cache/README.md @@ -1,5 +1,5 @@ # Doctrine Cache - + [![Build Status](https://img.shields.io/travis/doctrine/cache/master.svg?style=flat-square)](http://travis-ci.org/doctrine/cache) [![Code Coverage](https://codecov.io/gh/doctrine/dbal/branch/cache/graph/badge.svg)](https://codecov.io/gh/doctrine/dbal/branch/master) diff --git a/Deploy/stacks/web/website/site/vendor/erusev/parsedown-extra/README.md b/Deploy/stacks/web/website/site/vendor/erusev/parsedown-extra/README.md index cee4b5430ab..1ccdf39744d 100644 --- a/Deploy/stacks/web/website/site/vendor/erusev/parsedown-extra/README.md +++ b/Deploy/stacks/web/website/site/vendor/erusev/parsedown-extra/README.md @@ -1,7 +1,7 @@ -> You might also like [Caret](http://caret.io?ref=parsedown) - our Markdown editor for the Desktop. +> You might also like [Caret](http://caret.io?ref=parsedown) - our Markdown editor for the Desktop. ## Parsedown Extra - + [![Build Status](https://img.shields.io/travis/erusev/parsedown-extra/master.svg?style=flat-square)](https://travis-ci.org/erusev/parsedown-extra) An extension of [Parsedown](http://parsedown.org) that adds support for [Markdown Extra](https://michelf.ca/projects/php-markdown/extra/). @@ -24,7 +24,7 @@ echo $Extra->text('# Header {.sth}'); # prints:

Header

**Who uses Parsedown Extra?** -[October CMS](http://octobercms.com/), [Bolt CMS](http://bolt.cm/), [Kirby CMS](http://getkirby.com/), [Grav CMS](http://getgrav.org/), [Statamic CMS](http://www.statamic.com/) and [more](https://www.versioneye.com/php/erusev:parsedown-extra/references). +[October CMS](http://octobercms.com/), [Bolt CMS](http://bolt.cm/), [Kirby CMS](http://getkirby.com/), [Grav CMS](http://getgrav.org/), [Statamic CMS](http://www.statamic.com/) and more. **How can I help?** diff --git a/Deploy/stacks/web/website/site/vendor/erusev/parsedown/README.md b/Deploy/stacks/web/website/site/vendor/erusev/parsedown/README.md index b5d9ed2ee33..8b9aec41d66 100644 --- a/Deploy/stacks/web/website/site/vendor/erusev/parsedown/README.md +++ b/Deploy/stacks/web/website/site/vendor/erusev/parsedown/README.md @@ -1,8 +1,7 @@ -> I also make [Caret](https://caret.io?ref=parsedown) - a Markdown editor for Mac and PC. +> I also make [Caret](https://caret.io?ref=parsedown) - a Markdown editor for Mac and PC. 
## Parsedown -[![Build Status](https://img.shields.io/travis/erusev/parsedown/master.svg?style=flat-square)](https://travis-ci.org/erusev/parsedown) Better Markdown Parser in PHP diff --git a/Deploy/stacks/web/website/site/vendor/gregwar/image/Gregwar/Image/README.md b/Deploy/stacks/web/website/site/vendor/gregwar/image/Gregwar/Image/README.md index 7d2e47788c9..e5b3e011620 100644 --- a/Deploy/stacks/web/website/site/vendor/gregwar/image/Gregwar/Image/README.md +++ b/Deploy/stacks/web/website/site/vendor/gregwar/image/Gregwar/Image/README.md @@ -32,7 +32,7 @@ Image::open('in.png') ->negate() ->save('out.jpg'); ``` - + Here are the resize methods: * `resize($width, $height, $background)`: resizes the image, will preserve scale and never @@ -127,6 +127,7 @@ You can also create image from scratch using: ``` Where 200 is the width and 100 the height + ## Saving the image diff --git a/Deploy/stacks/web/website/site/vendor/guzzlehttp/psr7/README.md b/Deploy/stacks/web/website/site/vendor/guzzlehttp/psr7/README.md index acfabfdcbe3..6348481ceb8 100644 --- a/Deploy/stacks/web/website/site/vendor/guzzlehttp/psr7/README.md +++ b/Deploy/stacks/web/website/site/vendor/guzzlehttp/psr7/README.md @@ -4,7 +4,7 @@ This repository contains a full [PSR-7](http://www.php-fig.org/psr/psr-7/) message implementation, several stream decorators, and some helpful functionality like query string parsing. 
- + [![Build Status](https://travis-ci.org/guzzle/psr7.svg?branch=master)](https://travis-ci.org/guzzle/psr7) diff --git a/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md b/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md index b3f5cb3aaef..e8bafaaa799 100644 --- a/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md +++ b/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md @@ -1,7 +1,6 @@ # Helper class to create PSR-7 server request [![Latest Version](https://img.shields.io/github/release/Nyholm/psr7-server.svg?style=flat-square)](https://github.com/Nyholm/psr7-server/releases) -[![Build Status](https://img.shields.io/travis/Nyholm/psr7-server/master.svg?style=flat-square)](https://travis-ci.org/Nyholm/psr7-server) [![Code Coverage](https://img.shields.io/scrutinizer/coverage/g/Nyholm/psr7-server.svg?style=flat-square)](https://scrutinizer-ci.com/g/Nyholm/psr7-server) [![Quality Score](https://img.shields.io/scrutinizer/g/Nyholm/psr7-server.svg?style=flat-square)](https://scrutinizer-ci.com/g/Nyholm/psr7-server) [![Total Downloads](https://poser.pugx.org/nyholm/psr7-server/downloads)](https://packagist.org/packages/nyholm/psr7-server) diff --git a/Deploy/stacks/web/website/site/vendor/league/climate/README.md b/Deploy/stacks/web/website/site/vendor/league/climate/README.md index 3481c883799..b4633ee61f7 100644 --- a/Deploy/stacks/web/website/site/vendor/league/climate/README.md +++ b/Deploy/stacks/web/website/site/vendor/league/climate/README.md @@ -2,7 +2,6 @@ [![Latest Version](https://img.shields.io/github/tag/thephpleague/climate.svg?style=flat&label=release)](https://github.com/thephpleague/climate/tags) [![Software License](https://img.shields.io/badge/license-MIT-brightgreen.svg?style=flat)](LICENSE.md) -[![Build 
Status](https://github.com/thephpleague/climate/workflows/.github/workflows/buildcheck.yml/badge.svg?branch=master)](https://github.com/thephpleague/climate/actions?query=branch%3Amaster+workflow%3Abuildcheck) [![Coverage Status](https://img.shields.io/scrutinizer/coverage/g/thephpleague/climate.svg?style=flat)](https://scrutinizer-ci.com/g/thephpleague/climate/code-structure) [![Quality Score](https://img.shields.io/scrutinizer/g/thephpleague/climate.svg?style=flat)](https://scrutinizer-ci.com/g/thephpleague/climate) [![Total Downloads](https://img.shields.io/packagist/dt/league/climate.svg?style=flat)](https://packagist.org/packages/league/climate) diff --git a/Deploy/stacks/web/website/site/vendor/miljar/php-exif/README.md b/Deploy/stacks/web/website/site/vendor/miljar/php-exif/README.md index aef3eb6db56..1626d98971a 100644 --- a/Deploy/stacks/web/website/site/vendor/miljar/php-exif/README.md +++ b/Deploy/stacks/web/website/site/vendor/miljar/php-exif/README.md @@ -1,4 +1,4 @@ -# [PHPExif v0.6.4](http://github.com/PHPExif/php-exif) [![Build Status](https://travis-ci.org/PHPExif/php-exif.png?branch=master)](https://travis-ci.org/PHPExif/php-exif) [![Coverage Status](https://coveralls.io/repos/PHPExif/php-exif/badge.svg?branch=master)](https://coveralls.io/r/PHPExif/php-exif?branch=master) [![Code Climate](https://codeclimate.com/github/PHPExif/php-exif/badges/gpa.svg)](https://codeclimate.com/github/PHPExif/php-exif) +# [PHPExif v0.6.4](http://github.com/PHPExif/php-exif) [![Build Status](https://travis-ci.org/PHPExif/php-exif.png?branch=master)](https://travis-ci.org/PHPExif/php-exif) [![Coverage Status](https://coveralls.io/repos/PHPExif/php-exif/badge.svg?branch=master)](https://coveralls.io/r/PHPExif/php-exif?branch=master) PHPExif is a library which gives you easy access to the EXIF meta-data of an image. 
@@ -15,13 +15,6 @@ PHPExif serves as a wrapper around some native or CLI tools which access this EX composer require miljar/php-exif ``` - -## Usage - -[Before v0.3.0](Resources/doc/usage_0.2.1.md) - -[v0.3.0+](Resources/doc/usage.md) - ## Contributing Please submit all pull requests against the correct branch. The release branch for the next version is a branch with the same name as the next version. Bugfixes should go in the master branch, unless they are for code in a new release branch. diff --git a/Deploy/stacks/web/website/site/vendor/monolog/monolog/README.md b/Deploy/stacks/web/website/site/vendor/monolog/monolog/README.md index a578eb22892..04534a427e4 100644 --- a/Deploy/stacks/web/website/site/vendor/monolog/monolog/README.md +++ b/Deploy/stacks/web/website/site/vendor/monolog/monolog/README.md @@ -1,4 +1,4 @@ -# Monolog - Logging for PHP [![Build Status](https://img.shields.io/travis/Seldaek/monolog.svg)](https://travis-ci.org/Seldaek/monolog) +# Monolog - Logging for PHP [![Build Status](https://img.shields.io/travis/Seldaek/monolog.svg)](https://travis-ci.org/Seldaek/monolog) [![Total Downloads](https://img.shields.io/packagist/dt/monolog/monolog.svg)](https://packagist.org/packages/monolog/monolog) [![Latest Stable Version](https://img.shields.io/packagist/v/monolog/monolog.svg)](https://packagist.org/packages/monolog/monolog) @@ -40,13 +40,6 @@ $log->addWarning('Foo'); $log->addError('Bar'); ``` -## Documentation - -- [Usage Instructions](doc/01-usage.md) -- [Handlers, Formatters and Processors](doc/02-handlers-formatters-processors.md) -- [Utility classes](doc/03-utilities.md) -- [Extending Monolog](doc/04-extending.md) - ## Third Party Packages Third party handlers, formatters and processors are @@ -77,7 +70,7 @@ Bugs and feature request are tracked on [GitHub](https://github.com/Seldaek/mono - [XOOPS 2.6](http://xoops.org/) comes out of the box with Monolog. 
- [Aura.Web_Project](https://github.com/auraphp/Aura.Web_Project) comes out of the box with Monolog. - [Nette Framework](http://nette.org/en/) can be used with Monolog via [Kdyby/Monolog](https://github.com/Kdyby/Monolog) extension. -- [Proton Micro Framework](https://github.com/alexbilbie/Proton) comes out of the box with Monolog. +- [Proton Micro Framework](https://github.com/alexbilbie/Proton) comes out of the box with Monolog. ### Author diff --git a/Deploy/stacks/web/website/site/vendor/nyholm/psr7/README.md b/Deploy/stacks/web/website/site/vendor/nyholm/psr7/README.md index 4c53bdf5d87..1d9df943c2b 100644 --- a/Deploy/stacks/web/website/site/vendor/nyholm/psr7/README.md +++ b/Deploy/stacks/web/website/site/vendor/nyholm/psr7/README.md @@ -1,7 +1,6 @@ # PSR-7 implementation [![Latest Version](https://img.shields.io/github/release/Nyholm/psr7.svg?style=flat-square)](https://github.com/Nyholm/psr7/releases) -[![Build Status](https://img.shields.io/travis/Nyholm/psr7/master.svg?style=flat-square)](https://travis-ci.org/Nyholm/psr7) [![Code Coverage](https://img.shields.io/scrutinizer/coverage/g/Nyholm/psr7.svg?style=flat-square)](https://scrutinizer-ci.com/g/Nyholm/psr7) [![Quality Score](https://img.shields.io/scrutinizer/g/Nyholm/psr7.svg?style=flat-square)](https://scrutinizer-ci.com/g/Nyholm/psr7) [![Total Downloads](https://poser.pugx.org/nyholm/psr7/downloads)](https://packagist.org/packages/nyholm/psr7) @@ -101,7 +100,7 @@ $response = $psr17Factory->createResponse(200)->withBody($responseBody); ## Our goal -This package is currently maintained by [Tobias Nyholm](http://nyholm.se) and +This package is currently maintained by [Tobias Nyholm](http://tnyholm.se) and [Martijn van der Ven](https://vanderven.se/martijn/). They have decided that the goal of this library should be to provide a super strict implementation of [PSR-7](https://www.php-fig.org/psr/psr-7/) that is blazing fast. 
diff --git a/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md b/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md index 4654495a72f..471bbda2170 100644 --- a/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md +++ b/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md @@ -18,12 +18,7 @@ $ composer require php-http/message-factory ## Documentation -Please see the [official documentation](http://php-http.readthedocs.org/en/latest/message-factory/). - - -## Contributing - -Please see [CONTRIBUTING](CONTRIBUTING.md) and [CONDUCT](CONDUCT.md) for details. +Please see the [official documentation](https://docs.php-http.org/en/latest/message/message-factory.html). ## Security diff --git a/Deploy/stacks/web/website/site/vendor/rockettheme/toolbox/README.md b/Deploy/stacks/web/website/site/vendor/rockettheme/toolbox/README.md index 324d8a48169..4afe9f8f4c6 100644 --- a/Deploy/stacks/web/website/site/vendor/rockettheme/toolbox/README.md +++ b/Deploy/stacks/web/website/site/vendor/rockettheme/toolbox/README.md @@ -3,9 +3,9 @@ [![PHPStan](https://img.shields.io/badge/PHPStan-enabled-brightgreen.svg?style=flat)](https://github.com/phpstan/phpstan) [![Latest Version](http://img.shields.io/packagist/v/rockettheme/toolbox.svg?style=flat)](https://packagist.org/packages/rockettheme/toolbox) [![Software License](https://img.shields.io/badge/license-MIT-brightgreen.svg?style=flat)](LICENSE) -[![Build Status](https://img.shields.io/travis/rockettheme/toolbox/master.svg?style=flat)](https://travis-ci.org/rockettheme/toolbox) -[![Coverage Status](https://img.shields.io/scrutinizer/coverage/g/rockettheme/toolbox.svg?style=flat)](https://scrutinizer-ci.com/g/rockettheme/toolbox/code-structure) -[![Quality Score](https://img.shields.io/scrutinizer/g/rockettheme/toolbox.svg?style=flat)](https://scrutinizer-ci.com/g/rockettheme/toolbox) +[![Build 
Status](https://img.shields.io/travis/rockettheme/toolbox/master.svg?style=flat)](https://travis-ci.org/rockettheme/toolbox) +[![Coverage Status](https://img.shields.io/scrutinizer/coverage/g/rockettheme/toolbox.svg?style=flat)](https://scrutinizer-ci.com/g/rockettheme/toolbox/code-structure) +[![Quality Score](https://img.shields.io/scrutinizer/g/rockettheme/toolbox.svg?style=flat)](https://scrutinizer-ci.com/g/rockettheme/toolbox) [![Total Downloads](https://img.shields.io/packagist/dt/rockettheme/toolbox.svg?style=flat)](https://packagist.org/packages/rockettheme/toolbox) RocketTheme\Toolbox package contains a set of reusable PHP interfaces, classes and traits. diff --git a/Deploy/stacks/web/website/site/vendor/willdurand/negotiation/README.md b/Deploy/stacks/web/website/site/vendor/willdurand/negotiation/README.md index 1d85c123485..0c0cc59fa0c 100644 --- a/Deploy/stacks/web/website/site/vendor/willdurand/negotiation/README.md +++ b/Deploy/stacks/web/website/site/vendor/willdurand/negotiation/README.md @@ -2,7 +2,7 @@ Negotiation =========== [![Build -Status](https://travis-ci.org/willdurand/Negotiation.svg?branch=master)](http://travis-ci.org/willdurand/Negotiation) +Status](https://travis-ci.org/willdurand/Negotiation.svg?branch=master)](http://travis-ci.org/willdurand/Negotiation) [![Build status](https://ci.appveyor.com/api/projects/status/6tbe8j3gofdlfm4v?svg=true)](https://ci.appveyor.com/project/willdurand/negotiation) [![Total @@ -170,7 +170,7 @@ Run it using PHPUnit: Contributing ------------ -See [CONTRIBUTING](CONTRIBUTING.md) file. +See [CONTRIBUTING](CONTRIBUTING.md) file. Credits diff --git a/EntityRDFizer/README.md b/EntityRDFizer/README.md index 5081ebd9f4f..8ae3abbf2b9 100644 --- a/EntityRDFizer/README.md +++ b/EntityRDFizer/README.md @@ -3,7 +3,7 @@ The `entityrdfizer` project is designed to convert entities of any domain and their data and metadata into RDF. 
It requires the entities and their data to be provided as inputs in an ABox CSV template, that is filled in with data. A group of ABox CSV template files are provided under the following URL: -https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/KBTemplates/ABox +https://github.com/TheWorldAvatar/ontology/blob/main/KBTemplates/ABox # Installation # These instructions will get you a copy of the project up and running on your local machine for development and testing purposes. diff --git a/JPS_ARBITRAGE/README.md b/JPS_ARBITRAGE/README.md index 98ee042f80d..773a2a56526 100644 --- a/JPS_ARBITRAGE/README.md +++ b/JPS_ARBITRAGE/README.md @@ -33,18 +33,18 @@ How do we know if it's not working? If the python logger logging to tomcat serve - http://www.usinflationcalculator.com/ - https://www.icis.com/resources/news/2013/11/08/9723077/se-asia-to-china-palm-oil-freight-rates-may-fall-on-weak-demand/ - https://www.icis.com/resources/news/2013/11/08/9723077/se-asia-to-china-palm-oil-freight-rates-may-fall-on-weak-demand/ - - https://www.ema.gov.sg/Non_Residential_Programmes_Electricity_Tariffs.aspx + - Non_Residential_Programmes_Electricity_Tariffs.aspx - missing - 2. exchange_rates.pyw + 2. exchange_rates.pyw - http://apilayer.net/api/live?access_key=402d77f0850c35adfa5a797e325262dd¤cies=CNY,SGD&source=USD&format=1 3. FAME_download.pyw - - http://www.cmegroup.com/trading/energy/refined-products/fame-0-argus-biodiesel-fob-rdam-red-compliant-swap-futures.html + - https://www.cmegroup.com/markets/energy/biofuels/fame-0-argus-biodiesel-fob-rdam-red-compliant-swap-futures.html 4. HNG_download.pyw - - http://www.cmegroup.com/trading/energy/natural-gas/natural-gas_quotes_globex.html + - https://www.cmegroup.com/markets/energy/natural-gas/natural-gas.quotes.html 5. NG_to_MeOH_MoDS.py - https://business.directenergy.com/understanding-energy/energy-tools/conversion-factors. - 6. ZCE_download.pyw + 6. 
ZCE_download.pyw - http://english.czce.com.cn/enportal/DFSStaticFiles/Future/EnglishFutureQuotesMA.htm ### TODO: diff --git a/JPS_BLAZEGRAPH/Readme.md b/JPS_BLAZEGRAPH/Readme.md index 261dafa5cdb..6d748d7381b 100644 --- a/JPS_BLAZEGRAPH/Readme.md +++ b/JPS_BLAZEGRAPH/Readme.md @@ -1,6 +1,6 @@ # Blazegraph Project ### Authors -* [Feroz Farazi](msff2@cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) ### Ontology Upload diff --git a/JPS_ESS/README.MD b/JPS_ESS/README.MD index 0b6708073b8..cbd2ef4af99 100644 --- a/JPS_ESS/README.MD +++ b/JPS_ESS/README.MD @@ -57,7 +57,7 @@ - Second, check if the test `testCreateScenarioAndCallESSCoordinate` is working. This calls C as a whole, thus errors can come from there. - Third, if both of the above conditions run without errors, then it's most likely due to the visualization being broken. It could be that you are reading from the wrong location (at which case, look at \web\CO2Web\public\javascripts\pwBaseFile.js, Line 2. Enable it to Line 3, and it should be reading from Claudius rather than local deployment. - Possible errors on the backend could include: - 1. Check if the domain name is available on Claudius. If it fails at BatteryCreator or OptimizationAgent, than it's most likely that. So access [J-ParkSimulator](www.jparksimulator.com). + 1. Check if the domain name is available on Claudius. If it fails at BatteryCreator or OptimizationAgent, than it's most likely that. So access [J-ParkSimulator](http://www.jparksimulator.com). 2. JPS-POWSYS components. This program depends on POWSYS war file being available, and utilizes ENAgent and retrofitAgents from the retrofit package. 3. GAMS not being installed and hidden under GAMS DIR. If you don't want to change your directory name, then Line 105 of EnergyStorageSystem should be changed to where your GAMS is installed. 4. 
Python not being installed diff --git a/JPS_VIRTUALSENSOR/README.md b/JPS_VIRTUALSENSOR/README.md index 34fef2ef196..2237130c44c 100644 --- a/JPS_VIRTUALSENSOR/README.md +++ b/JPS_VIRTUALSENSOR/README.md @@ -5,7 +5,7 @@ - mapbox_api_key - mapbox_username 2) Set openweather API key in stack-manager/inputs/config/services/weather-agent.json, the API key needs to have OneCall enabled (credit card required, you can set the call limit below the limit before it starts charging). -3) If running AERMOD for static point sources, it is necessary to instantiate the input data required for AERMOD Agent according to OntoDispersion (https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodispersion). See the JurongIslandInputAgent folder for an example of an agent that does this. +3) If running AERMOD for static point sources, it is necessary to instantiate the input data required for AERMOD Agent according to OntoDispersion (https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontodispersion). See the JurongIslandInputAgent folder for an example of an agent that does this. 4) Elevation data (optional): AERMOD agent will try to query elevation data from a table named `elevation` in the default database. AERMOD agent can query the data stored in any SRID, but the table needs to contain data in one SRID only, hence it's recommended to convert any elevation data to a uniform SRID, e.g. 4326. An example is provided in [elevation.json]. Note that this config file is written for data in SRID=32632 and it needs to be changed according to your source data. The raw data files should be stored in `./stack-data-uploader/inputs/data/elevation`, any format supported by gdal should work, see https://gdal.org/drivers/raster/index.html for more info. 5) Buildings data (optional for ships, compulsory for static point source use cases): @@ -51,6 +51,7 @@ disp:tempMeasure rdf:type om:Measure ; om:hasUnit om:kelvin . 
``` + An instance can emit multiple pollutants, the class of pollutant ID needs to be one of the following: - - @@ -59,6 +60,7 @@ An instance can emit multiple pollutants, the class of pollutant ID needs to be - - - + ## Important for visualisation if not deployed locally Modify instances of `http://localhost:4242` in [data.json] to the external URL of where the stack is going to be deployed. diff --git a/QuestionAnswering/JPS_Chatbot/README.md b/QuestionAnswering/JPS_Chatbot/README.md index 622b4b7fae3..7989d95ca77 100644 --- a/QuestionAnswering/JPS_Chatbot/README.md +++ b/QuestionAnswering/JPS_Chatbot/README.md @@ -5,7 +5,7 @@ Before making any changes to it, please consult the application's developer (Xia ### In a Docker stack -The JPS Chatbot (and it's associated LDF server) have been added to the 'agent' Docker stack (see the [deployment readme](../Deploy/README.md) for more info). +The JPS Chatbot (and it's associated LDF server) have been added to the 'agent' Docker stack (see the [deployment readme](../../Deploy/README.md) for more info). ### In isolation (for development and local testing) The instructions below are intended for isolated development and testing only. diff --git a/QuestionAnswering/JPS_LDF/dependencies/README.md b/QuestionAnswering/JPS_LDF/dependencies/README.md index de0e2652d9a..cc7ea5a35a8 100644 --- a/QuestionAnswering/JPS_LDF/dependencies/README.md +++ b/QuestionAnswering/JPS_LDF/dependencies/README.md @@ -4,7 +4,7 @@ Each of the three dependencies in this directory are managed using separate Mave The descriptor.xml specifies the content and format (zip) of each dependency, while the pom.xml contains the artifact's metadata and describes how to package and deploy it. To upload a new version of one of the artifacts: -1. Ensure you have [Maven](https://maven.apache.org) installed and configured with appropriate settings to allow upload to the World Avatar Maven repo. 
See [this readme](../../Deploy/examples/maven_dependency/deploy/README.md) for instructions. +1. Ensure you have [Maven](https://maven.apache.org) installed and configured with appropriate settings to allow upload to the World Avatar Maven repo. See [this readme](../../../Deploy/examples/maven_dependency/deploy/README.md) for instructions. 2. Place the new files/directories in the dependency sub-directory. The required content is: ``` ./custom_node_modules diff --git a/QuestionAnswering/MARIE_AND_BERT/Training/EntityLinking/readme.md b/QuestionAnswering/MARIE_AND_BERT/Training/EntityLinking/readme.md index cc325dfa96a..cfbe513e228 100644 --- a/QuestionAnswering/MARIE_AND_BERT/Training/EntityLinking/readme.md +++ b/QuestionAnswering/MARIE_AND_BERT/Training/EntityLinking/readme.md @@ -35,7 +35,7 @@ Shown below is an example of the expected folder structure after set-up: ## Train SMILES NER ###Training on Windows -Follow the [setup guide](setup) to configure the environment. Move the `/data` folder created in [Data Preparation](#data-preparation) under `MARIE_AND_BERT/Training/EntityLinking`. +Follow the [setup guide](#setup) to configure the environment. Move the `/data` folder created in [Data Preparation](#data-preparation) under `MARIE_AND_BERT/Training/EntityLinking`. Use `MARIE_AND_BERT/Training/EntityLinking` as root folder to run the following command to train the SMILES NER model: ``` @@ -125,7 +125,7 @@ workdir="/home/[your_CRSid]/[your_training_folder]/NEL_Training_Marie_and_Bert" ``` ###Training on Windows -Follow the [setup guide](setup) to configure the environment. Move the `/data` folder created in [Data Preparation](data-preparation) under `MARIE_AND_BERT/Training/EntityLinking`. Also use `MARIE_AND_BERT/Training/EntityLinking` as the root folder to run the following commands: +Follow the [setup guide](#setup) to configure the environment. 
Move the `/data` folder created in [Data Preparation](#data-preparation) under `MARIE_AND_BERT/Training/EntityLinking`. Also use `MARIE_AND_BERT/Training/EntityLinking` as the root folder to run the following commands: First step: diff --git a/QuestionAnswering/MARIE_AND_BERT/Training/readme.md b/QuestionAnswering/MARIE_AND_BERT/Training/readme.md index 6bf5ee9896b..5a95a570bd0 100644 --- a/QuestionAnswering/MARIE_AND_BERT/Training/readme.md +++ b/QuestionAnswering/MARIE_AND_BERT/Training/readme.md @@ -122,7 +122,7 @@ under the ontology folders or sub-ontology folders. ### File requirement 1. All the files required for embedding and the trained embedding files. -2. `score_model_training.tsv`, see [readme.md for dataset creation](./KGToolbox/readme.md) to create the file +2. `score_model_training.tsv`, see [readme.md for dataset creation](../KGToolbox/readme.md) to create the file The files need to be placed in `MARIE_AND_BERT/DATA/CrossGraph/[ontology_name]/[sub-ontology_name]` if there is a sub ontology folder, otherwise, the files need to be placed in `CrossGraph/[ontology_name]` diff --git a/QuestionAnswering/MARIE_AND_BERT/readme.md b/QuestionAnswering/MARIE_AND_BERT/readme.md index eb14970ba9d..d71a0803e62 100644 --- a/QuestionAnswering/MARIE_AND_BERT/readme.md +++ b/QuestionAnswering/MARIE_AND_BERT/readme.md @@ -1,8 +1,6 @@ # Marie and BERT (Marie 3.0) -The ``Marie and Bert`` a.k.a `Marie 3.0` project is developed by [Xiaochi Zhou](xz378@cam.ac.uk) and [Shaocong Zhang](sz375@cam.ac.uk) and [Mehal Agarwal](ma988@cam.ac.uk). - -A demonstration webpage is deployed at [Marie Website](http://159.223.42.53:5003/) +The ``Marie and Bert`` a.k.a `Marie 3.0` project is developed by [Xiaochi Zhou](mailto:xz378@cam.ac.uk) and [Shaocong Zhang](mailto:sz375@cam.ac.uk) and [Mehal Agarwal](mailto:ma988@cam.ac.uk). 
## Architecture @@ -97,7 +95,7 @@ The user will need to change `/tmp/directory/for/models` to their folder of choi To run the full functions of the Marie system, three other systems are required: 1. The LDF server. See [LDF server readme](../JPS_LDF/README.md) to run it. -2. The semantic agents. See [PCE Agent readme](../Agents/PCEAgent/README.md) and [STDC Agent readme](../Agents/STDCThermoAgent/README.md) to create docker containers running them. +2. The semantic agents. See [PCE Agent readme](../../Agents/PCEAgent/README.md) and [STDC Agent readme](../../Agents/STDCThermoAgent/README.md) to create docker containers running them. ## Docker Deployment @@ -122,6 +120,7 @@ The deployment requires at least 16 GB of memory allocated to docker. The buildi To deploy the local LDF server (For reaction queries) and the Agents system (For agent queries) 1. Created a folder `/home/user1/Marie/TheWorldAvatar/MARIE_AND_BERT/DATA/KG` . Create `ontospecies.nt` and `ontocompchem.nt` using + ``` python KGToolbox/SPARQLEndpoint/export_triples.py --endpoint http://www.theworldavatar.com/blazegraph/namespace/copy_ontospecies_marie @@ -133,6 +132,7 @@ python KGToolbox/SPARQLEndpoint/export_triples.py --endpoint http://www.theworldavatar.com/blazegraph/namespace/ontocompchem --output_filename ontocompchem.nt ``` + The script needs to be run under `/home/user1/Marie/TheWorldAvatar/MARIE_AND_BERT` and the files will be created under `MARIE_AND_BERT/DATA/KG`. diff --git a/QuestionAnswering/MARIE_SEQ2SEQ/training/README.md b/QuestionAnswering/MARIE_SEQ2SEQ/training/README.md index 09d417f247d..5efc00b2a1f 100644 --- a/QuestionAnswering/MARIE_SEQ2SEQ/training/README.md +++ b/QuestionAnswering/MARIE_SEQ2SEQ/training/README.md @@ -2,7 +2,7 @@ Prerequisites -- Linux OS (recommended) . It is not advisable to run this project on Windows as [`bitsandbytes` is not supported on Windows]((https://github.com/TimDettmers/bitsandbytes/issues/30)). +- Linux OS (recommended) . 
It is not advisable to run this project on Windows as [`bitsandbytes` is not supported on Windows](https://github.com/TimDettmers/bitsandbytes/issues/30). - [conda](https://conda.io/projects/conda/en/latest/index.html) (recommended). - `python==3.10`. - CUDA @@ -69,7 +69,7 @@ Additional dependencies: - ONNX Runtime for GPU: `pip install optimum==1.12.0 && pip install optimum[onnxruntime-gpu]` - TensorRT: - [CUDA toolkit](https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html) - - [cuDNN](https://docs.nvidia.com/deeplearning/cudnn/install-guide/index.html) + - [cuDNN](https://docs.nvidia.com/deeplearning/cudnn/archives/cudnn-860/install-guide/index.html) - [TensorRT](https://docs.nvidia.com/deeplearning/tensorrt/install-guide/index.html) (installation from tar ball recommended) - To enable memory profiling with the command line argument `--do_profile`, run `pip install memory-profiler==0.61.0`. diff --git a/QuestionAnswering/QA_ICL/data_generation/README.md b/QuestionAnswering/QA_ICL/data_generation/README.md index 530ae1749b7..73191d025f1 100644 --- a/QuestionAnswering/QA_ICL/data_generation/README.md +++ b/QuestionAnswering/QA_ICL/data_generation/README.md @@ -406,13 +406,12 @@ Most scripts in this directory are in Python. The command line arguments support - Generate lexicons specific to an entity type: - Entities of `purl:Element` type: [lexicon/Element_lexicon.py](lexicon/Element_lexicon.py). - - Entities of `disp:Ship` type: [lexicon/Ship_lexicon.py](lexicon/Ship_lexicon.py). ### KG Schema Extraction -- Extract edge type info from an ABox exposed via a SPARQL endpoint: [simplified_schema/extract_edgetypes_from_tbox.py](simplified_schema/extract_edgetypes_from_abox.py). -- Extract relation type info from OWL files: [simplified_schema/extract_relations_from_tbox.py](simplified_schema/extract_schema_from_tbox.py). 
-- Extract relation type info from an ABox exposed via a SPARQL endpoint: [simplified_schema/extract_relations_from_abox.py](simplified_schema/extract_schema_from_abox.py) +- Extract edge type info from an ABox exposed via a SPARQL endpoint: [simplified_schema/extract_edgetypes_from_tbox.py](simplified_schema/extract_edgetypes_from_tbox.py). +- Extract relation type info from OWL files: [simplified_schema/extract_relations_from_tbox.py](simplified_schema/extract_relations_from_tbox.py). +- Extract relation type info from an ABox exposed via a SPARQL endpoint: [simplified_schema/extract_relations_from_abox.py](simplified_schema/extract_relations_from_abox.py) ### CSV-to-JSON Conversion of Data Request Generation Examples diff --git a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/history.md b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/history.md index 1f99983200a..0f92f6eff1c 100644 --- a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/history.md +++ b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/history.md @@ -22,7 +22,7 @@ [5] L. Pascazio, D. Tran, S. D. Rihm, Jiaru Bai, J. Akroyd, S. Mosbach, and M. Kraft, "Question-answering system for combustion kinetics", Technical Report 315, c4e-Preprint Series, Cambridge, 2023 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e-preprint-315.pdf)). -[6] D. Tran, S. D. Rihm, A. Kondniski, L. Pascazio, F. Saluz, S. Mosbach, J. Akroyd, and M. Kraft, "Natural Language Access Point to Digital Metal-Organic Polyhedra Chemistry in The World Avatar", Technical Report 327, c4e-Preprint Series, Cambridge, 2024 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e_327_nmdt2_MarieICL__preprint_.pdf)). +[6] D. Tran, S. D. Rihm, A. Kondinski, L. Pascazio, F. Saluz, S. Mosbach, J. Akroyd, and M.
Kraft, "Natural Language Access Point to Digital Metal-Organic Polyhedra Chemistry in The World Avatar", Technical Report 327, c4e-Preprint Series, Cambridge, 2024 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e_327_nmdt2_MarieICL__preprint__dih1fxm.pdf)). ## Previous versions diff --git a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontocompchem.md b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontocompchem.md index f1552e96366..69b3854f12a 100644 --- a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontocompchem.md +++ b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontocompchem.md @@ -8,7 +8,7 @@ OntoCompChem is an ontology designed to represent the input and output processes ### Download -- [OntoCompChem.owl](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontocompchem/OntoCompChem.owl) +- [OntoCompChem.owl](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontocompchem/ontocompchem.owl) ### Access diff --git a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontokin.md b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontokin.md index 76469718bf4..fabc5efee5c 100644 --- a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontokin.md +++ b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontokin.md @@ -8,7 +8,7 @@ OntoKin is an ontology designed to represent reaction mechanisms. It details set ### Download -- [OntoKin.owl](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontokin/OntoKin.owl) +- [OntoKin.owl](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontokin/OntoKin.owl) ### Access @@ -26,7 +26,7 @@ OntoKin is an ontology designed to represent reaction mechanisms. It details set ### Preprints -[1] F. Farazi, J. Akroyd, S. Mosbach, P. Buerger, D. Nurkowski, and M. 
Kraft, "OntoKin: An ontology for chemical kinetic reaction mechanisms", Technical Report 218, c4e-Preprint Series, Cambridge, 2019 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e-preprint-218.pdf)) +[1] F. Farazi, J. Akroyd, S. Mosbach, P. Buerger, D. Nurkowski, and M. Kraft, "OntoKin: An ontology for chemical kinetic reaction mechanisms", Technical Report 218, c4e-Preprint Series, Cambridge, 2019 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e-Preprint-218.pdf)) [2] F. Farazi, N. Krdzavac, J. Akroyd, S. Mosbach, A. Menon, D. Nurkowski, and M. Kraft, "Linking reaction mechanisms and quantum chemistry: An ontological approach", Technical Report 236, c4e-Preprint Series, Cambridge, 2019 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e-preprint-236.pdf)) diff --git a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontomops.md b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontomops.md index c2ba9897ae4..69b253e96c7 100644 --- a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontomops.md +++ b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontomops.md @@ -8,7 +8,7 @@ The OntoMOPs ontology is designed to provide and enrich semantic relationships b ### Download -- [OntoMOPs.owl](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontomops/OntoMOPs.owl) +- [ontomops-ogm.ttl](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontomops/ontomops-ogm.ttl) ### Publications @@ -18,4 +18,4 @@ The OntoMOPs ontology is designed to provide and enrich semantic relationships b [1] A. Kondinski, A. Menon, D. Nurkowski, F. Farazi, S. Mosbach, J. Akroyd, and M. Kraft, "Automated Rational Design of Metal-Organic Polyhedra", Technical Report 292, c4e-Preprint Series, Cambridge, 2022 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/OntoMOPs_preprint_4AP6w6C.pdf)). -[2] D. Tran, S. D. Rihm, A. Kondniski, L. Pascazio, F. Saluz, S. Mosbach, J. 
Akroyd, and M. Kraft, "Natural Language Access Point to Digital Metal-Organic Polyhedra Chemistry in The World Avatar", Technical Report 327, c4e-Preprint Series, Cambridge, 2024 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e_327_nmdt2_MarieICL__preprint_.pdf)). \ No newline at end of file +[2] D. Tran, S. D. Rihm, A. Kondinski, L. Pascazio, F. Saluz, S. Mosbach, J. Akroyd, and M. Kraft, "Natural Language Access Point to Digital Metal-Organic Polyhedra Chemistry in The World Avatar", Technical Report 327, c4e-Preprint Series, Cambridge, 2024 ([PDF](https://como.ceb.cam.ac.uk/media/preprints/c4e_327_nmdt2_MarieICL__preprint__dih1fxm.pdf)). \ No newline at end of file diff --git a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontospecies.md b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontospecies.md index 9bc517966c0..cc7576e5182 100644 --- a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontospecies.md +++ b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontospecies.md @@ -8,7 +8,7 @@ OntoSpecies serves as core ontology within TWA chemistry domain.
It is an ontolo ### Download -- [OntoSpecies_v2.owl](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontospecies/OntoSpecies_v2.owl) +- [OntoSpecies_v2.owl](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontospecies/OntoSpecies_v2.owl) ### Access diff --git a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontozeolite.md b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontozeolite.md index 228ea8977b0..003e0806d55 100644 --- a/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontozeolite.md +++ b/QuestionAnswering/QA_ICL/frontend/next_app_marie/resources/tbox-info/ontozeolite.md @@ -12,8 +12,8 @@ The OntoCrystal ontology provides a semantic representation of crystallographic ### Download -- [OntoZeolite.owl](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontozeolite/ontozeolite.owl) -- [OntoCrystal.owl](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/JPS_Ontology/ontology/ontozeolite/ontocrystal.owl) +- [OntoZeolite.owl](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontozeolite/ontozeolite.owl) +- [OntoCrystal.owl](https://github.com/TheWorldAvatar/ontology/blob/main/ontology/ontozeolite/ontocrystal.owl) ### Access diff --git a/README.md b/README.md index 2a7ea8dbba8..cbbfe18c779 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ The World Avatar represents information in a dynamic knowledge graph using techn Listed below are a number of the key technical features available within The World Avatar ecosystem. More information on these, and other features, can be seen on [The World Avatar Wiki](https://github.com/cambridge-cares/TheWorldAvatar/wiki). **TWA Stack:**
-The knowledge graph and its agents are hosted using collections of containers. How to use them is explained in the [stack manager](./Deploy/stacks/dynamic/stack-manager) and [stack data uploader](./Deploy/stacks/dynamic/stack-data-uploader) folders. +The knowledge graph and its agents are hosted using collections of containers. How to use them is explained in the [stack manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) and [stack data uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader) folders. **TWA Base Library:**
The [base lib](./JPS_BASE_LIB) is a collection of functionality that is shared among many parts of the code. Core functions include the ability to generate and upload TBoxes, query KGs and RDBs, implement RESTful APIs, and triple cloning. diff --git a/obsolete/BIMCesiumVisualisation/ifcto3Dtilesnext/README.md b/obsolete/BIMCesiumVisualisation/ifcto3Dtilesnext/README.md index a91491e8bfa..3fe0fbf7919 100644 --- a/obsolete/BIMCesiumVisualisation/ifcto3Dtilesnext/README.md +++ b/obsolete/BIMCesiumVisualisation/ifcto3Dtilesnext/README.md @@ -1,3 +1,4 @@ + # IFC2Tileset Agent ## Description @@ -128,4 +129,5 @@ As Git does not allow empty directories, `.gitignore` files have been added to t - Fridge - Ensure that the assets are classified as Furniture or Generic Models for the converter to recognise them - `Furniture` are exported as IfcFurnishingElement while `Generic Models` are exported as IfcBuildingElementProxy - - For new asset types, please include their name (or part of) into line 60 of `agent/ifc2gltf.py` \ No newline at end of file + - For new asset types, please include their name (or part of) into line 60 of `agent/ifc2gltf.py` + \ No newline at end of file diff --git a/obsolete/JPS_DES/README.md b/obsolete/JPS_DES/README.md index e041684115e..ca822490c11 100644 --- a/obsolete/JPS_DES/README.md +++ b/obsolete/JPS_DES/README.md @@ -1,3 +1,4 @@ + # README for Distributed Energy System ## Python Dependencies @@ -134,4 +135,4 @@ Then, it calls upon the BlockchainWrapper agent that communicates with the block ## TODO: - [ ] TesseractOCR isn't the best for weather forecast, and its error rate has been increasing. Find another way of getting real weather data, but the current method works for now. (i.e. 
incomplete data comprehension due to using OCR as a scraping method) - [ ] virtual environment for python - + diff --git a/obsolete/JPS_Version_0/BMS/BMSMap/LICENSE.md b/obsolete/JPS_Version_0/BMS/BMSMap/LICENSE.md index 722f2f10529..3e5f5b619c7 100644 --- a/obsolete/JPS_Version_0/BMS/BMSMap/LICENSE.md +++ b/obsolete/JPS_Version_0/BMS/BMSMap/LICENSE.md @@ -8,7 +8,7 @@ https://github.com/kekscom/Color.js Copyright (c) 2017, Jan Marsch Triangulate.js -https://github.com/OSMBuildings/Triangulation +https://github.com/OSMBuildings/Triangulate Copyright (c) 2016, Jan Marsch, OSM Buildings diff --git a/obsolete/JPS_Version_0/BMS/BMSMap/README.md b/obsolete/JPS_Version_0/BMS/BMSMap/README.md index fea47e880b6..2354e128910 100644 --- a/obsolete/JPS_Version_0/BMS/BMSMap/README.md +++ b/obsolete/JPS_Version_0/BMS/BMSMap/README.md @@ -13,7 +13,7 @@ OSM Buildings is a JavaScript library for visualizing OpenStreetMap building geo The library version in this repository is a WebGL only variant of OSM Buildings. At some point it will fully integrate the Classic 2.5D version. -For the latest information about the project [follow us on Twitter](https://twitter.com/osmbuildings), read [our blog](http://blog.osmbuildings.org), or just mail us at mail@osmbuildings.org. +For the latest information about the project [follow us on Twitter](https://twitter.com/osmbuildings), read [our blog](https://medium.com/@osmbuildings), or just mail us at mail@osmbuildings.org. ### Not sure which version to use? diff --git a/ontology-tools/CMCLOntoChemExp/README.md b/ontology-tools/CMCLOntoChemExp/README.md index 790d04466f0..1cc866a3ee7 100644 --- a/ontology-tools/CMCLOntoChemExp/README.md +++ b/ontology-tools/CMCLOntoChemExp/README.md @@ -45,7 +45,7 @@ Under construction... 
- For the purpose of linking to OntoSpecies - ontospecies.uniquespeciesiri.kb.server.url - the server address where triple-store for OntoSpecies ABox files located - ontospecies.uniquespeciesiri.kb.repository.id - the namespace of the triple-store that contains OntoSpecies ABox files - - ontospecies.uniquespeciesiri.kb.abox.iri - the base URL of the OntoSpecies ABox files, should be http://www.theworldavatar.com/kb/ontospecies/ by default + - ontospecies.uniquespeciesiri.kb.abox.iri - the base URL of the OntoSpecies ABox files, should be http://www.theworldavatar.com/kb/ontospecies/ by default - For provenance information of the experiment data - Under construction... will be updated in v1.3... - For controlling if generated ABox files are to be uploaded to triple-store automatically @@ -61,7 +61,7 @@ Under construction... ## Useful links -Automated Calibration of a Poly(oxymethylene) Dimethyl Ether Oxidation Mechanism Using the Knowledge Graph Technology [[paper](https://doi.org/10.1021/acs.jcim.0c01322)] [[preprint](https://como.ceb.cam.ac.uk/preprints/262/)] +Automated Calibration of a Poly(oxymethylene) Dimethyl Ether Oxidation Mechanism Using the Knowledge Graph Technology [[paper](https://pubs.acs.org/doi/10.1021/acs.jcim.0c01322)] [[preprint](https://como.ceb.cam.ac.uk/preprints/262/)] @@ -76,7 +76,7 @@ Automated Calibration of a Poly(oxymethylene) Dimethyl Ether Oxidation Mechanism - Made class `BibliographyLink` `EQUIVALENT-TO` class `OntoKin:Reference` - Added class `Velocity`, `SootYield`, `MassBurningRate`, `Mass`, `SpecificSurfaceArea`, `Material`, `Fraction`, `Voltage`, `Temperature`, `Length`, `Pressure`, `Density`, `Volume`, `FlowRate`, `Time`, `VolumetricFlowRate`, `ResidenceTime`, `LaminarBurningVelocity`, `Distance`, `InitialComposition`, `IgnitionDelay`, `Composition`, `Concentration`, `EquivalenceRatio`, `TemperatureInReferenceState`, `PressureInReferenceState`, `VolumetricFlowRateInReferenceState`, `ReactorLength`, `Diameter`, 
`JunctionArchitecture`, `DonorConstructionType`, `Acceptor`, `Donor`, `HomoEnergy`, `LumoEnergy`, `HomoLumoEnergyGap`, `OpticalEnergyGap`, `OpenCircuitPotential`, `ShortCircuitCurrentDensity`, `PowerConversionEfficiency`, `FillFactor` as subclass of `DimensionalQuantity` - Added class `http://xmlns.com/foaf/0.1/Agent`, `http://xmlns.com/foaf/0.1/Person`, `http://xmlns.com/foaf/0.1/Organization`, `http://purl.org/ontology/bibo/Journal` - - Inherited class `OntoKin:PublicationSpecification`, `OntoKin:JournalSpecification`, `OntoKin:ProceedingsSpecification`, `OntoKin:PreprintSpecification` from [`OntoKin`](http://theworldavatar.com/ontology/ontokin/OntoKin.owl) ontology + - Inherited class `OntoKin:PublicationSpecification`, `OntoKin:JournalSpecification`, `OntoKin:ProceedingsSpecification`, `OntoKin:PreprintSpecification` from [`OntoKin`](https://raw.githubusercontent.com/TheWorldAvatar/ontology/refs/heads/main/ontology/ontokin/OntoKin.owl) ontology - Relationships @@ -89,7 +89,7 @@ Automated Calibration of a Poly(oxymethylene) Dimethyl Ether Oxidation Mechanism - `` to make the direct connection between measured data point `X` with the physical `DimensionalQuantity` it represents - ``, `` - Redundant `hasDataPointX` and `hasUncertainty` related to `X1`-`X11` - - Inherited publication-related object property from [`OntoKin`](http://theworldavatar.com/ontology/ontokin/OntoKin.owl) ontology + - Inherited publication-related object property from [`OntoKin`](https://raw.githubusercontent.com/TheWorldAvatar/ontology/refs/heads/main/ontology/ontokin/OntoKin.owl) ontology - `` - `` - `` @@ -108,7 +108,7 @@ Automated Calibration of a Poly(oxymethylene) Dimethyl Ether Oxidation Mechanism - `` - `` - Redundant `hasValue` related to `X1`-`X11` - - Inherited publication-related data property from [`OntoKin`](http://theworldavatar.com/ontology/ontokin/OntoKin.owl) ontology + - Inherited publication-related data property from 
[`OntoKin`](https://raw.githubusercontent.com/TheWorldAvatar/ontology/refs/heads/main/ontology/ontokin/OntoKin.owl) ontology - `` - `` - `` diff --git a/thermo/README.md b/thermo/README.md index 857c26eb6c4..916d5e21d7e 100644 --- a/thermo/README.md +++ b/thermo/README.md @@ -32,19 +32,18 @@ Repository Name: thermochemistry * OpenBabel: This is a headache. The project JOpenBabel (v2.3.1 or v2.4.1) does not seem to exist in any maven repository any more. - The maven repository at the Chemistry Department (https://maven.ch.cam.ac.uk/m2repo) also does not exist any more. + The maven repository at the Chemistry Department also does not exist any more. Therefore, both have been commented out in all pom.xml files. For the CoMoThermodynamics project, this seems to be without consequence, as there does not appear to be a genuine dependency. The CoMoOpenBabel project still builds, but in order to run the tests, one needs the babel command-line executable as well as the DLL/shared object library (but no jar apparently). NB The name of the DLL/shared object library is hard-coded in CoMoOpenBabel/src/main/.../openbabel/util/OpenBabelUtil.java, currently as openbabel_java (works for Windows only). The CoMoTools project is the only project with a genuine OpenBabel dependency, through the source file CoMoTools/src/main/.../tools/structure/util/OpenBabelCompoundConverter.java, but it does not appear to be used by anything in the repository so it has been excluded from the build (by renaming the file). References: - http://openbabel.org/wiki/Main_Page + https://openbabel.org/docs/index.html https://sourceforge.net/projects/openbabel/ (NB This does install a .jar file.) sudo yum install {openbabel,openbabel-devel} (NB This installs babel command-line executable and shared library libopenbabel.so, but no jars.) 
- Note perhaps also: http://dev.cyfronet.pl/mvnrepo/openbabel/openbabel/ - http://openbabel.org/docs/current/UseTheLibrary/Java.html#macosx-and-linux - https://openbabel.org/docs/dev/Installation/install.html + Note perhaps also: https://openbabel.org/docs/UseTheLibrary/Java.html#macosx-and-linux + https://openbabel.org/docs/Installation/install.html * Jmol/JSmol: https://sourceforge.net/projects/jmol/files/ (https://sourceforge.net/projects/jsmol/ is deprecated) diff --git a/thermo/obda-thermochemistry/README.md b/thermo/obda-thermochemistry/README.md index fccc5a8019f..5c9c773340d 100644 --- a/thermo/obda-thermochemistry/README.md +++ b/thermo/obda-thermochemistry/README.md @@ -1,7 +1,7 @@ # Developing an Ontology Based Data Access (OBDA) Project ### Authors -* [Nenad Krdzavac](caresssd@hermes.cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Nenad Krdzavac](mailto:caresssd@hermes.cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) OBDA is a means to access and query data stored in databases using SPARQL. This short document aims to describe the steps required to develop an OBDA project using Java and PostgreSQL relational database management system. @@ -84,7 +84,7 @@ For adding these data to the *tb_books* table, you can use the following INSERT You can either copy the *exampleBooks.owl* ontology from [here](https://www.dropbox.com/home/IRP3%20CAPRICORN%20shared%20folder/_JPS%20Development/data) to the *resources* (src/main/resources) folder in the Maven project you created or develop the same ontology by following the steps below and put this under the same folder. If you already have copied the ontology into the resourced folder, you can go to the next section. -* Create an ontology with the OntologyIRI *http://theworldavatar.com/ontology/obda/exampleBooks.owl*. Include the classes from the following hierarchy in this ontology. 
Classes which have the same indentation are siblings, and classes which have different indentations are connected with subclass of relations. Classes indented to the right are subclasses of the class which is indented to the left and above. For example, Author and Book are siblings, AudioBook and E-Book are siblings, AudioBook is a subclass of Book and E-Book is a subclass of Book. +* Create an ontology with the OntologyIRI *https://raw.githubusercontent.com/cambridge-cares/TheWorldAvatar/main/thermo/obda-thermochemistry/resources/books/exampleBooks.owl*. Include the classes from the following hierarchy in this ontology. Classes which have the same indentation are siblings, and classes which have different indentations are connected with subclass of relations. Classes indented to the right are subclasses of the class which is indented to the left and above. For example, Author and Book are siblings, AudioBook and E-Book are siblings, AudioBook is a subclass of Book and E-Book is a subclass of Book. Author EmergingWriter @@ -125,7 +125,7 @@ It is important to remember that the expressivity of the ontology used in OBDA s The following SPARQL query extracts the code and title of books. Save this query in a file called *book_code_title.rq* and put this file under the resources folder of the Maven project. - PREFIX books: + PREFIX books: SELECT DISTINCT ?book ?title WHERE { ?book a books:Book . @@ -138,7 +138,7 @@ The following SPARQL query extracts the code and title of books. Save this query Create a mapping file called *books_all.obda* in the resources folder of the Maven project and put the following three blocks of code into this file by maintaining the order of their appearance. This file establishes mapping(s) between a SPARQL query and the database via the ontology. 
[PrefixDeclaration] - : http://theworldavatar.com/ontology/obda/exampleBooks.owl# + : https://raw.githubusercontent.com/cambridge-cares/TheWorldAvatar/main/thermo/obda-thermochemistry/resources/books/exampleBooks.owl# owl: http://www.w3.org/2002/07/owl# rdf: http://www.w3.org/1999/02/22-rdf-syntax-ns# rdfs: http://www.w3.org/2000/01/rdf-schema# @@ -166,7 +166,7 @@ Do not forget to provide your user name and password for PostgreSQL in the Sourc The mapping file contains three blocks. -* It can be understood from the name *PrefixDeclaration* that the *first block* includes prefix declarations including *owl*, *rdf*, and *:*, which represents the prefix of the default namespace (the Ontology IRI followed by #) and in this particular example project it is *http://theworldavatar.com/ontology/obda/exampleBooks.owl#*. +* It can be understood from the name *PrefixDeclaration* that the *first block* includes prefix declarations including *owl*, *rdf*, and *:*, which represents the prefix of the default namespace (the Ontology IRI followed by #) and in this particular example project it is *https://raw.githubusercontent.com/cambridge-cares/TheWorldAvatar/main/thermo/obda-thermochemistry/resources/books/exampleBooks.owl#*. * The *second block* is called *SourceDeclaration*, which includes information about *sourceUri* that is the name of database. The feature *connectionUrl* represents the Java Database Connectivity (*JDBC*) for the target database. *username* and *password* are credentials for accessing the database. The *driverClass* is the driver class for the database. 
diff --git a/thermo/spin-thermochemistry/README.md b/thermo/spin-thermochemistry/README.md index d23e30a7e48..9e41b42348a 100644 --- a/thermo/spin-thermochemistry/README.md +++ b/thermo/spin-thermochemistry/README.md @@ -1,7 +1,7 @@ # Developing a SPARQL Inferencing Notation (SPIN) Project ### Authors -* [Nenad Krdzavac](caresssd@hermes.cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Nenad Krdzavac](mailto:caresssd@hermes.cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) SPIN is an industry-standard to represent SPARQL rules and constraints on Semantic Web models [[1](https://spinrdf.org)]. This short document aims to describe the steps required to develop a SPIN project using Java. diff --git a/web/augmented-uk/README.md b/web/augmented-uk/README.md index 0629c5b7848..4f5c3d3e3aa 100644 --- a/web/augmented-uk/README.md +++ b/web/augmented-uk/README.md @@ -1,6 +1,6 @@ # Visualisation of Augmented UK -The augmented UK visualisation has been put together as a single stack with no requirements on any external services. Both the data required for the visualisation, and the visualisation itself are hosted within the stack instance. For more information on the stack, read the [documentation here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +The augmented UK visualisation has been put together as a single stack with no requirements on any external services. Both the data required for the visualisation, and the visualisation itself are hosted within the stack instance. For more information on the stack, read the [documentation here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). This directory contains the documentation, configuration files, and associated scripts for a visualisation window into the base world of The World Avatar (focusing on assets within the United Kingdom). 
Whilst other data and capabilities related to the project may exist elsewhere in The World Avatar, this documentation only covers the steps needed to acquire, upload, and visualise data used in the deployed visualisation currently available from [The World Avatar's website](https://theworldavatar.io). @@ -51,7 +51,7 @@ To support metadata for the visualisation, the related stack needs to also launc ### 2.3 Deploying the stack -Before following the deployment steps, please read the following section to get a better understanding of the workflow. Once the correct files for each data source have been acquired, we need to first spin up an instance of the stack (see [here](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) for details on how to do this). Once ready, we have to upload data using the [data uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader) into a relational database. Please read the [uploader's documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader) before commencing upload as this file will not detail the generic upload process. +Before following the deployment steps, please read the following section to get a better understanding of the workflow. Once the correct files for each data source have been acquired, we need to first spin up an instance of the stack (see [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) for details on how to do this). Once ready, we have to upload data using the [data uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader) into a relational database. Please read the [uploader's documentation](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader) before commencing upload as this file will not detail the generic upload process. 
With each data set come a number of pre-written associated files (configurations, queries, styles etc.). These files are documented along with their corresponding data source on the [Data](./docs/data.md) page. diff --git a/web/augmented-uk/docs/data.md b/web/augmented-uk/docs/data.md index 2bb233c6862..5bcbf4d5b39 100644 --- a/web/augmented-uk/docs/data.md +++ b/web/augmented-uk/docs/data.md @@ -38,14 +38,14 @@ Data directories: Once a year, the UK government publishes a Digest of UK Energy Statistics (DUKES); note this was formally published by the Department for Business, Energy and Industrial Strategy (BEIS) before it was dissolved, subsequent publications should be from the new Department for Energy Security and Net Zero (DESNZ). -Read the associated [DUKES Data](./docs/data-dukes.md) page for details on how the DUKES data was acquired and processed. +Read the associated [DUKES Data](./data-dukes.md) page for details on how the DUKES data was acquired and processed. #### Associated files -- [Uploader config](./augmented-uk/config/uploader/config/dukes_2023.json) -- [Ontop mapping](./augmented-uk/config/uploader/data/dukes_2023/dukes_2023.obda) +- [Uploader config](../inputs/uploader/config/dukes2023.json) +- [Ontop mapping](../inputs/uploader/data/dukes_2023/dukes_2023.obda) - Note that at the time of writing, this mapping utilises TBoxes that do not appear within the OntoEIP ontology. Nothing in the mapping contradicts the ontology, but the existing ontology does not contain enough concepts to cover all of the concepts provided by DUKES. -- [OntoEIP ontology](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoeip) +- [OntoEIP ontology](https://github.com/TheWorldAvatar/ontology/tree/main/ontology/ontoeip) - Note that when uploading the ontology files, you may need to rename any Turtle files with the `.ttl` extension. 
The stack data uploader assumes that `.owl` files are using the XML format, if an `.owl` file is using Turtle then this will cause errors during upload. Open the OntoEIP ontology link and find the resource_network folder. Put 'resource_network.ttl' into 'inputs/data/ontoeip' folder. Contact CMCL if you need the newest dukes data. @@ -65,10 +65,10 @@ No pre-processing is needed on this data set, we're using it as is. #### Associated files -- [Uploader config](./augmented-uk/config/uploader/config/population.json) -- [Geospatial SQL Query](./augmented-uk/config/uploader/config/sql/dukes_2023_pop.sql) +- [Uploader config](../inputs/uploader/config/population.json) +- [Geospatial SQL Query](../inputs/uploader/config/sql/dukes_2023_pop.sql) - An SQL query to determine the number of people within a 1KM radius of each power plant. -- [Raster style](./augmented-uk/config/uploader/config/sld/uk-population-style.sld) +- [Raster style](../inputs/uploader/config/sld/uk-population-style.sld) - SLD file to style the population raster data in GeoServer. ### Digest of UK Energy Statistics (DUKES) @@ -87,5 +87,5 @@ Shapefiles are obtained from [national forestry inventory 2020](https://data-for ### Streetlamps, traffic signals and England highways -Links to sources (mostly local council data portals) are in each relevant [data folder](inputs/config/uploader/data). +Links to sources (mostly local council data portals) are in each relevant [data folder](../inputs/uploader/data). diff --git a/web/augmented-uk/inputs/uploader/data/street_light/king's_lynn/README.md b/web/augmented-uk/inputs/uploader/data/street_light/king's_lynn/README.md index cc922797bd9..01081e2447e 100644 --- a/web/augmented-uk/inputs/uploader/data/street_light/king's_lynn/README.md +++ b/web/augmented-uk/inputs/uploader/data/street_light/king's_lynn/README.md @@ -1,3 +1,4 @@ + Add Norfolk street light data here from https://www.whatdotheyknow.com/request/street_light_locations_and_refer. 
Convert the Excel Spreadsheet to CSV form. Merge the two CSVs into one. diff --git a/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-overhead-lines/README.md b/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-overhead-lines/README.md index da2c8a11918..eae78c31937 100644 --- a/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-overhead-lines/README.md +++ b/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-overhead-lines/README.md @@ -1 +1 @@ -Add 132kV overhead line ShapeFile or geojsonfiles here from https://ukpowernetworks.opendatasoft.com/explore/dataset/132kv-overhead-lines. \ No newline at end of file +Add 132kV overhead line ShapeFile or geojsonfiles here from https://ukpowernetworks.opendatasoft.com/explore/dataset/ukpn-132kv-overhead-lines. \ No newline at end of file diff --git a/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-poles-towers/README.md b/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-poles-towers/README.md index a81a1edfdb9..e515712173d 100644 --- a/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-poles-towers/README.md +++ b/web/augmented-uk/inputs/uploader/data/ukpn/vector/132kv-poles-towers/README.md @@ -1 +1 @@ -Add 132kV poles and towers ShapeFile or geojson files here from https://ukpowernetworks.opendatasoft.com/explore/dataset/132kv-poles-towers. \ No newline at end of file +Add 132kV poles and towers ShapeFile or geojson files here from https://ukpowernetworks.opendatasoft.com/explore/dataset/ukpn-132kv-poles-towers/. 
\ No newline at end of file diff --git a/web/augmented-uk/inputs/uploader/data/ukpn/vector/33kv-overhead-lines/README.md b/web/augmented-uk/inputs/uploader/data/ukpn/vector/33kv-overhead-lines/README.md index ce5663ac712..93284ff7e7b 100644 --- a/web/augmented-uk/inputs/uploader/data/ukpn/vector/33kv-overhead-lines/README.md +++ b/web/augmented-uk/inputs/uploader/data/ukpn/vector/33kv-overhead-lines/README.md @@ -1 +1 @@ -Add 33kV overhead line ShapeFile or geojson files here from https://ukpowernetworks.opendatasoft.com/explore/dataset/33kv-overhead-lines. \ No newline at end of file +Add 33kV overhead line ShapeFile or geojson files here from https://ukpowernetworks.opendatasoft.com/explore/dataset/ukpn-33kv-overhead-lines. \ No newline at end of file diff --git a/web/augmented-uk/inputs/uploader/data/ukpn/vector/ukpn-66kv-overhead-lines-shapefile/README.md b/web/augmented-uk/inputs/uploader/data/ukpn/vector/ukpn-66kv-overhead-lines-shapefile/README.md index 5df36b07839..6af685d8668 100644 --- a/web/augmented-uk/inputs/uploader/data/ukpn/vector/ukpn-66kv-overhead-lines-shapefile/README.md +++ b/web/augmented-uk/inputs/uploader/data/ukpn/vector/ukpn-66kv-overhead-lines-shapefile/README.md @@ -1 +1 @@ -Add 33kV poles and towers ShapeFile or geojson files here from https://ukpowernetworks.opendatasoft.com/explore/dataset/ukpn-66kv-overhead-lines-shapefile. \ No newline at end of file +Add 33kV poles and towers ShapeFile or geojson files here from https://ukpowernetworks.opendatasoft.com/explore/dataset/ukpn-66kv-overhead-lines-shapefile/. \ No newline at end of file diff --git a/web/docs/README.md b/web/docs/README.md index b354fcc4efa..1f74cde4337 100644 --- a/web/docs/README.md +++ b/web/docs/README.md @@ -8,6 +8,7 @@ Note that these documentation pages are a constant work in progress, and will be The TWA project has a publicly facing website located at [https://theworldavatar.io](https://theworldavatar.io). 
This is site was created by, and it hosted at, CMCL in Cambridge. It runs from Docker containers using standard [Wordpress](https://wordpress.com/) installations, behind a single reverse proxy created using [NGINX](https://www.nginx.com/). Two versions of the site are hosted at once; production (available by the aforementioned URL), and development (accessed using the `dev.` subdomain). + For more details on how the TWA website was created and hosted, see the [repository here](https://github.com/cmcl-io/theworldavatar.io). Note that this is a private repository created by CMCL, you'll need their permission to access it. ### Making changes diff --git a/web/kingslynn-flood-routing/stack-manager-inputs/README.md b/web/kingslynn-flood-routing/stack-manager-inputs/README.md index d79198f36ee..6c0df5a9538 100644 --- a/web/kingslynn-flood-routing/stack-manager-inputs/README.md +++ b/web/kingslynn-flood-routing/stack-manager-inputs/README.md @@ -1,3 +1,3 @@ # Stack Manager Input -Please populate the `secrets` folder as per instructions [here](https://github.com/cambridge-cares/TheWorldAvatar/blob/main/Deploy/stacks/dynamic/stack-manager/README.md). \ No newline at end of file +Please populate the `secrets` folder as per instructions [here](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager/README.md). \ No newline at end of file diff --git a/web/pylon-visualisation/README.md b/web/pylon-visualisation/README.md index 9edab475454..af70e1cb839 100644 --- a/web/pylon-visualisation/README.md +++ b/web/pylon-visualisation/README.md @@ -3,4 +3,4 @@ Visualisation of pylon data from National Grid and UK Power Networks compared with vegetation data. ## Requirements -Please see [Building The Image](..\example-mapbox-vis\README.md#building-the-image) for the requirements. 
[Forestry](../../../Deploy/stacks/dynamic/example_datasets/inputs/data/forestry/), [cropmap](../../../Deploy/stacks/dynamic/example_datasets/inputs/data/cropmap/), [UK Power Networks' pylons](../../../Deploy/stacks/dynamic/example_datasets/inputs/data/ukpn_pylons/), and [National Grid pylons](../../../Deploy/stacks/dynamic/example_datasets/inputs/data/ng_pylons/) data are required to be uploaded using the configuration file [pylons-and-veg.json](../../../Deploy/stacks/dynamic/example_datasets/inputs/config/pylons-and-veg.json). Instructions on loading data into the stack can be found [here](../../../Deploy/stacks/dynamic/stack-data-uploader/README.md). \ No newline at end of file +Please see [Spinning up the example stack](../twa-vis-framework/example/README.md#spinning-up-the-example-stack) for the requirements. [Forestry](https://github.com/TheWorldAvatar/stack/tree/main/examples/datasets/inputs/data/forestry/), [cropmap](https://github.com/TheWorldAvatar/stack/tree/main/examples/datasets/inputs/data/cropmap/), [UK Power Networks' pylons](https://github.com/TheWorldAvatar/stack/tree/main/examples/datasets/inputs/data/ukpn_pylons/), and [National Grid pylons](https://github.com/TheWorldAvatar/stack/tree/main/examples/datasets/inputs/data/ng_pylons/) data are required to be uploaded using the configuration file [pylons-and-veg.json](https://github.com/TheWorldAvatar/stack/tree/main/examples/datasets/inputs/config/pylons-and-veg.json). Instructions on loading data into the stack can be found [here](https://github.com/TheWorldAvatar/stack/blob/main/stack-data-uploader/README.md). 
\ No newline at end of file diff --git a/web/twa-vis-framework/docs/cesium.md b/web/twa-vis-framework/docs/cesium.md index d3b96a34ef9..d31a503a66b 100644 --- a/web/twa-vis-framework/docs/cesium.md +++ b/web/twa-vis-framework/docs/cesium.md @@ -127,7 +127,7 @@ The default location of the Cesium map can be set using the below format in the An example CesiumJS visualisation has been committed to repository to act both as an example, and a template for users putting together new visualisations. -You can find the visualisation, along with documentation of how it was put together, in the [example-cesium-vis](../example-cesium-vis/) directory. +You can find the visualisation, along with documentation of how it was put together, in the [example](../example/) directory.

diff --git a/web/twa-vis-framework/docs/mapbox.md b/web/twa-vis-framework/docs/mapbox.md index 5f6b78614b4..0e0a968ba39 100644 --- a/web/twa-vis-framework/docs/mapbox.md +++ b/web/twa-vis-framework/docs/mapbox.md @@ -116,7 +116,7 @@ Mapbox also supports a system called [SDF Icons](https://docs.mapbox.com/help/tr These are effectively PNG icons that only contains transparency and a single colour; when used it allows Mapbox to dynamically change the colour of icons based on their dynamic styling system. As SDF icons are loaded slightly differently to regular ones, any icons you wish to be treated as SDF icons will need `-sdf` added to their file name. -Icons can be uploaded to and served by GeoServer via the [stack-data-uploader](../../../Deploy/stacks/dynamic/stack-data-uploader/README.md#staticgeoserverdata). +Icons can be uploaded to and served by GeoServer via the [stack-data-uploader](https://github.com/TheWorldAvatar/stack/blob/main/stack-data-uploader/README.md#staticgeoserverdata).
@@ -141,7 +141,7 @@ The default location of the Mapbox map can be set using the below format in the An example Mapbox visualisation has been committed to repository to act both as an example, and a template for users putting together new visualisations. -You can find the visualisation, along with documentation of how it was put together, in the [example-mapbox-vis](../example-mapbox-vis/) directory. +You can find the visualisation, along with documentation of how it was put together, in the [example](../example/) directory.

diff --git a/web/twa-vis-framework/docs/overview.md b/web/twa-vis-framework/docs/overview.md index b6bf8b81c7d..20f4d4a54f3 100644 --- a/web/twa-vis-framework/docs/overview.md +++ b/web/twa-vis-framework/docs/overview.md @@ -12,7 +12,7 @@ Once displayed, a number of standard interaction handlers are also added. These ## Mapping providers -At the time of writing the available mapping providers are [Mapbox](https://www.mapbox.com/) and [Cesium](https://cesium.com/platform/Cesium/). The core differences between providers is as follows: +At the time of writing the available mapping providers are [Mapbox](https://www.mapbox.com/) and [Cesium](https://cesium.com/platform/). The core differences between providers is as follows: * Mapbox can only handle 2D data (with the option to extrude 2D polygons into basic 3D polyhedrons) from local files or from [WMS endpoints](https://en.wikipedia.org/wiki/Web_Map_Service). Unlike Cesium (see below), Mapbox can display 2D vector data (including use of SVGs for icons, under certain conditions) if the data is hosted using the [Mapbox Vector Tiles](https://docs.mapbox.com/data/tilesets/guides/vector-tiles-introduction/) format. It is however quite customisable and has relatively small performance overhead. Unless you're plotting 3D building data, it's advised to use this mapping provider. @@ -24,7 +24,7 @@ At the time of writing the available mapping providers are [Mapbox](https://www. Before we can start specifying the data to be hosted within the visualisation, we need to create a Docker container that can host the web files the visualisation uses. This can be done by running a container based on the `twa-vf` image; an image that contains the pre-built TWA-VF libraries (available from the `/var/www/html/twa-vf` directory) and a webserver. -Users can either write their own `docker-compose.yml` file to run a standalone visualisation (i.e. 
outside of a TWA Stack environment), or use the TWA Stack to create a standard visualisation integrated within a stack instance (see the [TWA Stack Manager documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) for more details on the latter). +Users can either write their own `docker-compose.yml` file to run a standalone visualisation (i.e. outside of a TWA Stack environment), or use the TWA Stack to create a standard visualisation integrated within a stack instance (see the [TWA Stack Manager documentation](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) for more details on the latter). ### Importing the library @@ -154,7 +154,7 @@ The `index.html` file of the example Mapbox & Cesium visualisations has been pro In addition to the aforementioned configuration files, two additional files are required to house a Mapbox username and associated API key. Note these are required, even in Cesium visualisations, as the base map imagery is still provided by Mapbox. -To set these two files, either create and populate `mapbox_username`, and `mapbox_api_key` files within the hosted webspace, or use the stack infrastructure to provide these as Docker secrets. You can learn more about the latter by reading [the stack's documentation](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager). +To set these two files, either create and populate `mapbox_username`, and `mapbox_api_key` files within the hosted webspace, or use the stack infrastructure to provide these as Docker secrets. You can learn more about the latter by reading [the stack's documentation](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager). Once present, these files are queried by the TWA-VF, loading in the required credentials. 
Note that previous versions of the TWA-VF required these parameters to be set within each visualisation's `index.html` file, this is no longer required (see the example visualisations to learn about the new format). @@ -164,13 +164,13 @@ It's worth noting that these credential files should **not** be committed; to th Display of meta and timeseries data is also a feature offered by the TWA-VF (regardless of the chosen mapping provider). However, the processing of getting this system setup can be quite lengthy. -To query for dynamic data, each selectable feature of your data also needs to contain `iri` and `endpoint` properties. Once selected, these are sent to a remote agent ([FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-feature-info-agent/Agents/FeatureInfoAgent)) running in a stack. Data is queried from the knowledge graph and/or relational database, then returned for display in the visualisation's side panel. +To query for dynamic data, each selectable feature of your data also needs to contain `iri` and `endpoint` properties. Once selected, these are sent to a remote agent ([FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent)) running in a stack. Data is queried from the knowledge graph and/or relational database, then returned for display in the visualisation's side panel. A breakdown of the requirements to use this system are below, for more information check out the FeatureInfoAgent's documentation. * A stack instance needs to be running (at some location, can be remote), containing: * A Blazegraph instance holding metadata on the visualised features. - * An instance of the [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/dev-feature-info-agent/Agents/FeatureInfoAgent) with a mapping of the possible feature classes to pre-written SPARQL queries. These queries must return data in a specific tabular format. 
+ * An instance of the [FeatureInfoAgent](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Agents/FeatureInfoAgent) with a mapping of the possible feature classes to pre-written SPARQL queries. These queries must return data in a specific tabular format. * If applicable, a PostgreSQL instance containing time series data. * Geospatial data needs to contain `iri`, and `endpoint` fields for each feature (regardless of how the data is served, i.e. locally or via WMS). * The `iri` field needs to contain the full IRI of the feature as represented in the knowledge graph. diff --git a/web/twa-vis-framework/docs/troubleshooting.md b/web/twa-vis-framework/docs/troubleshooting.md index 0a01d2d4c95..adbbef5e7d0 100644 --- a/web/twa-vis-framework/docs/troubleshooting.md +++ b/web/twa-vis-framework/docs/troubleshooting.md @@ -13,7 +13,7 @@ This section is relevant to all uses of the TWA-VF, regardless of the selected m | Issue/Question | Solution/Answer | | ----------- | ----------- | | Do my data files have to be hosted online? | Both mapping providers require a valid URL to load data files, this does mean that they have to be accessible online. However, data files can be included within the visualisation container (which uses Apache to host a web server) so that they can be accessed via a URL relative to the visualisation's hosted directory (i.e. "/data/tileset.json"). | -| Visualisation not updating after changes | Try clearing ([or disabling](https://www.webinstinct.com/faq/how-to-disable-browser-cache#:~:text=When%20you're%20in%20Google,close%20out%20of%20Developer%20Tools.)) your browser cache before reloading the page. | +| Visualisation not updating after changes | Try clearing ([or disabling](https://stackoverflow.com/a/7000899)) your browser cache before reloading the page. 
| | Visualisation not updating after changes | If running the visualisation within a Docker container, you may need to rebuild the Docker image and run a new container to see recent file changes. | | No data is shown | If no data is shown _and_ no layer tree is built then this suggests that one (or more) of the user defined JSON files is invalid. Please use an external validator tool (or website) to ensure that the JSON is valid. | | Hovering over a feature does nothing | The framework supports mouse hovering effects if the input data contains certain metadata fields. To show a small description box the geospatial data needs to contain a `name` and `description` field. | diff --git a/web/twa-vis-framework/docs/tutorial-mapbox.md b/web/twa-vis-framework/docs/tutorial-mapbox.md index eb669826a45..86934645329 100644 --- a/web/twa-vis-framework/docs/tutorial-mapbox.md +++ b/web/twa-vis-framework/docs/tutorial-mapbox.md @@ -60,7 +60,7 @@ These raw CSV files also contain some strange characters that aren't supported i ## Writing an ontology -As an example, a very simple sample NHS ontology has been put together to describe the concepts within this tutorial's data set. This ontology has been created as a CSV file, and uploaded via the use of the [TBox Generator](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/JPS_BASE_LIB/src/main/java/uk/ac/cam/cares/jps/base/converter), see the Stack Data Uploader's documentation for more details on how to upload it. +As an example, a very simple sample NHS ontology has been put together to describe the concepts within this tutorial's data set. This ontology has been created as a CSV file, and uploaded via the use of the [TBox Generator](https://github.com/TheWorldAvatar/BaseLib/tree/main/src/main/java/uk/ac/cam/cares/jps/base/converter), see the Stack Data Uploader's documentation for more details on how to upload it. 
A copy of the simple ontology used can be seen below as well as in the TWA repository [here](./resources/nhs.csv). @@ -151,7 +151,7 @@ source SELECT "OrganisationCode" AS id , ## Uploading the data -To upload the data so that it can be accessed as a Virtual Knowledge Graph, and stored as geospatial data in PostGIS, we first need to write a configuration file for [The Stack Data Uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader). Information on how to write the file, where to place it, then upload the data can be see on the data uploader's page. +To upload the data so that it can be accessed as a Virtual Knowledge Graph, and stored as geospatial data in PostGIS, we first need to write a configuration file for [The Stack Data Uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader). Information on how to write the file, where to place it, then upload the data can be seen on the data uploader's page. An example configuration file that covers all three CSVs used in tutorial, can be seen below and also found in the TWA repository [here](./resources/nhs.json). @@ -281,7 +281,7 @@ If you haven't already, it's worth reading through the [Overview](./overview.md) The first step here is to spin up an empty visualisation. When creating a new visualisation, it is recommended that the committed example visualisation is used. -To that end, copy the [example Mapbox visualisation](../example-mapbox-vis/) (the one containing the `run.sh` and `docker-compose.yml` files) to a new directory (of your choice) on your local machine. Using the README file within, you should be able to then spin up a docker container hosting the visualisation. +To that end, copy the [example Mapbox visualisation](../example/) (the one containing the `run.sh` and `docker-compose.yml` files) to a new directory (of your choice) on your local machine. 
Using the README file within, you should be able to then spin up a docker container hosting the visualisation. If you then access the visualisation (usually at `localhost`), you should see the example visualisation along with its sample data in Cambridge, India, and Singapore. @@ -492,7 +492,7 @@ SELECT ?Property (GROUP_CONCAT(?tmp; separator=", ") AS ?Value) WHERE { Now that we've configured the FIA to register a metadata query for IRIs with the `http://theworldavatar.com/ontology/health/nhs.owl#GPPractice` class, we can spin the agent up within our stack. -For information on how to restart the stack with the FIA agent, please see the [Stack Manager](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-manager) documentation. +For information on how to restart the stack with the FIA agent, please see the [Stack Manager](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) documentation. **3. Configuring the visualisation:**
diff --git a/web/twa-vis-framework/docs/tutorials.md b/web/twa-vis-framework/docs/tutorials.md index f6a730f5bf9..53b1cbec25c 100644 --- a/web/twa-vis-framework/docs/tutorials.md +++ b/web/twa-vis-framework/docs/tutorials.md @@ -13,7 +13,7 @@ A curated list of recommended external, non TWA-VF tutorials is shown below.
**Mapbox:** -* [Add custom markers to a map with Mapbox GL JS](https://docs.mapbox.com/help/tutorials/custom-markers-gl-js-video/) +* [Add custom markers to a map with Mapbox GL JS](https://docs.mapbox.com/help/tutorials/custom-markers-gl-js/) * [Get started with Mapbox GL JS expressions](https://docs.mapbox.com/help/tutorials/mapbox-gl-js-expressions/) * [Create interactive hover effects with Mapbox GL JS](https://docs.mapbox.com/help/tutorials/create-interactive-hover-effects-with-mapbox-gl-js/) diff --git a/web/twa-vis-framework/example/README.md b/web/twa-vis-framework/example/README.md index c8954780db8..f9a89acc594 100644 --- a/web/twa-vis-framework/example/README.md +++ b/web/twa-vis-framework/example/README.md @@ -2,7 +2,7 @@ This directory contains the sample data, configuration, and miscellaneous resources needed to spin up a TWA Stack with some example visualisations. -Please note that this example is aimed as showing new users what the visualisation framework is capable of, and to create an experimentation space in which data formats, styling, and other visualisation functionality can be played with. The generation of the sample data, and its stack configuration files are not explained here; for more details on this, see the [TWA Stack](../../../Deploy/stacks/dynamic/stack-manager) documentation. +Please note that this example is aimed at showing new users what the visualisation framework is capable of, and to create an experimentation space in which data formats, styling, and other visualisation functionality can be played with. The generation of the sample data, and its stack configuration files are not explained here; for more details on this, see the [TWA Stack](https://github.com/TheWorldAvatar/stack/tree/main/stack-manager) documentation. 
## Mapbox diff --git a/web/twa-vis-framework/example/mapbox.md b/web/twa-vis-framework/example/mapbox.md index 4f275639c95..d6083861766 100644 --- a/web/twa-vis-framework/example/mapbox.md +++ b/web/twa-vis-framework/example/mapbox.md @@ -28,7 +28,7 @@ A small amount of sample data has been committed to demonstrate the power of the In most deployed visualisations, an online stack of microservices will provide data endpoints through which data can be queried/loaded onto the visualisation. In this example, no online stack is used, solely to remove a lengthy prerequisite step. Instead, sample data in local GeoJSON files have been added (to be hosted by the visualisation's web server) and, in one case, a community provided WMS endpoint connected to. -In production, it is advised that all data is loaded into a geospatial data provider (like GeoServer) and a WMS endpoint used; local files can be utilised but then do not offer the optimisation and caching of services like GeoServer. For more information on how to do this, see the README for the [Stack Data Uploader](https://github.com/cambridge-cares/TheWorldAvatar/tree/main/Deploy/stacks/dynamic/stack-data-uploader). +In production, it is advised that all data is loaded into a geospatial data provider (like GeoServer) and a WMS endpoint used; local files can be utilised but then do not offer the optimisation and caching of services like GeoServer. For more information on how to do this, see the README for the [Stack Data Uploader](https://github.com/TheWorldAvatar/stack/tree/main/stack-data-uploader). It's also worth noting that with this example visualisation, no triplestore data or FeatureInfoAgent is used, hence there is no support for dynamic metadata and timeseries data is unavailable. This is something that we plan to work on in future. 
diff --git a/web/twa-vis-framework/library/README.md b/web/twa-vis-framework/library/README.md index 00a49233b93..c22fa14c712 100644 --- a/web/twa-vis-framework/library/README.md +++ b/web/twa-vis-framework/library/README.md @@ -39,7 +39,7 @@ To function correctly, visualisations using this framework also needs to include * [Turf](https://turfjs.org/) * [Hummingbird Treeview](https://github.com/hummingbird-dev/hummingbird-treeview) -An example of the required import statements should be available in the example [Mapbox](../example-mapbox-vis/webspace/index.html) and [Cesium](../example-cesium-vis/webspace/index.html) visualisations. +An example of the required import statements should be available in the example [Mapbox](../example/mapbox.md) and [Cesium](../example/cesium.md) visualisations. ## Architecture @@ -93,11 +93,11 @@ The following automated GitHub actions have been setup for the TWA-VF (all defin ## Planned changes -An overview of bug reports, feature requests, and open PRs can be see using the [TWA Visualisation Framework](https://github.com/orgs/cambridge-cares/projects/1) project. Any new reports or requests should be linked to this project to ensure that it contains a complete overview of all related information. +An overview of bug reports, feature requests, and open PRs can be seen using the [TWA Visualisation Framework](https://github.com/TheWorldAvatar/viz) project. Any new reports or requests should be linked to this project to ensure that it contains a complete overview of all related information. ### Issues -Bugs should be reported as GitHub issues using the `TWA-VF:` prefix along with a short name for the issue. A detailed description of the issue along with reproduction steps, and if possible, [an image of the issue](https://gist.github.com/NawalJAhmed/2168f7659c08b6a033e7f6daf8db69a6). +Bugs should be reported as GitHub issues using the `TWA-VF:` prefix along with a short name for the issue. 
A detailed description of the issue along with reproduction steps, and if possible, [an image of the issue](https://gist.github.com/namirjahmed/2168f7659c08b6a033e7f6daf8db69a6). Issue reporting a bug should also use the provided `bug` tag and link to the TWA Visualisation Framework project. From 7f3d05e315b52913c1ee3165d30813759d8fee57 Mon Sep 17 00:00:00 2001 From: Myles MacDonald Date: Sat, 12 Apr 2025 16:50:30 -0400 Subject: [PATCH 09/30] fix config --- .github/workflows/twa-md-push.json | 48 ------------------------------ 1 file changed, 48 deletions(-) diff --git a/.github/workflows/twa-md-push.json b/.github/workflows/twa-md-push.json index 716e99fccd0..48d365b4414 100644 --- a/.github/workflows/twa-md-push.json +++ b/.github/workflows/twa-md-push.json @@ -38,54 +38,6 @@ }, { "pattern": "https?://docs.unity3d.com/?" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoassetmanagement" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobim" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontolab" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotechnicalsystem" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/pirmasens/tree/main/districtheating_stack" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/pirmasens/tree/main/psdt/stack\\-data\\-uploader\\-inputs/data/dlm" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/pirmasens/blob/main/psdt/stack\\-data\\-uploader\\-inputs/config/dlm\\.json" - }, - { - "pattern": 
"https://caret\\.io\\?ref=parsedown" - }, - { - "pattern": "http://caret\\.io\\?ref=parsedown" - }, - { - "pattern": "tjl47@cam.ac.uk" - }, - { - "pattern": "sh2000@cam.ac.uk" - }, - { - "pattern": "msff2@cam.ac.uk" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuiltenv" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/dev\\-sea\\-level\\-rise\\-singapore/Deploy/stacks/Singapore\\-sea\\-level\\-rise" } ], "aliveStatusCodes": [ From 442b4152f7f4e37551814dd4b297b8ee571ee8b6 Mon Sep 17 00:00:00 2001 From: Myles MacDonald <65673247+mayomatsuda@users.noreply.github.com> Date: Sat, 12 Apr 2025 17:05:55 -0400 Subject: [PATCH 10/30] Dev markdown link check (#4) * dev-markdown-link-check: add github workflow to check markdown links. Resolves #464 * fix markdown links * fix broken links * fix readmes * fix links * fix broken links and configure github action * add on merge for modified files * fix config --------- Co-authored-by: Myles MacDonald --- .github/workflows/twa-md-push.json | 48 ------------------------------ 1 file changed, 48 deletions(-) diff --git a/.github/workflows/twa-md-push.json b/.github/workflows/twa-md-push.json index 716e99fccd0..48d365b4414 100644 --- a/.github/workflows/twa-md-push.json +++ b/.github/workflows/twa-md-push.json @@ -38,54 +38,6 @@ }, { "pattern": "https?://docs.unity3d.com/?" 
- }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontoassetmanagement" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobim" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontodevice" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontolab" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotechnicalsystem" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontotimeseries" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/pirmasens/tree/main/districtheating_stack" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/pirmasens/tree/main/psdt/stack\\-data\\-uploader\\-inputs/data/dlm" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/pirmasens/blob/main/psdt/stack\\-data\\-uploader\\-inputs/config/dlm\\.json" - }, - { - "pattern": "https://caret\\.io\\?ref=parsedown" - }, - { - "pattern": "http://caret\\.io\\?ref=parsedown" - }, - { - "pattern": "tjl47@cam.ac.uk" - }, - { - "pattern": "sh2000@cam.ac.uk" - }, - { - "pattern": "msff2@cam.ac.uk" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/main/JPS_Ontology/ontology/ontobuiltenv" - }, - { - "pattern": "https://github\\.com/cambridge\\-cares/TheWorldAvatar/tree/dev\\-sea\\-level\\-rise\\-singapore/Deploy/stacks/Singapore\\-sea\\-level\\-rise" } ], "aliveStatusCodes": [ From 39e9a4f29dc6763dfd5a46c066886c5734a23302 Mon Sep 17 00:00:00 2001 From: Myles MacDonald Date: Sat, 12 Apr 2025 17:10:54 -0400 Subject: [PATCH 11/30] add on merge fix --- .github/workflows/twa-md-merge.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/twa-md-merge.yml 
b/.github/workflows/twa-md-merge.yml index 86dafaf48fe..0b1efbd5d85 100644 --- a/.github/workflows/twa-md-merge.yml +++ b/.github/workflows/twa-md-merge.yml @@ -15,6 +15,7 @@ on: jobs: # Check for broken links only within Markdown files changed by the pull request markdown-link-check: + on: [pull_request] name: Check markdown files for broken links runs-on: ubuntu-latest steps: From 2d3ad19215a005f3634502eaf0aa9b8c75c23500 Mon Sep 17 00:00:00 2001 From: Myles MacDonald <65673247+mayomatsuda@users.noreply.github.com> Date: Sat, 12 Apr 2025 17:11:57 -0400 Subject: [PATCH 12/30] Dev markdown link check (#5) * dev-markdown-link-check: add github workflow to check markdown links. Resolves #464 * fix markdown links * fix broken links * fix readmes * fix links * fix broken links and configure github action * add on merge for modified files * fix config * add on merge fix --------- Co-authored-by: Myles MacDonald --- .github/workflows/twa-md-merge.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/twa-md-merge.yml b/.github/workflows/twa-md-merge.yml index 86dafaf48fe..0b1efbd5d85 100644 --- a/.github/workflows/twa-md-merge.yml +++ b/.github/workflows/twa-md-merge.yml @@ -15,6 +15,7 @@ on: jobs: # Check for broken links only within Markdown files changed by the pull request markdown-link-check: + on: [pull_request] name: Check markdown files for broken links runs-on: ubuntu-latest steps: From 71689b51b653d03178cc7742efafed8a92735afb Mon Sep 17 00:00:00 2001 From: Myles MacDonald Date: Sat, 12 Apr 2025 17:17:01 -0400 Subject: [PATCH 13/30] remove on merge --- .github/workflows/twa-md-merge.yml | 28 ---------------------------- 1 file changed, 28 deletions(-) delete mode 100644 .github/workflows/twa-md-merge.yml diff --git a/.github/workflows/twa-md-merge.yml b/.github/workflows/twa-md-merge.yml deleted file mode 100644 index 0b1efbd5d85..00000000000 --- a/.github/workflows/twa-md-merge.yml +++ /dev/null @@ -1,28 +0,0 @@ -# -# This workflow contains a 
job to check for broken links within Markdown files in the repository. -# -name: TWA Markdown Merge - -# Trigger this workflow during pull requests to the 'main' branch if changes to Markdown files -on: - pull_request: - branches: - - main - paths: - - '**.md' - - '**.MD' - -jobs: - # Check for broken links only within Markdown files changed by the pull request - markdown-link-check: - on: [pull_request] - name: Check markdown files for broken links - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - - name: Markdown links check - uses: ruzickap/action-my-markdown-link-checker@v1 - with: - config_file: .github/workflows/twa-md-push.json - check-modified-files-only: 'yes' \ No newline at end of file From cd659bb30043915f407dfda873c84d59dd8d1e9f Mon Sep 17 00:00:00 2001 From: Myles MacDonald Date: Sun, 13 Apr 2025 13:58:14 -0400 Subject: [PATCH 14/30] finish link fixes --- .github/workflows/twa-md-push.json | 9 +++++++++ Agents/AssetManagerAgent/README.md | 4 ++-- Agents/CARESWeatherStationAgent/README.md | 4 ++-- Agents/CarparkAgent/README.md | 2 +- Agents/MetOfficeAgent/README.md | 4 ++-- Agents/ThingspeakAgent/README.MD | 2 +- .../TWA-VP/public/optional-pages/help-page.md | 12 ++++++------ .../site/vendor/php-http/message-factory/README.md | 2 +- 8 files changed, 24 insertions(+), 15 deletions(-) diff --git a/.github/workflows/twa-md-push.json b/.github/workflows/twa-md-push.json index 48d365b4414..5b65a8e40ad 100644 --- a/.github/workflows/twa-md-push.json +++ b/.github/workflows/twa-md-push.json @@ -38,6 +38,15 @@ }, { "pattern": "https?://docs.unity3d.com/?" + }, + { + "pattern": "#(/[^ ]*)?" + }, + { + "pattern": "/user(/[^ ]*)?" + }, + { + "pattern": "/explore(/[^ ]*)?" 
} ], "aliveStatusCodes": [ diff --git a/Agents/AssetManagerAgent/README.md b/Agents/AssetManagerAgent/README.md index cd5ea52e359..0d5e780d37a 100644 --- a/Agents/AssetManagerAgent/README.md +++ b/Agents/AssetManagerAgent/README.md @@ -34,7 +34,7 @@ The agent also handles the instantiation of purchase documents such as purchase Currently the agent is designed specifically to handle assets of CARES. The agent is designed to handle assets on 2 different locations, CARES office and the Research Wing. Hence the agent relies on inputting the information regarding locations into 2 namespaces: the `lab` and `office` namespace in the knowledge graph. Assets on other locations are put into the `office` namespace as default, where their location will only be recorded as a literal. -As this agent is designed to work together with the asset management app, the app requires a check whether the asset has some sort of timeseries attached to its concept when retrieved (for example, a fumehood’s air consumption). This timeseries is commonly contained in the `bms` namespace and will be used as default for this search, since currently the agent is designed specifically to handle assets of CARES. The details of this timeseries search on retrieve can be read further at [/retrieve](#--retrieve). +As this agent is designed to work together with the asset management app, the app requires a check whether the asset has some sort of timeseries attached to its concept when retrieved (for example, a fumehood’s air consumption). This timeseries is commonly contained in the `bms` namespace and will be used as default for this search, since currently the agent is designed specifically to handle assets of CARES. The details of this timeseries search on retrieve can be read further at [/retrieve](#retrieval). @@ -67,7 +67,7 @@ Update the following files in the `./config` folder: - `/ontologyMap.properties`: A map of the asset type and their IRIs. 
Refer to the available values in [AssetClass](#instantiation) -- `/tsSearch.properties`: Parameters of asset timeseries data search on retrieve. The details of this timeseries search on retrieve can be read further at [/retrieve](#--retrieve). +- `/tsSearch.properties`: Parameters of asset timeseries data search on retrieve. The details of this timeseries search on retrieve can be read further at [/retrieve](#retrieval). - `depth`: Search depth for looking for asset timeseries existence - `predicate`: Triple's predicate used to detect if an asset have a timeseries or not. An array of IRI (please use the full IRI, don't use prefixes) separated by a comma (`,`). diff --git a/Agents/CARESWeatherStationAgent/README.md b/Agents/CARESWeatherStationAgent/README.md index 3bcb203c12a..57090a29346 100644 --- a/Agents/CARESWeatherStationAgent/README.md +++ b/Agents/CARESWeatherStationAgent/README.md @@ -22,7 +22,7 @@ The actual endpoint has the following structure: https://api.weather.com/v2/pws/observations/all/1day?stationId=[]&format=json&units=s&numericPrecision=decimal&apiKey=[] ``` where `[stationId]` is the id of the weather station which is taking the physical readings. The `[apiKey]` is the key needed to access the API. By setting `units=s` one ensures that the values of the readings correspond to SI units. -Finally, the option `numericPrecision=decimal` enables the numerical readings to be returned in decimal values (unless according to the API the field under observation can only return an integer. See also the [API documentation](#Weather-Station-API)). +Finally, the option `numericPrecision=decimal` enables the numerical readings to be returned in decimal values (unless according to the API the field under observation can only return an integer. See also the [API documentation](#weather-station-api)). The endpoint controls what type of data is retrieved and its form. 
#### Example readings @@ -88,7 +88,7 @@ The time-series client property file needs to contain all credentials and endpoi More information can be found in the example property file `client.properties` in the `config` folder. #### API properties -The API properties contain the credentials to authorize access to the weather Station API (see the [API description](#Weather-Station-API)), +The API properties contain the credentials to authorize access to the weather Station API (see the [API description](#weather-station-api)), as well as, the url of the API and the identifier of the weather station. More specifically, the API properties file should contain the following keys: - `weather.api_key` the key needed to access the API. - `weather.stationId` the stationId associated with the sensor. diff --git a/Agents/CarparkAgent/README.md b/Agents/CarparkAgent/README.md index 0a4c269e9dc..b0be270d436 100644 --- a/Agents/CarparkAgent/README.md +++ b/Agents/CarparkAgent/README.md @@ -15,7 +15,7 @@ The second API allows the retrieval of carpark ratings. More information can be ## Property files For running the agent, three property files are required: - One [property file for the agent](#agent-properties) itself pointing to the mapping configuration. -- One [property file for the time-series client](#time-series-client-properties) defining how to access the database and SPARQL endpoint. +- One [property file for the time-series client](#client-properties) defining how to access the database and SPARQL endpoint. - One [property file for the carpark APIs](#api-properties) defining the properties needed to access the API. 
### Agent properties diff --git a/Agents/MetOfficeAgent/README.md b/Agents/MetOfficeAgent/README.md index cc31baaaff8..a340d9156f6 100755 --- a/Agents/MetOfficeAgent/README.md +++ b/Agents/MetOfficeAgent/README.md @@ -234,8 +234,8 @@ Markus Hofmeister (mh807@cam.ac.uk), January 2022 [Dockerfile]: Dockerfile [docker compose file]: docker-compose.yml -[docker-compose.test.yml]: tests\docker-compose.test.yml -[example retrieve all request]: resources\HTTPRequest_retrieve_all.http +[docker-compose.test.yml]: tests/docker-compose.test.yml +[example retrieve all request]: resources/HTTPRequest_retrieve_all.http [resources]: resources [stack.sh]: stack.sh [tests]: tests diff --git a/Agents/ThingspeakAgent/README.MD b/Agents/ThingspeakAgent/README.MD index a535c3f09e3..2c88beb5079 100644 --- a/Agents/ThingspeakAgent/README.MD +++ b/Agents/ThingspeakAgent/README.MD @@ -73,7 +73,7 @@ The time-series client property file needs to contain all credentials and endpoi More information can be found in the example property file `client.properties` in the `config` folder. #### API properties -The API properties contains the parameters needed to access the Thingspeak API (see the [API description](#Thingspeak-API)). It should contain the following keys: +The API properties contains the parameters needed to access the Thingspeak API (see the [API description](#thingspeak-api)). It should contain the following keys: - `thingspeak.channelNumber` the ID of the channel. - `thingspeak.apiKey` the API Key needed to read data from a private channel, a public channel do not require an API Key and this key can be set to equal to "None". - `thingspeak.results` the number of results to retrieve from the Thingspeak server. 
diff --git a/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/help-page.md b/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/help-page.md index 19bcd972b68..3d96d3c709d 100644 --- a/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/help-page.md +++ b/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/help-page.md @@ -5,7 +5,7 @@ slug: help -

 Help

+

 Help

## Getting Started @@ -16,12 +16,12 @@ The platform offers a web map and dashboard tool for data exploration and trend Users can navigate the platform by clicking on buttons or the navigation bar at the top of the screen.
- +
Fig 1a: Button to return to previous page
- +
Fig 1b: Navigation bar at top of the screen
@@ -31,9 +31,9 @@ Interaction is enabled for mouse or pointing device, but keyboard shortcuts are
- - - + + +
Fig 2: Mouse controls
diff --git a/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md b/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md index 471bbda2170..7830f0178d4 100644 --- a/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md +++ b/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md @@ -23,7 +23,7 @@ Please see the [official documentation](https://docs.php-http.org/en/latest/mess ## Security -If you discover any security related issues, please contact us at [security@php-http.org](mailto:security@php-http.org). +If you discover any security related issues, please contact us at [security@php-http.org](mailto:security@php-http.org). ## License From a455cef7a90b8ddd16962238b75d7094a433809e Mon Sep 17 00:00:00 2001 From: Myles MacDonald Date: Sun, 13 Apr 2025 14:02:22 -0400 Subject: [PATCH 15/30] finish link fixes --- .github/workflows/twa-md-push.json | 3 +++ .../TWA-VP/public/optional-pages/help-page.md | 12 ++++++------ 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/.github/workflows/twa-md-push.json b/.github/workflows/twa-md-push.json index 5b65a8e40ad..5eaf84cf24c 100644 --- a/.github/workflows/twa-md-push.json +++ b/.github/workflows/twa-md-push.json @@ -47,6 +47,9 @@ }, { "pattern": "/explore(/[^ ]*)?" + }, + { + "pattern": "./images/defaults(/[^ ]*)?" } ], "aliveStatusCodes": [ diff --git a/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/help-page.md b/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/help-page.md index 3d96d3c709d..19bcd972b68 100644 --- a/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/help-page.md +++ b/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/help-page.md @@ -5,7 +5,7 @@ slug: help -

 Help

+

 Help

## Getting Started @@ -16,12 +16,12 @@ The platform offers a web map and dashboard tool for data exploration and trend Users can navigate the platform by clicking on buttons or the navigation bar at the top of the screen.
- +
Fig 1a: Button to return to previous page
- +
Fig 1b: Navigation bar at top of the screen
@@ -31,9 +31,9 @@ Interaction is enabled for mouse or pointing device, but keyboard shortcuts are
- - - + + +
Fig 2: Mouse controls
From ef473c8024cb44dfea1d352a0b7a7f17f20bf412 Mon Sep 17 00:00:00 2001 From: Myles MacDonald <65673247+mayomatsuda@users.noreply.github.com> Date: Sun, 13 Apr 2025 14:03:04 -0400 Subject: [PATCH 16/30] Dev markdown link check (#7) * dev-markdown-link-check: add github workflow to check markdown links. Resolves #464 * fix markdown links * fix broken links * fix readmes * fix links * fix broken links and configure github action * add on merge for modified files * fix config * add on merge fix * finish link fixes * finish link fixes --------- Co-authored-by: Myles MacDonald --- .github/workflows/twa-md-push.json | 12 ++++++++++++ Agents/AssetManagerAgent/README.md | 4 ++-- Agents/CARESWeatherStationAgent/README.md | 4 ++-- Agents/CarparkAgent/README.md | 2 +- Agents/MetOfficeAgent/README.md | 4 ++-- Agents/ThingspeakAgent/README.MD | 2 +- .../site/vendor/php-http/message-factory/README.md | 2 +- 7 files changed, 21 insertions(+), 9 deletions(-) diff --git a/.github/workflows/twa-md-push.json b/.github/workflows/twa-md-push.json index 48d365b4414..5eaf84cf24c 100644 --- a/.github/workflows/twa-md-push.json +++ b/.github/workflows/twa-md-push.json @@ -38,6 +38,18 @@ }, { "pattern": "https?://docs.unity3d.com/?" + }, + { + "pattern": "#(/[^ ]*)?" + }, + { + "pattern": "/user(/[^ ]*)?" + }, + { + "pattern": "/explore(/[^ ]*)?" + }, + { + "pattern": "./images/defaults(/[^ ]*)?" } ], "aliveStatusCodes": [ diff --git a/Agents/AssetManagerAgent/README.md b/Agents/AssetManagerAgent/README.md index cd5ea52e359..0d5e780d37a 100644 --- a/Agents/AssetManagerAgent/README.md +++ b/Agents/AssetManagerAgent/README.md @@ -34,7 +34,7 @@ The agent also handles the instantiation of purchase documents such as purchase Currently the agent is designed specifically to handle assets of CARES. The agent is designed to handle assets on 2 different locations, CARES office and the Research Wing. 
Hence the agent relies on inputting the information regarding locations into 2 namespaces: the `lab` and `office` namespace in the knowledge graph. Assets on other locations are put into the `office` namespace as default, where their location will only be recorded as a literal. -As this agent is designed to work together with the asset management app, the app requires a check whether the asset has some sort of timeseries attached to its concept when retrieved (for example, a fumehood’s air consumption). This timeseries is commonly contained in the `bms` namespace and will be used as default for this search, since currently the agent is designed specifically to handle assets of CARES. The details of this timeseries search on retrieve can be read further at [/retrieve](#--retrieve). +As this agent is designed to work together with the asset management app, the app requires a check whether the asset has some sort of timeseries attached to its concept when retrieved (for example, a fumehood’s air consumption). This timeseries is commonly contained in the `bms` namespace and will be used as default for this search, since currently the agent is designed specifically to handle assets of CARES. The details of this timeseries search on retrieve can be read further at [/retrieve](#retrieval). @@ -67,7 +67,7 @@ Update the following files in the `./config` folder: - `/ontologyMap.properties`: A map of the asset type and their IRIs. Refer to the available values in [AssetClass](#instantiation) -- `/tsSearch.properties`: Parameters of asset timeseries data search on retrieve. The details of this timeseries search on retrieve can be read further at [/retrieve](#--retrieve). +- `/tsSearch.properties`: Parameters of asset timeseries data search on retrieve. The details of this timeseries search on retrieve can be read further at [/retrieve](#retrieval). 
- `depth`: Search depth for looking for asset timeseries existence - `predicate`: Triple's predicate used to detect if an asset have a timeseries or not. An array of IRI (please use the full IRI, don't use prefixes) separated by a comma (`,`). diff --git a/Agents/CARESWeatherStationAgent/README.md b/Agents/CARESWeatherStationAgent/README.md index 3bcb203c12a..57090a29346 100644 --- a/Agents/CARESWeatherStationAgent/README.md +++ b/Agents/CARESWeatherStationAgent/README.md @@ -22,7 +22,7 @@ The actual endpoint has the following structure: https://api.weather.com/v2/pws/observations/all/1day?stationId=[]&format=json&units=s&numericPrecision=decimal&apiKey=[] ``` where `[stationId]` is the id of the weather station which is taking the physical readings. The `[apiKey]` is the key needed to access the API. By setting `units=s` one ensures that the values of the readings correspond to SI units. -Finally, the option `numericPrecision=decimal` enables the numerical readings to be returned in decimal values (unless according to the API the field under observation can only return an integer. See also the [API documentation](#Weather-Station-API)). +Finally, the option `numericPrecision=decimal` enables the numerical readings to be returned in decimal values (unless according to the API the field under observation can only return an integer. See also the [API documentation](#weather-station-api)). The endpoint controls what type of data is retrieved and its form. #### Example readings @@ -88,7 +88,7 @@ The time-series client property file needs to contain all credentials and endpoi More information can be found in the example property file `client.properties` in the `config` folder. 
#### API properties -The API properties contain the credentials to authorize access to the weather Station API (see the [API description](#Weather-Station-API)), +The API properties contain the credentials to authorize access to the weather Station API (see the [API description](#weather-station-api)), as well as, the url of the API and the identifier of the weather station. More specifically, the API properties file should contain the following keys: - `weather.api_key` the key needed to access the API. - `weather.stationId` the stationId associated with the sensor. diff --git a/Agents/CarparkAgent/README.md b/Agents/CarparkAgent/README.md index 0a4c269e9dc..b0be270d436 100644 --- a/Agents/CarparkAgent/README.md +++ b/Agents/CarparkAgent/README.md @@ -15,7 +15,7 @@ The second API allows the retrieval of carpark ratings. More information can be ## Property files For running the agent, three property files are required: - One [property file for the agent](#agent-properties) itself pointing to the mapping configuration. -- One [property file for the time-series client](#time-series-client-properties) defining how to access the database and SPARQL endpoint. +- One [property file for the time-series client](#client-properties) defining how to access the database and SPARQL endpoint. - One [property file for the carpark APIs](#api-properties) defining the properties needed to access the API. 
### Agent properties diff --git a/Agents/MetOfficeAgent/README.md b/Agents/MetOfficeAgent/README.md index cc31baaaff8..a340d9156f6 100755 --- a/Agents/MetOfficeAgent/README.md +++ b/Agents/MetOfficeAgent/README.md @@ -234,8 +234,8 @@ Markus Hofmeister (mh807@cam.ac.uk), January 2022 [Dockerfile]: Dockerfile [docker compose file]: docker-compose.yml -[docker-compose.test.yml]: tests\docker-compose.test.yml -[example retrieve all request]: resources\HTTPRequest_retrieve_all.http +[docker-compose.test.yml]: tests/docker-compose.test.yml +[example retrieve all request]: resources/HTTPRequest_retrieve_all.http [resources]: resources [stack.sh]: stack.sh [tests]: tests diff --git a/Agents/ThingspeakAgent/README.MD b/Agents/ThingspeakAgent/README.MD index a535c3f09e3..2c88beb5079 100644 --- a/Agents/ThingspeakAgent/README.MD +++ b/Agents/ThingspeakAgent/README.MD @@ -73,7 +73,7 @@ The time-series client property file needs to contain all credentials and endpoi More information can be found in the example property file `client.properties` in the `config` folder. #### API properties -The API properties contains the parameters needed to access the Thingspeak API (see the [API description](#Thingspeak-API)). It should contain the following keys: +The API properties contains the parameters needed to access the Thingspeak API (see the [API description](#thingspeak-api)). It should contain the following keys: - `thingspeak.channelNumber` the ID of the channel. - `thingspeak.apiKey` the API Key needed to read data from a private channel, a public channel do not require an API Key and this key can be set to equal to "None". - `thingspeak.results` the number of results to retrieve from the Thingspeak server. 
diff --git a/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md b/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md index 471bbda2170..7830f0178d4 100644 --- a/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md +++ b/Deploy/stacks/web/website/site/vendor/php-http/message-factory/README.md @@ -23,7 +23,7 @@ Please see the [official documentation](https://docs.php-http.org/en/latest/mess ## Security -If you discover any security related issues, please contact us at [security@php-http.org](mailto:security@php-http.org). +If you discover any security related issues, please contact us at [security@php-http.org](mailto:security@php-http.org). ## License From 306ac015a5fd02125b69745e4cb267c607241ae4 Mon Sep 17 00:00:00 2001 From: Myles MacDonald Date: Sun, 13 Apr 2025 14:05:21 -0400 Subject: [PATCH 17/30] empty commit to test From a507a1f4035bc4ada535ef2c7ee7612faf394f90 Mon Sep 17 00:00:00 2001 From: Myles MacDonald <65673247+mayomatsuda@users.noreply.github.com> Date: Sun, 13 Apr 2025 14:06:49 -0400 Subject: [PATCH 18/30] Dev markdown link check (#8) * dev-markdown-link-check: add github workflow to check markdown links. 
Resolves #464 * fix markdown links * fix broken links * fix readmes * fix links * fix broken links and configure github action * add on merge for modified files * fix config * add on merge fix * finish link fixes * finish link fixes --------- Co-authored-by: Myles MacDonald From 70cf930ae0c273612d8455c25903df2427b51758 Mon Sep 17 00:00:00 2001 From: Myles MacDonald Date: Sun, 13 Apr 2025 14:09:41 -0400 Subject: [PATCH 19/30] rename workflow --- .github/workflows/twa-md-push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/twa-md-push.yml b/.github/workflows/twa-md-push.yml index ede7f3e72d0..999ef140db3 100644 --- a/.github/workflows/twa-md-push.yml +++ b/.github/workflows/twa-md-push.yml @@ -1,7 +1,7 @@ # # This workflow contains a job to check for broken links within Markdown files in the repository. # -name: TWA Markdown Push +name: Check Markdown links # Trigger this workflow during pushes to the 'main' branch if changes to Markdown files on: From 056ccb277d3bba4f19f40a0922ccd16f3d2004e8 Mon Sep 17 00:00:00 2001 From: Myles MacDonald <65673247+mayomatsuda@users.noreply.github.com> Date: Sun, 13 Apr 2025 14:10:10 -0400 Subject: [PATCH 20/30] Dev markdown link check (#9) * dev-markdown-link-check: add github workflow to check markdown links. 
Resolves #464 * fix markdown links * fix broken links * fix readmes * fix links * fix broken links and configure github action * add on merge for modified files * fix config * add on merge fix * finish link fixes * finish link fixes * rename workflow --------- Co-authored-by: Myles MacDonald --- .github/workflows/twa-md-push.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/twa-md-push.yml b/.github/workflows/twa-md-push.yml index ede7f3e72d0..999ef140db3 100644 --- a/.github/workflows/twa-md-push.yml +++ b/.github/workflows/twa-md-push.yml @@ -1,7 +1,7 @@ # # This workflow contains a job to check for broken links within Markdown files in the repository. # -name: TWA Markdown Push +name: Check Markdown links # Trigger this workflow during pushes to the 'main' branch if changes to Markdown files on: From c5bf894b33133a126fe01d8da7ea0fa2070421f7 Mon Sep 17 00:00:00 2001 From: Myles MacDonald Date: Sun, 13 Apr 2025 14:11:47 -0400 Subject: [PATCH 21/30] test --- .../TWA-VP/public/optional-pages/landing-page.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md b/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md index c84a8f0c9ea..9b5e4d3ef90 100644 --- a/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md +++ b/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md @@ -12,3 +12,5 @@ We address this gap using existing timestamped measures of physical activity, lo Results will be relevant to local authorities, including more detailed understanding of how people use space and the duration and timing of health-related exposures. Long term, this research could facilitate real-time nudges of health behaviours using smartphones. This visualisation shows an example of our work in progress. 
+ +test \ No newline at end of file From b66a6c8a7bded62811eedd020901a8d902cc7143 Mon Sep 17 00:00:00 2001 From: Myles MacDonald <65673247+mayomatsuda@users.noreply.github.com> Date: Sun, 13 Apr 2025 14:12:22 -0400 Subject: [PATCH 22/30] Dev markdown link check (#10) * dev-markdown-link-check: add github workflow to check markdown links. Resolves #464 * fix markdown links * fix broken links * fix readmes * fix links * fix broken links and configure github action * add on merge for modified files * fix config * add on merge fix * finish link fixes * finish link fixes * rename workflow * test --------- Co-authored-by: Myles MacDonald --- .../TWA-VP/public/optional-pages/landing-page.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md b/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md index c84a8f0c9ea..9b5e4d3ef90 100644 --- a/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md +++ b/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md @@ -12,3 +12,5 @@ We address this gap using existing timestamped measures of physical activity, lo Results will be relevant to local authorities, including more detailed understanding of how people use space and the duration and timing of health-related exposures. Long term, this research could facilitate real-time nudges of health behaviours using smartphones. This visualisation shows an example of our work in progress. 
+ +test \ No newline at end of file From 5cad977802cfa012657d19ca35dcec46b5465bca Mon Sep 17 00:00:00 2001 From: Myles MacDonald Date: Sun, 13 Apr 2025 14:13:12 -0400 Subject: [PATCH 23/30] undo test --- .../TWA-VP/public/optional-pages/landing-page.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md b/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md index 9b5e4d3ef90..ffbc45f15ea 100644 --- a/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md +++ b/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md @@ -11,6 +11,4 @@ We address this gap using existing timestamped measures of physical activity, lo Results will be relevant to local authorities, including more detailed understanding of how people use space and the duration and timing of health-related exposures. Long term, this research could facilitate real-time nudges of health behaviours using smartphones. -This visualisation shows an example of our work in progress. - -test \ No newline at end of file +This visualisation shows an example of our work in progress. \ No newline at end of file From b0f8192437980bf2a4f55412a630933f0fa49aff Mon Sep 17 00:00:00 2001 From: Myles MacDonald <65673247+mayomatsuda@users.noreply.github.com> Date: Sun, 13 Apr 2025 14:14:14 -0400 Subject: [PATCH 24/30] Dev markdown link check (#11) * dev-markdown-link-check: add github workflow to check markdown links. 
Resolves #464 * fix markdown links * fix broken links * fix readmes * fix links * fix broken links and configure github action * add on merge for modified files * fix config * add on merge fix * finish link fixes * finish link fixes * rename workflow * test * undo test --------- Co-authored-by: Myles MacDonald --- .../TWA-VP/public/optional-pages/landing-page.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md b/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md index 9b5e4d3ef90..c84a8f0c9ea 100644 --- a/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md +++ b/Deploy/stacks/AI4PublicHealth/TWA-VP/public/optional-pages/landing-page.md @@ -12,5 +12,3 @@ We address this gap using existing timestamped measures of physical activity, lo Results will be relevant to local authorities, including more detailed understanding of how people use space and the duration and timing of health-related exposures. Long term, this research could facilitate real-time nudges of health behaviours using smartphones. This visualisation shows an example of our work in progress. 
- -test \ No newline at end of file From d963046794e2fdd259a643081c67e640b32f67f8 Mon Sep 17 00:00:00 2001 From: Myles MacDonald Date: Sun, 13 Apr 2025 16:35:08 -0400 Subject: [PATCH 25/30] fix links --- .../MetOfficeSolarSensorAgent/README.md | 6 +++--- .../PostcodeSolarEnergyAgent/README.md | 6 +++--- .../RenewableEnergyAgents/PostcodeWindEnergyAgent/README.md | 6 +++--- .../UrbanObservatoryWindSensorAgent/README.md | 6 +++--- .../stack-data-uploader-inputs/data/dgm1/contours/README.md | 2 +- .../web/website/site/vendor/kodus/psr7-server/README.md | 4 ++-- 6 files changed, 15 insertions(+), 15 deletions(-) diff --git a/Agents/RenewableEnergyAgents/MetOfficeSolarSensorAgent/README.md b/Agents/RenewableEnergyAgents/MetOfficeSolarSensorAgent/README.md index 520952af198..ec5b0ccd27f 100644 --- a/Agents/RenewableEnergyAgents/MetOfficeSolarSensorAgent/README.md +++ b/Agents/RenewableEnergyAgents/MetOfficeSolarSensorAgent/README.md @@ -1,8 +1,8 @@ # An agent to represent and query UK solar data reported by the Met Office. ### Authors -* [Toby Latcham](tjl47@cam.ac.uk) -* [Sophie Hall](sh2000@cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Toby Latcham](mailto:tjl47@cam.ac.uk) +* [Sophie Hall](mailto:sh2000@cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) The agent processes UK solar radiation data downloaded as a CSV file from the Met Office Integrated Data Archive System (MIDAS) to represent it in the World Avatar Knowledge Graph (KG) to make it accessible and queryable. The data is reported in the context of sensors installed in different locations all over the UK. 
diff --git a/Agents/RenewableEnergyAgents/PostcodeSolarEnergyAgent/README.md b/Agents/RenewableEnergyAgents/PostcodeSolarEnergyAgent/README.md index 541d0280dca..091c838c14b 100644 --- a/Agents/RenewableEnergyAgents/PostcodeSolarEnergyAgent/README.md +++ b/Agents/RenewableEnergyAgents/PostcodeSolarEnergyAgent/README.md @@ -1,8 +1,8 @@ # An agent to represent and query solar data reported by the Newcastle Urban Observatory. ### Authors -* [Toby Latcham](tjl47@cam.ac.uk) -* [Sophie Hall](sh2000@cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Toby Latcham](mailto:tjl47@cam.ac.uk) +* [Sophie Hall](mailto:sh2000@cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) The agent processes solar energy data consumed across the UK to represent it in the World Avatar Knowledge Graph (KG) to make it accessible and queryable. The data includes annual electricity consumption statistics and daily electricity consumption patterns for each month at a postcode. diff --git a/Agents/RenewableEnergyAgents/PostcodeWindEnergyAgent/README.md b/Agents/RenewableEnergyAgents/PostcodeWindEnergyAgent/README.md index 54aa050c984..171c9b54cdc 100644 --- a/Agents/RenewableEnergyAgents/PostcodeWindEnergyAgent/README.md +++ b/Agents/RenewableEnergyAgents/PostcodeWindEnergyAgent/README.md @@ -1,8 +1,8 @@ # An agent to represent and query solar data reported by the Newcastle Urban Observatory. ### Authors -* [Toby Latcham](tjl47@cam.ac.uk) -* [Sophie Hall](sh2000@cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Toby Latcham](mailto:tjl47@cam.ac.uk) +* [Sophie Hall](mailto:sh2000@cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) The agent processes wind energy data consumed across the UK to represent it in the World Avatar Knowledge Graph (KG) to make it accessible and queryable. The data includes annual electricity consumption statistics and daily electricity consumption patterns for each month at a postcode. 
diff --git a/Agents/RenewableEnergyAgents/UrbanObservatoryWindSensorAgent/README.md b/Agents/RenewableEnergyAgents/UrbanObservatoryWindSensorAgent/README.md index 6c0679e5935..fe269609711 100644 --- a/Agents/RenewableEnergyAgents/UrbanObservatoryWindSensorAgent/README.md +++ b/Agents/RenewableEnergyAgents/UrbanObservatoryWindSensorAgent/README.md @@ -1,8 +1,8 @@ # An agent to represent and query solar data reported by the Newcastle Urban Observatory. ### Authors -* [Toby Latcham](tjl47@cam.ac.uk) -* [Sophie Hall](sh2000@cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Toby Latcham](mailto:tjl47@cam.ac.uk) +* [Sophie Hall](mailto:sh2000@cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) The agent processes wind data including wind direction, wind speed and wind gust downloaded as a CSV file from the Newcastle Urban Observatory to represent it in the World Avatar Knowledge Graph (KG) to make it accessible and queryable. The data is reported in the context of sensors positioned around Newcastle. diff --git a/Deploy/stacks/psdt/stack-data-uploader-inputs/data/dgm1/contours/README.md b/Deploy/stacks/psdt/stack-data-uploader-inputs/data/dgm1/contours/README.md index 0310497d7f5..4c234885a36 100644 --- a/Deploy/stacks/psdt/stack-data-uploader-inputs/data/dgm1/contours/README.md +++ b/Deploy/stacks/psdt/stack-data-uploader-inputs/data/dgm1/contours/README.md @@ -2,4 +2,4 @@ Open the files from the `elevation` folder in QGIS. Then, [extract vector contou Optionally, [smooth](https://gis.stackexchange.com/questions/346049/making-elevation-contours-of-raster-smoother-using-qgis) the contours as follows. In the Processing Toolbox, type `smooth` into search bar, double click `SAGA Line Smoothing` as method, select Improved SIA model, 20 iterations, sigma 3.0. Save the layer as shape file, and copy the file into the present folder. 
-NB To [label](https://opensourceoptions.com/blog/how-to-create-contour-lines-and-labels-with-qgis/) the contours in QGIS, select the `Labels` tab from the Layer Styling panel and choose `Single Labels` from the drop-down. The `Value` field of the labels tab specifies which column to use for labeling. Select the `ELEV` column for the Value. +NB To [label](https://opensourceoptions.com/how-to-create-contour-lines-and-labels-with-qgis/) the contours in QGIS, select the `Labels` tab from the Layer Styling panel and choose `Single Labels` from the drop-down. The `Value` field of the labels tab specifies which column to use for labeling. Select the `ELEV` column for the Value. diff --git a/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md b/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md index e8bafaaa799..7d6eb16e5c9 100644 --- a/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md +++ b/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md @@ -1,12 +1,12 @@ # Helper class to create PSR-7 server request - + [![Latest Version](https://img.shields.io/github/release/Nyholm/psr7-server.svg?style=flat-square)](https://github.com/Nyholm/psr7-server/releases) [![Code Coverage](https://img.shields.io/scrutinizer/coverage/g/Nyholm/psr7-server.svg?style=flat-square)](https://scrutinizer-ci.com/g/Nyholm/psr7-server) [![Quality Score](https://img.shields.io/scrutinizer/g/Nyholm/psr7-server.svg?style=flat-square)](https://scrutinizer-ci.com/g/Nyholm/psr7-server) [![Total Downloads](https://poser.pugx.org/nyholm/psr7-server/downloads)](https://packagist.org/packages/nyholm/psr7-server) [![Monthly Downloads](https://poser.pugx.org/nyholm/psr7-server/d/monthly.png)](https://packagist.org/packages/nyholm/psr7-server) [![Software License](https://img.shields.io/badge/license-MIT-brightgreen.svg?style=flat-square)](LICENSE) - + A helper class that can create ANY PSR-7 server request. 
## Installation From 28a7fcbf8de43b850cd5b0b476d709e08b8dc576 Mon Sep 17 00:00:00 2001 From: Myles MacDonald <65673247+mayomatsuda@users.noreply.github.com> Date: Sun, 13 Apr 2025 16:35:53 -0400 Subject: [PATCH 26/30] Dev markdown link check (#12) * dev-markdown-link-check: add github workflow to check markdown links. Resolves #464 * fix markdown links * fix broken links * fix readmes * fix links * fix broken links and configure github action * add on merge for modified files * fix config * add on merge fix * finish link fixes * finish link fixes * rename workflow * test * undo test * fix links --------- Co-authored-by: Myles MacDonald --- .../MetOfficeSolarSensorAgent/README.md | 6 +++--- .../PostcodeSolarEnergyAgent/README.md | 6 +++--- .../RenewableEnergyAgents/PostcodeWindEnergyAgent/README.md | 6 +++--- .../UrbanObservatoryWindSensorAgent/README.md | 6 +++--- .../stack-data-uploader-inputs/data/dgm1/contours/README.md | 2 +- .../web/website/site/vendor/kodus/psr7-server/README.md | 4 ++-- 6 files changed, 15 insertions(+), 15 deletions(-) diff --git a/Agents/RenewableEnergyAgents/MetOfficeSolarSensorAgent/README.md b/Agents/RenewableEnergyAgents/MetOfficeSolarSensorAgent/README.md index 520952af198..ec5b0ccd27f 100644 --- a/Agents/RenewableEnergyAgents/MetOfficeSolarSensorAgent/README.md +++ b/Agents/RenewableEnergyAgents/MetOfficeSolarSensorAgent/README.md @@ -1,8 +1,8 @@ # An agent to represent and query UK solar data reported by the Met Office. ### Authors -* [Toby Latcham](tjl47@cam.ac.uk) -* [Sophie Hall](sh2000@cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Toby Latcham](mailto:tjl47@cam.ac.uk) +* [Sophie Hall](mailto:sh2000@cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) The agent processes UK solar radiation data downloaded as a CSV file from the Met Office Integrated Data Archive System (MIDAS) to represent it in the World Avatar Knowledge Graph (KG) to make it accessible and queryable. 
The data is reported in the context of sensors installed in different locations all over the UK. diff --git a/Agents/RenewableEnergyAgents/PostcodeSolarEnergyAgent/README.md b/Agents/RenewableEnergyAgents/PostcodeSolarEnergyAgent/README.md index 541d0280dca..091c838c14b 100644 --- a/Agents/RenewableEnergyAgents/PostcodeSolarEnergyAgent/README.md +++ b/Agents/RenewableEnergyAgents/PostcodeSolarEnergyAgent/README.md @@ -1,8 +1,8 @@ # An agent to represent and query solar data reported by the Newcastle Urban Observatory. ### Authors -* [Toby Latcham](tjl47@cam.ac.uk) -* [Sophie Hall](sh2000@cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Toby Latcham](mailto:tjl47@cam.ac.uk) +* [Sophie Hall](mailto:sh2000@cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) The agent processes solar energy data consumed across the UK to represent it in the World Avatar Knowledge Graph (KG) to make it accessible and queryable. The data includes annual electricity consumption statistics and daily electricity consumption patterns for each month at a postcode. diff --git a/Agents/RenewableEnergyAgents/PostcodeWindEnergyAgent/README.md b/Agents/RenewableEnergyAgents/PostcodeWindEnergyAgent/README.md index 54aa050c984..171c9b54cdc 100644 --- a/Agents/RenewableEnergyAgents/PostcodeWindEnergyAgent/README.md +++ b/Agents/RenewableEnergyAgents/PostcodeWindEnergyAgent/README.md @@ -1,8 +1,8 @@ # An agent to represent and query solar data reported by the Newcastle Urban Observatory. ### Authors -* [Toby Latcham](tjl47@cam.ac.uk) -* [Sophie Hall](sh2000@cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Toby Latcham](mailto:tjl47@cam.ac.uk) +* [Sophie Hall](mailto:sh2000@cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) The agent processes wind energy data consumed across the UK to represent it in the World Avatar Knowledge Graph (KG) to make it accessible and queryable. 
The data includes annual electricity consumption statistics and daily electricity consumption patterns for each month at a postcode. diff --git a/Agents/RenewableEnergyAgents/UrbanObservatoryWindSensorAgent/README.md b/Agents/RenewableEnergyAgents/UrbanObservatoryWindSensorAgent/README.md index 6c0679e5935..fe269609711 100644 --- a/Agents/RenewableEnergyAgents/UrbanObservatoryWindSensorAgent/README.md +++ b/Agents/RenewableEnergyAgents/UrbanObservatoryWindSensorAgent/README.md @@ -1,8 +1,8 @@ # An agent to represent and query solar data reported by the Newcastle Urban Observatory. ### Authors -* [Toby Latcham](tjl47@cam.ac.uk) -* [Sophie Hall](sh2000@cam.ac.uk) -* [Feroz Farazi](msff2@cam.ac.uk) +* [Toby Latcham](mailto:tjl47@cam.ac.uk) +* [Sophie Hall](mailto:sh2000@cam.ac.uk) +* [Feroz Farazi](mailto:msff2@cam.ac.uk) The agent processes wind data including wind direction, wind speed and wind gust downloaded as a CSV file from the Newcastle Urban Observatory to represent it in the World Avatar Knowledge Graph (KG) to make it accessible and queryable. The data is reported in the context of sensors positioned around Newcastle. diff --git a/Deploy/stacks/psdt/stack-data-uploader-inputs/data/dgm1/contours/README.md b/Deploy/stacks/psdt/stack-data-uploader-inputs/data/dgm1/contours/README.md index 0310497d7f5..4c234885a36 100644 --- a/Deploy/stacks/psdt/stack-data-uploader-inputs/data/dgm1/contours/README.md +++ b/Deploy/stacks/psdt/stack-data-uploader-inputs/data/dgm1/contours/README.md @@ -2,4 +2,4 @@ Open the files from the `elevation` folder in QGIS. Then, [extract vector contou Optionally, [smooth](https://gis.stackexchange.com/questions/346049/making-elevation-contours-of-raster-smoother-using-qgis) the contours as follows. In the Processing Toolbox, type `smooth` into search bar, double click `SAGA Line Smoothing` as method, select Improved SIA model, 20 iterations, sigma 3.0. Save the layer as shape file, and copy the file into the present folder. 
-NB To [label](https://opensourceoptions.com/blog/how-to-create-contour-lines-and-labels-with-qgis/) the contours in QGIS, select the `Labels` tab from the Layer Styling panel and choose `Single Labels` from the drop-down. The `Value` field of the labels tab specifies which column to use for labeling. Select the `ELEV` column for the Value. +NB To [label](https://opensourceoptions.com/how-to-create-contour-lines-and-labels-with-qgis/) the contours in QGIS, select the `Labels` tab from the Layer Styling panel and choose `Single Labels` from the drop-down. The `Value` field of the labels tab specifies which column to use for labeling. Select the `ELEV` column for the Value. diff --git a/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md b/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md index e8bafaaa799..7d6eb16e5c9 100644 --- a/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md +++ b/Deploy/stacks/web/website/site/vendor/kodus/psr7-server/README.md @@ -1,12 +1,12 @@ # Helper class to create PSR-7 server request - + [![Latest Version](https://img.shields.io/github/release/Nyholm/psr7-server.svg?style=flat-square)](https://github.com/Nyholm/psr7-server/releases) [![Code Coverage](https://img.shields.io/scrutinizer/coverage/g/Nyholm/psr7-server.svg?style=flat-square)](https://scrutinizer-ci.com/g/Nyholm/psr7-server) [![Quality Score](https://img.shields.io/scrutinizer/g/Nyholm/psr7-server.svg?style=flat-square)](https://scrutinizer-ci.com/g/Nyholm/psr7-server) [![Total Downloads](https://poser.pugx.org/nyholm/psr7-server/downloads)](https://packagist.org/packages/nyholm/psr7-server) [![Monthly Downloads](https://poser.pugx.org/nyholm/psr7-server/d/monthly.png)](https://packagist.org/packages/nyholm/psr7-server) [![Software License](https://img.shields.io/badge/license-MIT-brightgreen.svg?style=flat-square)](LICENSE) - + A helper class that can create ANY PSR-7 server request. 
## Installation From fd50a158d11c5110e6b901398700aa26188e2188 Mon Sep 17 00:00:00 2001 From: Myles MacDonald Date: Sun, 13 Apr 2025 16:55:49 -0400 Subject: [PATCH 27/30] fix config --- .github/workflows/twa-md-push.json | 7 ++++++- Agents/TrafficIncidentAgent/README.md | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/twa-md-push.json b/.github/workflows/twa-md-push.json index 5eaf84cf24c..024a278b465 100644 --- a/.github/workflows/twa-md-push.json +++ b/.github/workflows/twa-md-push.json @@ -49,12 +49,17 @@ "pattern": "/explore(/[^ ]*)?" }, { - "pattern": "./images/defaults(/[^ ]*)?" + "pattern": "/images/defaults(/[^ ]*)?" + }, + { + "pattern": "mailto:(/[^ ]*)?" } ], + "retryCount": 3, "aliveStatusCodes": [ 200, 403, + 503, 0 ] } \ No newline at end of file diff --git a/Agents/TrafficIncidentAgent/README.md b/Agents/TrafficIncidentAgent/README.md index d3787d53e73..b4476ec85a2 100644 --- a/Agents/TrafficIncidentAgent/README.md +++ b/Agents/TrafficIncidentAgent/README.md @@ -32,7 +32,7 @@ The TrafficIncidentAgent should be pulled automatically with the stack-manager, ### 5.2 Starting with the stack-manager -The agent has been implemented to work in the stack, which requires the TrafficIncidentAgent Docker container to be deployed in the stack. To do so, place [TrafficIncidentAgent.json](stack-manager-config/inputs/config/services/TrafficIncidentAgent.json) in the [stack-manager config directory]. +The agent has been implemented to work in the stack, which requires the TrafficIncidentAgent Docker container to be deployed in the stack. To do so, place [TrafficIncidentAgent.json](./stack-manager-config/inputs/config/services/trafficincidentagent.json) in the [stack-manager config directory]. Then, run `./stack.sh start ` in the [stack-manager] main folder. This will spin up the agent in the stack. 
From 45e27a0e8bfa67fd5862f9bfb32a83e8d93cfff8 Mon Sep 17 00:00:00 2001 From: Myles MacDonald <65673247+mayomatsuda@users.noreply.github.com> Date: Sun, 13 Apr 2025 16:56:30 -0400 Subject: [PATCH 28/30] Dev markdown link check (#13) * dev-markdown-link-check: add github workflow to check markdown links. Resolves #464 * fix markdown links * fix broken links * fix readmes * fix links * fix broken links and configure github action * add on merge for modified files * fix config * add on merge fix * finish link fixes * finish link fixes * rename workflow * test * undo test * fix links * fix config --------- Co-authored-by: Myles MacDonald --- .github/workflows/twa-md-push.json | 7 ++++++- Agents/TrafficIncidentAgent/README.md | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/twa-md-push.json b/.github/workflows/twa-md-push.json index 5eaf84cf24c..024a278b465 100644 --- a/.github/workflows/twa-md-push.json +++ b/.github/workflows/twa-md-push.json @@ -49,12 +49,17 @@ "pattern": "/explore(/[^ ]*)?" }, { - "pattern": "./images/defaults(/[^ ]*)?" + "pattern": "/images/defaults(/[^ ]*)?" + }, + { + "pattern": "mailto:(/[^ ]*)?" } ], + "retryCount": 3, "aliveStatusCodes": [ 200, 403, + 503, 0 ] } \ No newline at end of file diff --git a/Agents/TrafficIncidentAgent/README.md b/Agents/TrafficIncidentAgent/README.md index d3787d53e73..b4476ec85a2 100644 --- a/Agents/TrafficIncidentAgent/README.md +++ b/Agents/TrafficIncidentAgent/README.md @@ -32,7 +32,7 @@ The TrafficIncidentAgent should be pulled automatically with the stack-manager, ### 5.2 Starting with the stack-manager -The agent has been implemented to work in the stack, which requires the TrafficIncidentAgent Docker container to be deployed in the stack. To do so, place [TrafficIncidentAgent.json](stack-manager-config/inputs/config/services/TrafficIncidentAgent.json) in the [stack-manager config directory]. 
+The agent has been implemented to work in the stack, which requires the TrafficIncidentAgent Docker container to be deployed in the stack. To do so, place [TrafficIncidentAgent.json](./stack-manager-config/inputs/config/services/trafficincidentagent.json) in the [stack-manager config directory]. Then, run `./stack.sh start ` in the [stack-manager] main folder. This will spin up the agent in the stack. From 88f0f70f23d33b3f516dd079aa6e8c16dd08df44 Mon Sep 17 00:00:00 2001 From: Myles Date: Sat, 13 Sep 2025 14:01:21 -0400 Subject: [PATCH 29/30] fix links --- Agents/HMLandRegistryAgent/README.md | 1 + EntityRDFizer/README.md | 2 +- .../inputs/uploader/data/street_light/bradford/README.md | 2 +- web/twa-vis-framework/library/README.md | 2 +- 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/Agents/HMLandRegistryAgent/README.md b/Agents/HMLandRegistryAgent/README.md index 02a4d6efa3e..111681d3ff2 100644 --- a/Agents/HMLandRegistryAgent/README.md +++ b/Agents/HMLandRegistryAgent/README.md @@ -168,6 +168,7 @@ Markus Hofmeister (mh807@cam.ac.uk), March 2023 [HM Land Registry Open Data]: https://landregistry.data.gov.uk/ [Price Paid Linked Data]: https://landregistry.data.gov.uk/app/root/doc/ppd [UK House Price Index Linked Data]: https://landregistry.data.gov.uk/app/ukhpi/doc + [HM Land Registry SPARQL endpoint]: http://landregistry.data.gov.uk/landregistry/query diff --git a/EntityRDFizer/README.md b/EntityRDFizer/README.md index 8ae3abbf2b9..a49664de3e4 100644 --- a/EntityRDFizer/README.md +++ b/EntityRDFizer/README.md @@ -3,7 +3,7 @@ The `entityrdfizer` project is designed to convert entities of any domain and their data and metadata into RDF. It requires the entities and their data to be provided as inputs in an ABox CSV template, that is filled in with data. 
A group of ABox CSV template files are provided under the following URL: -https://github.com/TheWorldAvatar/ontology/blob/main/KBTemplates/ABox +https://github.com/TheWorldAvatar/ontology/blob/main/templates/ABox # Installation # These instructions will get you a copy of the project up and running on your local machine for development and testing purposes. diff --git a/web/augmented-uk/inputs/uploader/data/street_light/bradford/README.md b/web/augmented-uk/inputs/uploader/data/street_light/bradford/README.md index 23885ad38b7..16b30196193 100644 --- a/web/augmented-uk/inputs/uploader/data/street_light/bradford/README.md +++ b/web/augmented-uk/inputs/uploader/data/street_light/bradford/README.md @@ -1 +1 @@ -Add Bradford street light data in CSV form here from https://www.data.gov.uk/dataset/650db146-1e18-41fa-912a-33edacd8ac85/street-lighting. \ No newline at end of file +Add Bradford street light data in CSV form here from https://www.data.gov.uk/dataset/baf1c264-7160-444b-a4e3-0fe25c5b1e11/street-lighting. \ No newline at end of file diff --git a/web/twa-vis-framework/library/README.md b/web/twa-vis-framework/library/README.md index c22fa14c712..66e6b8f2aea 100644 --- a/web/twa-vis-framework/library/README.md +++ b/web/twa-vis-framework/library/README.md @@ -97,7 +97,7 @@ An overview of bug reports, feature requests, and open PRs can be see using the ### Issues -Bugs should be reported as GitHub issues using the `TWA-VF:` prefix along with a short name for the issue. A detailed description of the issue along with reproduction steps, and if possible, [an image of the issue](https://gist.github.com/namirjahmed/2168f7659c08b6a033e7f6daf8db69a6). +Bugs should be reported as GitHub issues using the `TWA-VF:` prefix along with a short name for the issue. A detailed description of the issue along with reproduction steps, and if possible, an image of the issue. 
Issue reporting a bug should also use the provided `bug` tag and link to the TWA Visualisation Framework project. From 7098006402d14d796203cf2fa66db7617fdff43c Mon Sep 17 00:00:00 2001 From: Myles Date: Sat, 13 Sep 2025 14:35:34 -0400 Subject: [PATCH 30/30] accept 202 --- .github/workflows/twa-md-push.json | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/twa-md-push.json b/.github/workflows/twa-md-push.json index 024a278b465..9b7fdf3eaaa 100644 --- a/.github/workflows/twa-md-push.json +++ b/.github/workflows/twa-md-push.json @@ -58,6 +58,7 @@ "retryCount": 3, "aliveStatusCodes": [ 200, + 202, 403, 503, 0