How to cite OBA

Please cite

Stefancsik R, Balhoff JP, Balk MA, Ball RL, Bello SM, Caron AR, Chesler EJ, de Souza V, Gehrke S, Haendel M, Harris LW, Harris NL, Ibrahim A, Koehler S, Matentzoglu N, McMurry JA, Mungall CJ, Munoz-Torres MC, Putman T, Robinson P, Smedley D, Sollis E, Thessen AE, Vasilevsky N, Walton DO, Osumi-Sutherland D.
The Ontology of Biological Attributes (OBA)-computational traits for the life sciences.
Mamm Genome. 2023 Apr.

doi:10.1007/s00335-023-09992-1
PMID: 37076585

See also Zenodo.
How to contribute to OBA

See https://github.com/obophenotype/bio-attribute-ontology/blob/master/CONTRIBUTING.md.
Local development workflows with OBA

Edit files

There are three kinds of files to edit in OBA, explained in the following:

• The normal OBA edit file (src/ontology/oba-edit.obo)
• The OBA SSSOM mappings (src/mappings/*)
• The OBA DOSDP pattern files (src/patterns/data/default/*)

The OBA edit file

Unlike in other ontologies, the OBA edit file (src/ontology/oba-edit.obo) is barely used. Power curators occasionally use the oba-edit.obo file to edit the class hierarchy, but as per OBA principles, the class hierarchy is mostly created by reasoning. Most OBA editing happens by editing the DOSDP templates (see below).
The OBA SSSOM mappings

• OBA-VT SSSOM Mapping: The official mappings between OBA and VT. Source of truth is on Google Sheets, not GitHub.
• OBA-EFO SSSOM Mapping: The official mappings between OBA and EFO. Source of truth is on Google Sheets, not GitHub.
• OBA-EFO Excluded Mapping: Terms from EFO that have been reviewed and deemed out of scope for OBA. Source of truth is on Google Sheets, not GitHub.
• OBA-VT Excluded Mapping: Terms from VT that have been reviewed and deemed out of scope for OBA. Source of truth is on Google Sheets, not GitHub.
The OBA DOSDP patterns

All OBA DOSDP data tables can be found in src/patterns/data/default/.

DOSDP tables are the main way to edit OBA. You can edit the DOSDP TSV files using a regular text editor or a spreadsheet editor.

The main rule: review the diff before making a pull request. The diff should only show the things you have actually changed.
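A quick way to do this from the repository root is to scope the diff to the pattern tables (a minimal sketch; the path is the one given above):

git diff -- src/patterns/data/default/   # only the rows you actually edited should appear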

Updating SSSOM mapping files

cd src/ontology
sh run.sh make sync_sssom_google_sheets

Creating/updating terms
Preparing alignment work

1. Update the data required for the alignment: sh run.sh make prepare_oba_alignment -B. This will take a while, as many ontologies are downloaded and synchronised. (Steps 1-3 are sketched on the command line after this list.)
2. Start Jupyter in your local environment.
3. Open src/scripts/oba_alignment.ipynb in your Jupyter environment and run all cells; a full run can take overnight.
4. While the above is running, read everything in the notebook carefully to get a sense of what it is doing. The methods section can be skipped during the first read-through, but it will likely be necessary to review it in later stages of the alignment process.
5. The notebook run will produce the following files:
   • src/mappings/oba-vt-unreviewed.sssom.tsv: VT mappings identified by the pipeline but not yet reviewed
   • src/mappings/oba-vt-missed.sssom.tsv: VT mappings identified by looking at OBA IRIs (no need for review)
   • src/mappings/oba-vt-unmapped.sssom.tsv: VT terms that have not been mapped so far (excluding reviewed and candidate mappings)
   • src/mappings/oba-vt-unreviewed.dosdp.tsv: VT terms with candidate DOSDP pattern fillings
   • src/mappings/oba-efo-unreviewed.sssom.tsv: see the VT analogue above
   • src/mappings/oba-efo-unmapped.sssom.tsv: see the VT analogue above
   • src/mappings/oba-efo-unreviewed.dosdp.tsv: see the VT analogue above
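A minimal command-line sketch of steps 1-3, assuming you start in the repository root and have Jupyter installed locally:

cd src/ontology
sh run.sh make prepare_oba_alignment -B   # slow: downloads and synchronises many ontologies
cd ../scripts
jupyter notebook oba_alignment.ipynb      # then run all cells; a full run can take overnight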

Curating EFO alignment

1. Follow the steps in the preparing alignment workflow.
2. The central pieces for the EFO alignment, if of interest, can be found in the section starting with OBA-EFO Alignment in src/scripts/oba_alignment.ipynb.
3. Review src/mappings/oba-efo-unreviewed.sssom.tsv. These are the new mapping suggestions as determined by the mapping pipeline. Review the mappings one by one and copy them into the official EFO-OBA SSSOM mapping curated on Google Sheets.
4. Review src/mappings/oba-efo-unreviewed.dosdp.tsv. This is the hardest part. The table only provides a handful of suggestions on how to map each label using DOSDP. You will have to go through the table subject_id by subject_id and identify the correct corresponding DOSDP pattern tables. Important: when you create an ID (the defined_class column of the DOSDP table) for an EFO-sourced class, you have to add a respective mapping to the official EFO-OBA SSSOM mapping curated on Google Sheets.
5. Optional: Review src/mappings/oba-efo-unmapped.sssom.tsv to figure out what to do about entirely unmapped EFO terms. These may need some careful planning and adjustments of the alignment code.

Curating VT alignment

1. Follow the steps in the preparing alignment workflow.
2. The central pieces for the VT alignment, if of interest, can be found in the section starting with OBA-VT Alignment in src/scripts/oba_alignment.ipynb.
3. Review src/mappings/oba-vt-missed.sssom.tsv. This should ideally be empty - these are mappings that have not been factored into the official oba-vt mappings yet, but have the VT-style IRI (OBA:VT0010108), which suggests that the class was derived from the respective VT id. Add all mappings in oba-vt-missed.sssom.tsv to the official VT-OBA SSSOM mapping curated on Google Sheets.
4. Review src/mappings/oba-vt-unreviewed.sssom.tsv. These are the new mapping suggestions as determined by the mapping pipeline. Review the mappings one by one and copy them into the official VT-OBA SSSOM mapping curated on Google Sheets.
5. Review src/mappings/oba-vt-unreviewed.dosdp.tsv. This is the hardest part. The table only provides a handful of suggestions on how to map each label using DOSDP. You will have to go through the table subject_id by subject_id and identify the correct corresponding DOSDP pattern tables. Important: when you create an ID (the defined_class column of the DOSDP table) for a VT-sourced class, you add a special IRI that looks like OBA:VT123. This way, mappings will be curated automatically by the framework and you don't have to add them manually.
6. Optional: Review src/mappings/oba-vt-unmapped.sssom.tsv to figure out what to do about entirely unmapped VT terms. These may need some careful planning and adjustments of the alignment code.

Adding "measured in" annotations

+
    +
  1. Go to Google sheet for "measured in" annotations and add annotations
  2. +
  3. Go to cd src/ontology in your terminal
  4. +
  5. Create a new branch with your favourite tool
  6. +
  7. Run sh run.sh make sync_templates_google_sheets to sync templates from Google sheets
  8. +
  9. Convince yourself in your favourite git diff tool (GitHub Desktop!) that the changed tables look as intended!
  10. +
  11. In your terminal, run sh run.sh make recreate-measured_in
  12. +
  13. When completed, the file src/ontology/components/measured_in.owl should have been updated. Look at the diff again to convince yourself that the changes look as intended. You may want to open oba-edit.obo in Protege to look at one or two changes!
  14. +
  15. Make sure you are on your new branch created above and commit changes to branch.
  16. +
  17. Publish branch (push to GitHub), make pull request, assign reviewer.
  18. +
+
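A sketch of the sequence on the command line, assuming you start in the repository root; the branch name is hypothetical:

cd src/ontology
git checkout -b measured-in-update            # hypothetical branch name
sh run.sh make sync_templates_google_sheets   # sync the templates from Google Sheets
git diff                                      # confirm only the intended table changes appear
sh run.sh make recreate-measured_in           # rebuild components/measured_in.owl
git commit -am "Sync measured_in annotations from Google Sheets"
git push --set-upstream origin measured-in-update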

Adding synonyms

1. Follow the instructions for adding "measured in" annotations above, except:
2. Add the synonyms in this sheet here.
3. Instead of sh run.sh make recreate-measured_in, run sh run.sh make recreate-synonyms.

Importing terms and updating DOSDP patterns

When creating new OBA terms using DOSDP patterns, for example the entity-attribute pattern, it may be necessary to import terms from other ontologies like CHEBI or PRO, the PRotein Ontology. However, CHEBI, NCBITAXON and PRO are too large to be managed easily as standard imports. To mitigate this, they are managed as slims, which are located here:
• NCBITAXON: https://github.com/obophenotype/ncbitaxon/tree/master/subsets
• PRO: https://github.com/obophenotype/pro_obo_slim
• CHEBI: https://github.com/obophenotype/chebi_obo_slim

Sometimes a new term you are using in a DOSDP pattern is not yet in a slim, so you will have to refresh the slim first.
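Before refreshing, it can be worth checking whether the term is already in the slim's seed file. A minimal sketch against the PRO slim; the term ID is hypothetical, and the seed format (CURIE vs. full IRI) may differ:

grep "PR:000012345" pro_obo_slim/seed.txt   # empty output means the term is not in the slim yet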

Refresh LIPID MAPS

LIPID MAPS is currently (03.06.2023) not imported but curated manually, because https://www.lipidmaps.org/resources/sparql does not work. To update the LIPID MAPS imports, you have to:

• Add the LIPID term to https://github.com/obophenotype/bio-attribute-ontology/blob/master/src/templates/external.tsv
• When refreshing the imports in the usual way, this TSV file (a ROBOT template) is built in place of a proper LIPID MAPS mirror.

Refresh PRO Slim

Note: you will need at least 32 GB of RAM for this.

git clone https://github.com/obophenotype/pro_obo_slim
cd pro_obo_slim
git checkout -b refresh20230312
# Add your terms to seed.txt, then SORT THE FILE and check that there are no duplicate terms.
# Make sure that Docker is running. To set up Docker, refer to https://oboacademy.github.io/obook/howto/odk-setup/
sh odk.sh make all
git commit -a -m "refresh slim after adding terms for OBA"
git push --set-upstream origin refresh20230312
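The sorting and duplicate check mentioned in the comments above can be done with standard tools, for example:

sort seed.txt -o seed.txt   # sort the seed file in place
uniq -d seed.txt            # prints duplicate lines, if any; the output should be empty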

When this is done, make a pull request.

Refresh CHEBI Slim

git clone https://github.com/obophenotype/chebi_obo_slim
cd chebi_obo_slim
# Follow the instructions for the PRO slim from here.

The full process of refreshing the DOSDP patterns:
1. Check whether the new PRO / CHEBI terms are already in the slim; if they are not, add them as described above.
2. Run sh run.sh make IMP=false MIR=false ../patterns/definitions.owl to generate a new pattern ontology component.
3. Run sh run.sh make refresh-merged to import the new terms.
4. Run sh run.sh make IMP=false MIR=false ../patterns/definitions.owl again to generate the labels correctly where new terms are concerned.
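Run back to back, the sequence looks like this (a sketch, assuming you start in src/ontology with the slims already refreshed):

sh run.sh make IMP=false MIR=false ../patterns/definitions.owl
sh run.sh make refresh-merged
sh run.sh make IMP=false MIR=false ../patterns/definitions.owl   # rerun so labels of the new terms resolve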


A brief history of OBA

The following page gives an overview of the history of OBA.

TBD.

OBA - The Ontology of Biological Attributes

The Ontology of Biological Attributes (OBA) is an ontology of biological traits and serves as a tool for data integration, helping researchers find new insights.

Welcome to the OBA online documentation!

Useful links

You can find descriptions of the standard ontology engineering workflows (ODK) here.

Metrics for OBA (based on OAK)

Ontologies:

• obo:oba/oba-baseplus.owl (obo:oba/releases/2023-01-23/oba-baseplus.owl)

The raw data (ontology metrics) can be found on GitHub.

How to add new GWAS requested trait terms to OBA and EFO?

1. Check if an appropriate OBA trait pattern already exists

Look into bio-attribute-ontology/src/patterns/dosdp-patterns/ and check whether the GWAS trait term would fit into any of the existing patterns.
- [ ] If yes, skip to the next step.
- [ ] If none of the existing OBA trait patterns look appropriate, create a new pattern.
In some cases, the requested GWAS term may not fit the scope of OBA. In that case, a new EFO term can be created without an equivalent OBA trait term.
2. Create new OBA term(s)

For example, for a trait involving the 'age at which disease manifestations first appear', fill in the table disease_onset.tsv. Create a unique OBA identifier by using the next available ID from your assigned range.

Also fill in the appropriate columns for the variable fields as specified in the actual DOS-DP yaml template file. For example, in the case of the disease_onset.tsv table, you must use MONDO disease or disorder terms in the disease column.
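To see which variable columns a given table expects, you can print its header row (a sketch, using the pattern table location described in the editors guide):

head -1 src/patterns/data/default/disease_onset.tsv   # prints the tab-separated column names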

NOTE: Keep track of the IDs from your range that you have already assigned.
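One way to list the highest IDs already used across the pattern tables, assuming the defined_class column comes first and holds OBA CURIEs (as in most DOSDP tables):

cut -f1 src/patterns/data/default/*.tsv | grep "^OBA:" | sort | tail -5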

• Create a pull request (PR) with the edits. Request other people to review your PR.
• If approved, merge the PR into the 'master' branch after the review(s).

3. OBA release

The newly created trait terms can be imported into EFO from a publicly released version of OBA.

To run the OBA release pipeline, follow the instructions in the document Release Workflow for the Ontology of Biological Attributes (OBA).

4. Provide the new OBA terms to EFO

- [ ] Add the newly created OBA term IRI and all its component term IRIs to oba_terms.txt so that they get included in the EFO dynamic imports. By component terms I mean all those terms that are used in the DOS-DP data filler table to compose the OBA term (terms from MONDO, UBERON, PATO, etc.), as specified in the corresponding DOS-DP pattern file.
  NOTE: use full IRIs, i.e.:
  http://purl.obolibrary.org/obo/OBA_2040167
  http://purl.obolibrary.org/obo/MONDO_0000481
- [ ] This step depends on a new public OBA release.
- [ ] You need to accomplish this in an EFO PR.

Release Workflow for the Ontology of Biological Attributes (OBA)

• Make sure you have the latest ODK installed by pulling the Docker image:

docker pull obolibrary/odkfull
• Merge as many open PRs as possible.
• Start with a fresh copy of the master branch. For the next steps you can use GitHub Desktop or the command line instructions below.

git pull

Create a new branch:

git checkout -b release-202X-XX-XX
In a terminal window, start the release pipeline:

sh run.sh make prepare_release_fast

NOTE: It is recommended to uncouple running the release pipeline from refreshing imports. However, in case you need to refresh all the imports, you can achieve that with:

sh run.sh make prepare_release -B
• If everything went all right, you should see a message similar to the one below in your terminal window:

...
Release files are now in ../.. - now you should commit, push and make a release on your git hosting site such as GitHub or GitLab
make[1]: Leaving directory '/work/src/ontology'
Please remember to update your ODK image from time to time: https://oboacademy.github.io/obook/howto/odk-update/

Check and package the release artefacts for OBA

• You should also check in Protégé that the new terms you just added look fine.
• Open some of the OBA release artefacts in Protégé and check for any potential errors. For example, check whether there are any unsatisfiable classes in oba.obo (a command-line alternative is sketched after this list).
• Create a pull request and get another pair of eyes to review it.
• Merge your release-202X-XX-XX branch into the master branch once it is approved by a reviewer and all the automatic quality control checks have passed.
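One command-line way to surface unsatisfiable classes, assuming ROBOT is installed (robot reason fails and lists the offending classes if any are unsatisfiable):

robot reason --reasoner ELK --input oba.obo --output /tmp/oba-reasoned.obo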
Make a release and include all the release files (including oba-base.owl and oba.obo) as binary files

Use the GitHub web interface to create a new OBA release.

• There should be 15 recently modified files in the root directory of the local copy of the repo:
  1. oba-base.json
  2. oba-base.obo
  3. oba-base.owl
  4. oba-baseplus.json
  5. oba-baseplus.obo
  6. oba-baseplus.owl
  7. oba-basic.json
  8. oba-basic.obo
  9. oba-basic.owl
  10. oba-full.json
  11. oba-full.obo
  12. oba-full.owl
  13. oba.json
  14. oba.obo
  15. oba.owl
• NOTE: GitHub imposes size constraints on repositories. The combined size of the OBA artefacts exceeds the GitHub-imposed size limit. For this reason, some of the large release artefact files are not under GitHub version control. However, all 15 files need to be included in the public release as binary files. For background information on release artefacts, see the ODK documentation.
• Navigate to the 'Releases' page of OBA.
• Click Draft a new release. Click Choose a tag and create a new tag based on the date on which your ontologies were built. You can find this, for example, by looking into the oba.obo file and checking the data-version: property (see the sketch after this list). The date needs to be prefixed with a v, so, for example, v2022-10-17.
• For the title, you can use the date of the ontology build again, for example 2022-10-17 release.
• Drag and drop the files listed above into the binaries box, or select them manually, using the GitHub web interface.
• You can automatically generate release notes.
• Click Publish release. Done.
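A quick way to find the build date for the tag, assuming you are in the directory containing the release files:

grep "^data-version:" oba.obo   # the date in this line, prefixed with v, becomes the release tag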

Introduction to Continuous Integration Workflows with ODK

+

Historically, most repos have been using Travis CI for continuous integration testing and building, but due to +runtime restrictions, we recently switched a lot of our repos to GitHub actions. You can set up your repo with CI by adding +this to your configuration file (src/ontology/oba-odk.yaml):

ci:
  - github_actions

When updating your repo, you will notice a new file being added: .github/workflows/qc.yml.

This file contains your CI logic, so if you need to change or add anything, this is the place!

Alternatively, if your repo is in GitLab instead of GitHub, you can set up your repo with GitLab CI by adding this to your configuration file (src/ontology/oba-odk.yaml):

ci:
  - gitlab-ci

This will add a file called .gitlab-ci.yml in the root of your repo.


Editors Workflow


The editors workflow is one of the formal workflows to ensure that the ontology is developed correctly according to ontology engineering principles. There are a few different editors workflows:

  1. Local editing workflow: Editing the ontology in your local environment by hand, using tools such as Protégé, ROBOT templates or DOSDP patterns.
  2. Completely automated data pipeline (GitHub Actions)
  3. DROID workflow

This document only covers the first editing workflow, but more will be added in the future.


Local editing workflow


Workflow requirements:

  • git
  • GitHub
  • docker
  • editing tool of choice, e.g. Protégé, your favourite text editor, etc.

1. Create issue


Ensure that there is a ticket on your issue tracker that describes the change you are about to make. While this seems optional, this is a very important part of the social contract of building an ontology - no change to the ontology should be performed without a good ticket, describing the motivation and nature of the intended change.


2. Update main branch


In your local environment (e.g. your laptop), make sure you are on the main (prev. master) branch and ensure that you have all the upstream changes, for example:

git checkout master
git pull

3. Create feature branch


Create a new branch. Per convention, we try to use meaningful branch names such as:
- issue23removeprocess (where issue 23 is the related issue on GitHub)
- issue26addcontributor
- release20210101 (for releases)


On your command line, this looks like this:

git checkout -b issue23removeprocess

4. Perform edit


Using your editor of choice, perform the intended edit. For example:


Protégé

  1. Open src/ontology/oba-edit.owl in Protégé
  2. Make the change
  3. Save the file

TextEdit

  1. Open src/ontology/oba-edit.owl in TextEdit (or Sublime, Atom, Vim, Nano)
  2. Make the change
  3. Save the file

Consider the following when making the edit.

  1. According to our development philosophy, the only places that should be manually edited are:
     • src/ontology/oba-edit.owl
     • Any ROBOT templates you chose to use (the TSV files only)
     • Any DOSDP data tables you chose to use (the TSV files, and potentially the associated patterns)
     • components (anything in src/ontology/components), see here.
  2. Imports should not be edited (any edits will be flushed out with the next update). However, refreshing imports is a potentially breaking change - and is discussed elsewhere.
  3. Changes should usually be small. Adding or changing 1 term is great. Adding or changing 10 related terms is ok. Adding or changing 100 or more terms at once should be considered very carefully.

5. Check the Git diff

This step is very important. Rather than simply trusting that your change had the intended effect, we should always use a git diff as a first pass for sanity checking.

In our experience, having a visual git client like GitHub Desktop or Sourcetree is really helpful for this part. In case you prefer the command line:

git status
git diff

6. Quality control

Now it's time to run your quality control checks. This can either happen locally (6a) or through your continuous integration system (8/6b).

6a. Local testing

If you choose to run your tests locally:

sh run.sh make IMP=false test

This will run the whole set of configured ODK tests on the ontology, including your change. If you have a complex DOSDP pattern pipeline, you may want to add PAT=false to skip the potentially lengthy process of rebuilding the patterns.

sh run.sh make IMP=false PAT=false test

7. Pull request


When you are happy with the changes, you commit your changes to your feature branch, push them upstream (to GitHub) and create a pull request. For example:

git add NAMEOFCHANGEDFILES
git commit -m "Added biological process term #12"
git push -u origin issue23removeprocess

Then you go to your project on GitHub, and create a new pull request from the branch, for example: https://github.com/INCATools/ontology-development-kit/pulls
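If you use the GitHub CLI, the pull request can also be created from the terminal. A minimal sketch, assuming gh is installed and authenticated (the title and body here are illustrative):

# Open a pull request for the current branch via the GitHub CLI
gh pr create --title "Remove process term" --body "Fixes #23"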

There is a lot of great advice on how to write pull requests, but at the very least you should:
- mention the tickets affected: see #23 to link to a related ticket, or fixes #23 if, by merging this pull request, the ticket is fixed. Tickets in the latter case will be closed automatically by GitHub when the pull request is merged.
- summarise the changes in a few sentences. Consider the reviewer: what would they want to know right away?
- if the diff is large, provide instructions on how best to review the pull request (sometimes there are many changed files, but only one important change).

8/6b. Continuous Integration Testing

If you didn't run any local quality control checks (see 6a), you should have Continuous Integration (CI) set up, for example:
- Travis
- GitHub Actions


More on how to set this up here. Once the pull request is created, the CI will automatically trigger. If all is fine, it will show up green, otherwise red.

9. Community review

Once all the automatic tests have passed, it is important to put a second set of eyes on the pull request. Ontologies are inherently social, in that they represent some kind of community consensus on how a domain is organised conceptually. This may sound like highbrow talk, but it is very important that, as an ontology editor, you have your work validated by the community you are trying to serve (e.g. your colleagues, other contributors etc.). In our experience, it is hard to get more than one review on a pull request, and two is great. You can set up GitHub branch protection to actually require a review before a pull request can be merged! We recommend this.

This step seems daunting to some hopelessly under-resourced ontology teams, but we recommend putting it high up on your list of priorities: train a colleague, reach out!

10. Merge and cleanup


When the QC is green and the reviews are in (approvals), it is time to merge the pull request. After the pull request is merged, remember to delete the branch as well (this option will show up as a big button right after you have merged the pull request). If you have not done so, close all the associated tickets fixed by the pull request.

11. Changelog (Optional)


It is sometimes difficult to keep track of changes made to an ontology. Some ontology teams opt to document changes in a changelog (simply a text file in your repository) so that when release day comes, you know everything you have changed. This is advisable at least for major changes (such as a new release system, a new pattern or template etc.).


ManageAutomatedTest


Constraint violation checks

We can define custom checks using SPARQL. SPARQL queries describe bad modelling patterns (missing labels, misspelt URIs, and many more) in the ontology. If these queries return any results, then the build will fail. Custom checks are designed to be run as part of GitHub Actions Continuous Integration testing, but they can also be run locally.


Steps to add a constraint violation check:

  1. Add the SPARQL query in src/sparql. The name of the file should end with -violation.sparql. Please give it a name that helps to understand which violation the query checks for (see the illustrative example after these steps).
  2. Add the name of the new file to the ODK configuration file src/ontology/oba-odk.yaml:
     1. Include the name of the file (without the -violation.sparql part) in the list under the custom_sparql_checks key, which sits inside the robot_report key.
     2. If the robot_report or custom_sparql_checks keys are not present, add this code block to the end of the file:

        robot_report:
          release_reports: False
          fail_on: ERROR
          use_labels: False
          custom_profile: True
          report_on:
            - edit
          custom_sparql_checks:
            - name-of-the-file-check

  3. Update the repository so your new SPARQL check will be included in the QC:

sh run.sh make update_repo
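As an illustration, a hypothetical check that fails whenever a class has no label could be added like this (the file name and query are examples only, not an official OBA check):

# Create a custom SPARQL violation check (illustrative)
cat > src/sparql/missing-label-violation.sparql <<'EOF'
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
SELECT ?entity WHERE {
  ?entity a owl:Class .
  FILTER(isIRI(?entity))
  FILTER NOT EXISTS { ?entity rdfs:label ?label }
}
EOF

You would then list missing-label under custom_sparql_checks and run the update_repo step above.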

Updating the Documentation


The documentation for OBA is managed in two places (relative to the repository root):

  1. The docs directory contains all the files that pertain to the content of the documentation (more below)
  2. The mkdocs.yaml file contains the documentation config, in particular its navigation bar and theme.

The documentation is hosted using GitHub pages, on a special branch of the repository (called gh-pages). It is important that this branch is never deleted - it contains all the files GitHub pages needs to render and deploy the site. It is also important to note that the gh-pages branch should never be edited manually. All changes to the docs happen inside the docs directory on the main branch.


Editing the docs


Changing content


All the documentation is contained in the docs directory, and is managed in Markdown. Markdown is a very simple and convenient way to produce text documents with formatting instructions, and is very easy to learn - it is also used, for example, in GitHub issues. This is a normal editing workflow:

  1. Open the .md file you want to change in an editor of choice (a simple text editor is often best). IMPORTANT: Do not edit any files in the docs/odk-workflows/ directory. These files are managed by the ODK system and will be overwritten when the repository is upgraded! If you wish to change these files, make an issue on the ODK issue tracker.
  2. Perform the edit and save the file.
  3. Commit the file to a branch, and create a pull request as usual.
  4. If your development team likes your changes, merge the docs into the master branch.
  5. Deploy the documentation (see below).

Deploy the documentation


The documentation is not automatically updated from the Markdown, and needs to be deployed deliberately. To do this, perform the following steps:

  1. In your terminal, navigate to the edit directory of your ontology, e.g.: cd oba/src/ontology
  2. Now you are ready to build the docs as follows: sh run.sh make update_docs
     MkDocs now sets off to build the site from the markdown pages. You will be asked to:
     • Enter your username
     • Enter your password (see here for using GitHub access tokens instead)
       IMPORTANT: GitHub deprecated password-based authentication in 2021, so make sure you read up on personal access tokens.

If everything was successful, you will see a message similar to this one:


INFO - Your documentation should shortly be available at: https://obophenotype.github.io/bio-attribute-ontology/

  3. Just to double-check, you can now navigate to your documentation pages (usually https://obophenotype.github.io/bio-attribute-ontology/). Just make sure you give GitHub 2-5 minutes to build the pages!


The release workflow


The release workflow recommended by the ODK is based on GitHub releases and works as follows:

  1. Run a release with the ODK
  2. Review the release
  3. Merge to main branch
  4. Create a GitHub release

These steps are outlined in detail in the following.


Run a release with the ODK


Preparation:

  1. Ensure that all your pull requests are merged into your main (master) branch
  2. Make sure that all changes to master are committed to GitHub (git status should say that there are no modified files)
  3. Locally make sure you have the latest changes from master (git pull)
  4. Checkout a new branch (e.g. git checkout -b release-2021-01-01)
  5. You may or may not want to refresh your imports as part of your release strategy (see here)
  6. Make sure you have the latest ODK installed by running docker pull obolibrary/odkfull
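Taken together, the preparation steps might look like this on the command line (a sketch; the branch name and date are illustrative):

git checkout master
git pull
git checkout -b release-2021-01-01
docker pull obolibrary/odkfull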

To actually run the release, you:

  1. Open a command line terminal window and navigate to the src/ontology directory (cd oba/src/ontology)
  2. Run the release pipeline: sh run.sh make prepare_release -B. Note that for some ontologies, this process can take up to 90 minutes - especially if there are large ontologies you depend on, like PRO or CHEBI.
  3. If everything went well, you should see the following output on your machine: Release files are now in ../.. - now you should commit, push and make a release on your git hosting site such as GitHub or GitLab.

This will create all the specified release targets (OBO, OWL, JSON, and the variants, ont-full and ont-base) and copy them into your release directory (the top level of your repo).


Review the release

  1. (Optional) Rough check. This step is frequently skipped, but for the more paranoid among us (like the author of this doc), this is a 3 minute additional effort for some peace of mind. Open the main release (oba.owl) in your favourite development environment (e.g. Protégé) and eyeball the hierarchy. We recommend two simple checks:
     1. Does the very top level of the hierarchy look ok? This means that all new terms have been imported/updated correctly.
     2. Does at least one change that you know should be in this release appear? For example, a new class. This means that the release was actually based on the recent edit file.
  2. Commit your changes to the branch and make a pull request.
  3. In your GitHub pull request, review the following three files in detail (based on our experience):
     1. oba.obo - this reflects a useful subset of the whole ontology (everything that can be covered by OBO format). OBO format has that speaking for it: it is very easy to review!
     2. oba-base.owl - this reflects the asserted axioms in your ontology that you have actually edited.
     3. Ideally also take a look at oba-full.owl, which may reveal interesting new inferences you did not know about. Note that the diff of this file is sometimes quite large.
  4. Like with every pull request, we recommend to always employ a second set of eyes when reviewing a PR!

Merge the main branch


Once your CI checks have passed, and your reviews are completed, you can now merge the branch into your main branch (don't forget to delete the branch afterwards - a big button will appear after the merge is finished).


Create a GitHub release

  1. Go to your releases page on GitHub by navigating to your repository, and then clicking on releases (usually on the right, for example: https://github.com/obophenotype/bio-attribute-ontology/releases). Then click "Draft new release".
  2. As the tag version you need to choose the date on which your ontologies were built. You can find this, for example, by looking at the oba.obo file and checking the data-version: property. The date needs to be prefixed with a v, so, for example, v2020-02-06.
  3. You can write whatever you want in the release title, but we typically write the date again. The description underneath should contain a concise list of changes or term additions.
  4. Click "Publish release". Done.

Debugging typical ontology release problems


Problems with memory


When you are dealing with large ontologies, you need a lot of memory. When you see error messages relating to large ontologies such as CHEBI, PRO, NCBITAXON, or Uberon, you should think of memory first, see here.


Problems when using OBO format based tools


Sometimes you will get cryptic error messages when using legacy tools using OBO format, such as the ontology release tool (OORT), which is also available as part of the ODK docker container. In these cases, you need to track down what axiom or annotation actually caused the breakdown. In our experience (in about 60% of the cases) the problem lies with duplicate annotations (def, comment) which are illegal in OBO. Here is an example recipe of how to deal with such a problem:

  1. If you get a message like make: *** [cl.Makefile:84: oort] Error 255, you might have an OORT error.
  2. To debug this, in your terminal enter sh run.sh make IMP=false PAT=false oort -B (assuming you are already in the ontology folder in your directory).
  3. This should show you where the error is in the log (e.g. multiple different definitions).
     WARNING: THE FIX BELOW IS NOT IDEAL, YOU SHOULD ALWAYS TRY TO FIX UPSTREAM IF POSSIBLE
  4. Open oba-edit.owl in Protégé, find the offending term and delete all offending axioms (e.g. delete ALL definitions if the problem was "multiple def tags not allowed") and save. While this is not ideal, as it will remove all definitions from that term, they will be added back again when the term is fixed in the ontology it was imported from and added back in.
  5. Rerun sh run.sh make IMP=false PAT=false oort -B and, if it all passes, commit your changes to a branch and make a pull request as usual.

Managing your ODK repository


Updating your ODK repository

Your ODK repository's configuration is managed in src/ontology/oba-odk.yaml. The ODK Project Configuration Schema defines all possible parameters that can be used in this config YAML. Once you have made your changes, you can run the following to apply them to the repository:

sh run.sh make update_repo

There are a large number of options that can be set to configure your ODK, but we will only discuss a few of them here.


NOTE for Windows users:

You may get a cryptic failure such as Set Illegal Option - if the update script located in src/scripts/update_repo.sh was saved using Windows line endings. These need to be changed to Unix line endings. In Notepad++, for example, you can click on Edit->EOL Conversion->Unix LF to change this.
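If you have the dos2unix tool available, the same fix can be applied from the command line (a sketch; dos2unix is not part of the ODK itself):

# Convert the update script to Unix line endings
dos2unix src/scripts/update_repo.sh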


Managing imports


You can use the update repository workflow described on this page to perform the following operations to your imports:

  1. Add a new import
  2. Modify an existing import
  3. Remove an import you no longer want
  4. Customise an import

We will discuss all these workflows in the following.


Add new import


To add a new import, you first edit your odk config as described above, adding an id to the product list in the import_group section (for the sake of this example, we assume you already import RO, and your goal is to also import GO):

import_group:
  products:
    - id: ro
    - id: go

Note: our ODK file should only have one import_group which can contain multiple imports (in the products section). Next, you run the update repo workflow to apply these changes. Note that by default, this module is going to be a SLME Bottom module, see here. To change that or customise your module, see section "Customise an import". To finalise the addition of your import, perform the following steps:

  1. Add an import statement to your src/ontology/oba-edit.owl file. We suggest doing this using a text editor, by simply copying an existing import declaration and renaming it to the new ontology import, for example as follows:
     ...
     Ontology(<http://purl.obolibrary.org/obo/oba.owl>
     Import(<http://purl.obolibrary.org/obo/oba/imports/ro_import.owl>)
     Import(<http://purl.obolibrary.org/obo/oba/imports/go_import.owl>)
     ...
  2. Add your imports redirect to your catalog file src/ontology/catalog-v001.xml, for example:
     <uri name="http://purl.obolibrary.org/obo/oba/imports/go_import.owl" uri="imports/go_import.owl"/>
  3. Test whether everything is in order:
     1. Refresh your import
     2. Open in your Ontology Editor of choice (Protégé) and ensure that the expected terms are imported.

Note: The catalog file src/ontology/catalog-v001.xml has one purpose: redirecting imports from URLs to local files. For example, if you have

Import(<http://purl.obolibrary.org/obo/oba/imports/go_import.owl>)

in your editors file (the ontology) and

<uri name="http://purl.obolibrary.org/obo/oba/imports/go_import.owl" uri="imports/go_import.owl"/>

in your catalog, tools like ROBOT or Protégé will recognize the statement in the catalog file to redirect the URL http://purl.obolibrary.org/obo/oba/imports/go_import.owl to the local file imports/go_import.owl (which is in your src/ontology directory).


Modify an existing import


If you simply wish to refresh your import in light of new terms, see here. If you wish to change the type of your module see section "Customise an import".


Remove an existing import


To remove an existing import, perform the following steps:

  1. Remove the import declaration from your src/ontology/oba-edit.owl.
  2. Remove the id from your src/ontology/oba-odk.yaml, e.g. - id: go from the list of products in the import_group.
  3. Run the update repo workflow.
  4. Delete the associated files manually:
     • src/imports/go_import.owl
     • src/imports/go_terms.txt
  5. Remove the respective entry from the src/ontology/catalog-v001.xml file.

Customise an import


By default, an import module extracted from a source ontology will be a SLME module, see here. There are various options to change the default.


The following change to your repo config (src/ontology/oba-odk.yaml) will switch the go import from an SLME module to a simple ROBOT filter module:

import_group:
  products:
    - id: ro
    - id: go
      module_type: filter

A ROBOT filter module essentially imports all external terms declared by your ontology (see here on how to declare external terms to be imported). Note that the filter module does not consider terms/annotations from namespaces other than the base namespace of the ontology itself. For example, in the example of GO above, only annotations/axioms related to the GO base IRI (http://purl.obolibrary.org/obo/GO_) would be considered. This behaviour can be changed by adding additional base IRIs as follows:

import_group:
  products:
    - id: go
      module_type: filter
      base_iris:
        - http://purl.obolibrary.org/obo/GO_
        - http://purl.obolibrary.org/obo/CL_
        - http://purl.obolibrary.org/obo/BFO

If you wish to customise your import entirely, you can specify your own ROBOT command to do so. To do that, add the following to your repo config (src/ontology/oba-odk.yaml):

import_group:
  products:
    - id: ro
    - id: go
      module_type: custom

Now add a new goal in your custom Makefile (src/ontology/oba.Makefile, not src/ontology/Makefile).

imports/go_import.owl: mirror/go.owl imports/go_terms_combined.txt
    if [ $(IMP) = true ]; then $(ROBOT) query -i $< --update ../sparql/preprocess-module.ru \
        extract -T imports/go_terms_combined.txt --force true --individuals exclude --method BOT \
        query --update ../sparql/inject-subset-declaration.ru --update ../sparql/postprocess-module.ru \
        annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) --output $@.tmp.owl && mv $@.tmp.owl $@; fi

Now feel free to change this goal to do whatever you wish it to do! It probably makes some sense (albeit not being a strict necessity) to leave most of the goal intact and replace only:

extract -T imports/go_terms_combined.txt --force true --individuals exclude --method BOT \

with another ROBOT pipeline.
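For example, swapping the SLME BOT extraction for a STAR module would only change the extract line. A sketch, assuming a STAR module suits your use case (see the ROBOT extract documentation for the available methods):

extract -T imports/go_terms_combined.txt --force true --individuals exclude --method STAR \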


Add a component

A component is an import which belongs to your ontology, i.e. it is managed by you and your team.

  1. Open src/ontology/oba-odk.yaml
  2. If you don't have it yet, add a new top level section components
  3. Under the components section, add a new section called products. This is where all your components are specified.
  4. Under the products section, add a new component, e.g. - filename: mycomp.owl

Example

components:
  products:
    - filename: mycomp.owl

When running sh run.sh make update_repo, a new file src/ontology/components/mycomp.owl will be created which you can edit as you see fit. Typical ways to edit:

  1. Using a ROBOT template to generate the component (see below)
  2. Manually curating the component separately with Protégé or any other editor
  3. Providing a components/mycomp.owl: make target in src/ontology/oba.Makefile and providing a custom command to generate the component
     • WARNING: Note that the custom rule to generate the component MUST NOT depend on any other ODK-generated file such as seed files and the like (see issue).
  4. Providing an additional attribute for the component in src/ontology/oba-odk.yaml, source, to specify that this component should simply be downloaded from somewhere on the web.

Adding a new component based on a ROBOT template


Since ODK 1.3.2, it is possible to simply link a ROBOT template to a component without having to specify any of the import logic. In order to add a new component that is connected to one or more template files, follow these steps:

  1. Open src/ontology/oba-odk.yaml.
  2. Make sure that use_templates: TRUE is set in the global project options. You should also make sure that use_context: TRUE is set in case you are using prefixes in your templates that are not known to ROBOT, such as OMOP:, CPONT: and more. All non-standard prefixes you are using should be added to config/context.json.
  3. Add another component to the products section.
  4. To activate this component to be template-driven, simply say: use_template: TRUE. This will create an empty template for you in the templates directory, which will automatically be processed when recreating the component (e.g. run.bat make recreate-mycomp).
  5. If you want to use more than one template, use the templates field to add as many template names as you wish. ODK will look for them in the src/templates directory.
  6. Advanced: If you want to provide additional processing options, you can use the template_options field. This should be a string with options from robot template. One typical example of additional options you may want to provide is --add-prefixes config/context.json, to ensure the prefix map of your context is provided to ROBOT, see above.

Example:

components:
  products:
    - filename: mycomp.owl
      use_template: TRUE
      template_options: --add-prefixes config/context.json
      templates:
        - template1.tsv
        - template2.tsv

Note: if your mirror is particularly large and complex, read this ODK recommendation.


Repository structure


The main kinds of files in the repository:

  1. Release files
  2. Imports
  3. Components

Release files

Release files are the files that are considered part of the official ontology release and are to be used by the community. A detailed description of the release artefacts can be found here.


Imports


Imports are subsets of external ontologies that contain terms and axioms you would like to re-use in your ontology. These are considered "external", like dependencies in software development, and are not included in your "base" product, which is the release artefact which contains only those axioms that you personally maintain.

These are the current imports in OBA:

Import | URL | Type
ro | http://purl.obolibrary.org/obo/ro.owl | None
chebi | https://raw.githubusercontent.com/obophenotype/chebi_obo_slim/main/chebi_slim.owl | None
goplus | http://purl.obolibrary.org/obo/go/go-base.owl | None
go | http://purl.obolibrary.org/obo/go.owl | None
pato | http://purl.obolibrary.org/obo/pato.owl | None
omo | http://purl.obolibrary.org/obo/omo.owl | None
hp | http://purl.obolibrary.org/obo/hp.owl | None
mondo | http://purl.obolibrary.org/obo/mondo.owl | None
ncbitaxon | http://purl.obolibrary.org/obo/ncbitaxon/subsets/taxslim.owl | None
uberon | http://purl.obolibrary.org/obo/uberon.owl | None
cl | http://purl.obolibrary.org/obo/cl.owl | None
nbo | http://purl.obolibrary.org/obo/nbo.owl | None
pr | https://raw.githubusercontent.com/obophenotype/pro_obo_slim/master/pr_slim.owl | None
so | http://purl.obolibrary.org/obo/so.owl | None
po | http://purl.obolibrary.org/obo/po.owl | None
bfo | http://purl.obolibrary.org/obo/bfo.owl | None
swisslipids | http://purl.obolibrary.org/obo/swisslipids.owl | None
lipidmaps | http://purl.obolibrary.org/obo/lipidmaps.owl | None

Components


Components, in contrast to imports, are considered full members of the ontology. This means that any axiom in a component is also included in the ontology base - which means it is considered native to the ontology. While this sounds complicated, consider this: conceptually, no component should be part of more than one ontology. If that seems to be the case, we are most likely talking about an import. Components are often not needed for ontologies, but there are some use cases:

  1. There is an automated process that generates and re-generates a part of the ontology
  2. A part of the ontology is managed in ROBOT templates
  3. The expressivity of the component is higher than the format of the edit file. For example, people still choose to manage their ontology in OBO format (they should not), missing out on a lot of OWL features. They may choose to manage logic that is beyond OBO in a specific OWL component.

These are the components in OBA:

Filename | URL
obsoletes.owl | None
synonyms.owl | None

Setting up your Docker environment for ODK use

One of the most frequent problems with running the ODK for the first time is failure because of lack of memory. This can look like a Java OutOfMemory exception, but more often than not it will appear as something like an Error 137. There are two places you need to consider to set your memory:

  1. Your src/ontology/run.sh (or run.bat) file. You can set the memory in there by adding robot_java_args: '-Xmx8G' to your src/ontology/oba-odk.yaml file, see for example here.
  2. Set your Docker memory. By default, it should be about 10-20% more than your robot_java_args variable. You can manage your memory settings by right-clicking on the Docker whale in your system bar-->Preferences-->Resources-->Advanced, see picture below.
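To verify how much memory Docker can actually use, a quick check like the following should work (a sketch; the output format varies between Docker versions):

# Print Docker's total memory allocation in bytes
docker info --format '{{.MemTotal}}'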

(Screenshot: Docker memory settings under Preferences-->Resources-->Advanced)


Update Imports Workflow


This page discusses how to update the contents of your imports, like adding or removing terms. If you are looking to customise imports, like changing the module type, see here.


Importing a new term

Note: some ontologies now use a merged-import system to manage dynamic imports; for these, please follow the instructions in the section titled "Using the Base Module approach".


Importing a new term is split into two sub-phases:

  1. Declaring the terms to be imported
  2. Refreshing imports dynamically

Declaring terms to be imported


There are three ways to declare terms that are to be imported from an external ontology. Choose the appropriate one for your particular scenario (all three can be used in parallel if need be):

  1. Protégé-based declaration
  2. Using term files
  3. Using the custom import template

Protégé-based declaration

This workflow is to be avoided, but may be appropriate if the editor does not have access to the ODK docker container. This approach also applies to ontologies that use the base module import approach.

  1. Open your ontology (edit file) in Protégé (5.5+).
  2. Select 'owl:Thing'.
  3. Add a new class as usual.
  4. Paste the full IRI in the 'Name:' field, for example, http://purl.obolibrary.org/obo/CHEBI_50906.
  5. Click 'OK'.

(Screenshot: Adding Classes in Protégé)

Now you can use this term, for example, to construct logical definitions. The next time the imports are refreshed (see how to refresh here), the metadata (labels, definitions, etc.) for this term is imported from the respective external source ontology and becomes visible in your ontology.


Using term files

Every import has, by default, a term file associated with it, which can be found in the imports directory. For example, if you have a GO import in src/ontology/go_import.owl, you will also have an associated term file src/ontology/go_terms.txt. You can add terms in there simply as a list:

GO:0008150
GO:0008151

Now you can run the refresh imports workflow and the two terms will be imported.
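For example, appending a term and regenerating the import might look like this (a sketch; the GO ID is illustrative):

cd src/ontology
echo "GO:0008152" >> go_terms.txt
sh run.sh make PAT=false imports/go_import.owl -B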


Using the custom import template


This workflow is appropriate if:

  1. You prefer to manage all your imported terms in a single file (rather than multiple files like in the "Using term files" workflow above).
  2. You wish to augment your imported ontologies with additional information. This requires a cautionary discussion.

To enable this workflow, you add the following to your ODK config file (src/ontology/oba-odk.yaml), and update the repository:

use_custom_import_module: TRUE

Now you can manage your imported terms directly in the custom external terms template, which is located at src/templates/external_import.owl. Note that this file is a ROBOT template, and can, in principle, be extended to include any axioms you like. Before extending the template, however, read the following carefully.

The main purpose of the custom import template is to enable the management of all terms to be imported in a centralised place. To enable that, you do not have to do anything other than maintaining the template. So if you, say, currently import APOLLO_SV:00000480, and you wish to import APOLLO_SV:00000532, you simply add a row like this:

ID  Entity Type
ID  TYPE
APOLLO_SV:00000480  owl:Class
APOLLO_SV:00000532  owl:Class

When the imports are refreshed (see the imports refresh workflow), the term(s) will simply be imported from the configured ontologies.

Now, if you wish to extend the Makefile (which is beyond these instructions) and add, say, synonyms to the imported terms, you can do that, but you need to (a) preserve the ID and ENTITY columns and (b) ensure that the ROBOT template is otherwise valid, see here.


WARNING. Note that doing this is a widespread antipattern (see related issue). You should not change the axioms of terms that do not belong into your ontology unless necessary - such changes should always be pushed into the ontology where they belong. However, since people are doing it, whether the OBO Foundry likes it or not, at least using the custom imports module as described here localises the changes to a single simple template and ensures that none of the annotations added this way are merged into the base file.


Refresh imports

If you want to refresh the import yourself (this may be necessary to pass the Travis tests), and you have the ODK installed, you can do the following (using go as an example):

First, you navigate in your terminal to the ontology directory (underneath src in your oba root directory).

cd src/ontology

Then, you regenerate the import, which will now include any new terms you have added. Note: you must have Docker installed.

sh run.sh make PAT=false imports/go_import.owl -B

Since ODK 1.2.27, it is also possible to simply run the following, which is the same as the above:

sh run.sh make refresh-go

Note that in case you changed the defaults, you need to add IMP=true and/or MIR=true to the command below:

sh run.sh make IMP=true MIR=true PAT=false imports/go_import.owl -B

If you wish to skip refreshing the mirror, i.e. skip downloading the latest version of the source ontology for your import (e.g. go.owl for your go import) you can set MIR=false instead, which will do the exact same thing as the above, but is easier to remember:

sh run.sh make IMP=true MIR=false PAT=false imports/go_import.owl -B

Using the Base Module approach

Since ODK 1.2.31, we support an entirely new approach to generating modules: using base files. The idea is to only import axioms that actually belong to the source ontology. A base file is a subset of the ontology that only contains those axioms that nominally belong there. In other words, the base file does not contain any axioms that belong to another ontology. An example would be this:


Imagine this being the full Uberon ontology:

Axiom 1: BFO:123 SubClassOf BFO:124
Axiom 2: UBERON:123 SubClassOf BFO:123
Axiom 3: UBERON:124 SubClassOf UBERON:123

The base file is the set of all axioms that are about UBERON terms:

Axiom 2: UBERON:123 SubClassOf BFO:123
Axiom 3: UBERON:124 SubClassOf UBERON:123

I.e.

Axiom 1: BFO:123 SubClassOf BFO:124

gets removed.

The base file pipeline is a bit more complex than the normal pipelines, because of the logical interactions between the imported ontologies. This is solved by first merging all mirrors into one huge file and then extracting one mega-module from it.

Example: let's say we are importing terms from Uberon, GO and RO in our ontologies. When we use the base pipelines, we:

1) First obtain the base files (usually by simply downloading them, but there is also an option now to create them with ROBOT)
2) Merge all base files into one big pile
3) Extract a single module imports/merged_import.owl


The first implementation of this pipeline is PATO, see https://github.com/pato-ontology/pato/blob/master/src/ontology/pato-odk.yaml.

To check if your ontology uses this method, check src/ontology/oba-odk.yaml to see if use_base_merging: TRUE is declared under import_group.

If your ontology uses the Base Module approach, please use the following steps:


First, add the term to be imported to the term file associated with it (see above "Using term files" section if this is not clear to you)

Next, you navigate in your terminal to the ontology directory (underneath src in your oba root directory).

cd src/ontology

Then refresh imports by running

sh run.sh make imports/merged_import.owl

Note: if your mirrors are already up-to-date, you can run sh run.sh make no-mirror-refresh-merged, which skips the mirror refresh.

This requires quite a bit of memory on your local machine, so if you encounter an error, it might be due to a lack of memory on your computer. A solution would be to create a ticket in an issue tracker requesting the term to be imported, and one of the local devs should pick this up and run the import for you.

Lastly, restart Protégé, and the term should be imported and ready to be used.


Adding components to an ODK repo

For details on what components are, please see the component section of the repository file structure document.

To add custom components to an ODK repo, follow these steps:

1) Locate your ODK YAML file and open it with your favourite text editor (src/ontology/oba-odk.yaml)
2) Check whether there is already a components section in the YAML file; if not, add it accordingly, adding the name of your component:

components:
  products:
    - filename: your-component-name.owl

3) Add the component to your catalog file (src/ontology/catalog-v001.xml)

<uri name="http://purl.obolibrary.org/obo/oba/components/your-component-name.owl" uri="components/your-component-name.owl"/>

4) Add the component to the edit file (src/ontology/oba-edit.obo)
For .obo format:

import: http://purl.obolibrary.org/obo/oba/components/your-component-name.owl

For .owl format:

Import(<http://purl.obolibrary.org/obo/oba/components/your-component-name.owl>)

5) Refresh your repo by running sh run.sh make update_repo - this should create a new file in src/ontology/components.
6) In your custom Makefile (src/ontology/oba.Makefile), add a goal for your component. In this example, the goal is a ROBOT template.

$(COMPONENTSDIR)/your-component-name.owl: $(SRC) ../templates/your-component-template.tsv
    $(ROBOT) template --template ../templates/your-component-template.tsv \
  annotate --ontology-iri $(ONTBASE)/$@ --output $(COMPONENTSDIR)/your-component-name.owl

(If using a ROBOT template, do not forget to add your template tsv in src/templates/)


7) Make the file by running sh run.sh make components/your-component-name.owl


Metrics OBA/OBA-BASEPLUS


IRI: http://purl.obolibrary.org/obo/oba/oba-baseplus.owl


Version IRI: http://purl.obolibrary.org/obo/oba/releases/2023-01-23/oba-baseplus.owl


Entities and axioms

Metric | Value
Annotation properties | 25
Axioms | 66080
Logical axioms | 29270
Classes | 11947
Object properties | 6
Data properties | 0
Individuals | 0

Expressivity

Metric | Value
Expressivity | ALE
OWL2 | True
OWL2 DL | False
OWL2 EL | False
OWL2 QL | False
OWL2 RL | False

Axiom types

Metric | Value
AnnotationAssertion | 24843
EquivalentClasses | 7769
Declaration | 11967
SubClassOf | 21501

Entity namespaces: axiom counts by namespace

Namespace | Axiom count
PR | 69
oboInOwl | 12
owl | 2
GO | 708
HP | 1
xsd | 2
CL | 346
PATO | 139
BFO | 2
rdfs | 2
OBA | 7854
dc | 114
MONDO | 40
CHEBI | 302
IAO | 3
UBERON | 2369
NBO | 109
SO | 4
RO | 4
PO | 6
dc | 2

Class expressions used

Metric | Value
Class | 79152
ObjectSomeValuesFrom | 15797
ObjectIntersectionOf | 8683

The raw data (ontology metrics) can be found on GitHub.
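These numbers can be regenerated with ROBOT's measure command. A sketch, assuming ROBOT 1.8+ and a local copy of the release file (the exact flags may vary between versions):

robot measure --input oba-baseplus.owl --metrics extended --output oba-baseplus-metrics.tsv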

OBA - The Ontology of Biological Attributes

The Ontology of Biological Attributes (OBA) is a tool for data integration: it provides computational trait terms that help researchers connect and compare data across the life sciences.

Welcome to the OBA online documentation!

"},{"location":"#useful-links","title":"Useful links","text":"
  • GitHub Repository
  • Issue tracker
  • OBO Library Homepage
  • Editors guide
  • How to cite OBA

You can find descriptions of the standard ontology engineering workflows (ODK) here.

"},{"location":"cite/","title":"How to cite OBA","text":"

Please cite:

Stefancsik R, Balhoff JP, Balk MA, Ball RL, Bello SM, Caron AR, Chesler EJ, de Souza V, Gehrke S, Haendel M, Harris LW, Harris NL, Ibrahim A, Koehler S, Matentzoglu N, McMurry JA, Mungall CJ, Munoz-Torres MC, Putman T, Robinson P, Smedley D, Sollis E, Thessen AE, Vasilevsky N, Walton DO, Osumi-Sutherland D. The Ontology of Biological Attributes (OBA) - computational traits for the life sciences. Mamm Genome. 2023 Apr.

doi:10.1007/s00335-023-09992-1 PMID: 37076585

See also Zenodo.

"},{"location":"contributing/","title":"How to contribute to OBA","text":"

See https://github.com/obophenotype/bio-attribute-ontology/blob/master/CONTRIBUTING.md.

"},{"location":"editors-guide/","title":"Local development workflows with OBA","text":""},{"location":"editors-guide/#edit-files","title":"Edit files","text":"

There are three kinds of files to edit in OBA, explained in the following:

  • The normal OBA edit file (src/ontology/oba-edit.obo)
  • The OBA SSSOM mappings (src/mappings/*)
  • The OBA DOSDP pattern files (src/patterns/data/default/*)
"},{"location":"editors-guide/#the-oba-edit-file","title":"The OBA edit file","text":"

As opposed to other ontologies, the OBA edit file (src/ontology/oba-edit.obo) is barely used. Power curators will use the oba-edit.obo file occasionally to edit the class hierarchy, but as per OBA principles, the class hierarchy is mostly created by reasoning. Most OBA editing happens by editing the DOSDP templates, see below.

"},{"location":"editors-guide/#the-oba-sssom-mappings","title":"The OBA SSSOM mappings","text":"
  • OBA-VT SSSOM Mapping: The official mappings between OBA and VT. Source of truth is on Google Sheets, not GitHub.
  • OBA-EFO SSSOM Mapping: The official mappings between OBA and EFO. Source of truth is on Google Sheets, not GitHub.
  • OBA-EFO Excluded Mapping: Terms from EFO that have been reviewed and deemed out of scope for OBA. Source of truth is on Google Sheets, not GitHub.
  • OBA-VT Excluded Mapping: Terms from VT that have been reviewed and deemed out of scope for OBA. Source of truth is on Google Sheets, not GitHub.
"},{"location":"editors-guide/#the-oba-dosdp-patterns","title":"The OBA DOSDP patterns","text":"

All OBA DOSDP data tables can be found here.

DOSDP tables are the main way to edit OBA. You can edit the DOSDP TSV files using a regular text editor or a spreadsheet editor.

The main rule is: make sure to review the diff before making a pull request - the diff should only show the things you have actually changed.

"},{"location":"editors-guide/#updating-sssom-mapping-files","title":"Updating SSSOM mapping files","text":"
cd src/ontology
sh run.sh make sync_sssom_google_sheets
"},{"location":"editors-guide/#creatingupdating-terms","title":"Creating/updating terms","text":""},{"location":"editors-guide/#preparing-alignment-work","title":"Preparing alignment work","text":"
  1. Update the data required for the alignment: sh run.sh make prepare_oba_alignment -B. This will take a while, as a lot of ontologies are downloaded and synchronised.
  2. Start Jupyter in your local environment.
  3. Open src/scripts/oba_alignment.ipynb in your Jupyter environment and run it all overnight.
  4. While the above is running, read everything in the notebook carefully to get a sense of what the notebook is doing. The methods section can be skipped during the first read-through, but it will likely be necessary to review these in later stages of the alignment process.
  5. The notebook run will produce the following files:
    • src/mappings/oba-vt-unreviewed.sssom.tsv: VT mappings identified by the pipeline but not reviewed
    • src/mappings/oba-vt-missed.sssom.tsv: VT mappings identified by looking at OBA IRIs (no need for review)
    • src/mappings/oba-vt-unmapped.sssom.tsv: VT terms that have not been mapped so far (excluding reviewed and candidate mappings)
    • src/mappings/oba-vt-unreviewed.dosdp.tsv: VT terms with candidate DOSDP pattern fillings
    • src/mappings/oba-efo-unreviewed.sssom.tsv: see the VT analogue above
    • src/mappings/oba-efo-unmapped.sssom.tsv: see the VT analogue above
    • src/mappings/oba-efo-unreviewed.dosdp.tsv: see the VT analogue above
"},{"location":"editors-guide/#curating-efo-alignment","title":"Curating EFO alignment","text":"
  1. Follow the steps in the preparing alignment workflow.
  2. The central pieces for the EFO alignment, if of interest, can be found in the section starting with OBA-EFO Alignment in src/scripts/oba_alignment.ipynb.
  3. Review src/mappings/oba-efo-unreviewed.sssom.tsv. These are the new mapping suggestions as determined by the mapping pipeline. Review mappings 1 x 1 and copy them into the official EFO-OBA SSSOM mapping curated on Google Sheets.
  4. Review src/mappings/oba-efo-unreviewed.dosdp.tsv. This is the hardest part. The table only provides a handful of suggestions on how to map the label using DOSDP. You will have to go through the table subject_id by subject_id and identify the correct corresponding DOSDP pattern tables. Important: when you create an ID (defined_class column of the DOSDP table) for an EFO-sourced class, you have to add a respective mapping to the official EFO-OBA SSSOM mapping curated on Google Sheets.
  5. Optional: Review src/mappings/oba-efo-unmapped.sssom.tsv to figure out what to do about entirely unmapped EFO terms. These may need some careful planning and adjustments of the alignment code.
"},{"location":"editors-guide/#curating-vt-alignment","title":"Curating VT alignment","text":"
  1. Follow the steps in the preparing alignment workflow.
  2. The central pieces for the VT alignment, if of interest, can be found in the section starting with OBA-VT Alignment in src/scripts/oba_alignment.ipynb.
  3. Review src/mappings/oba-vt-missed.sssom.tsv. This should ideally be empty - these are mappings that have not been factored into the official oba-vt mappings yet, but have the VT-style IRI (OBA:VT0010108), which suggests that the class was derived from the respective VT id. Add all mappings in oba-vt-missed.sssom.tsv to the official VT-OBA SSSOM mapping curated on Google Sheets.
  4. Review src/mappings/oba-vt-unreviewed.sssom.tsv. These are the new mapping suggestions as determined by the mapping pipeline. Review mappings 1 x 1 and copy them into the official VT-OBA SSSOM mapping curated on Google Sheets.
  5. Review src/mappings/oba-vt-unreviewed.dosdp.tsv. This is the hardest part. The table only provides a handful of suggestions on how to map the label using DOSDP. You will have to go through the table subject_id by subject_id and identify the correct corresponding DOSDP pattern tables. Important: when you create an ID (defined_class column of the DOSDP table) for a VT-sourced class, you add a special IRI that looks like OBA:VT123. This way, mappings will be curated automatically by the framework and you don't have to add them manually.
  6. Optional: Review src/mappings/oba-vt-unmapped.sssom.tsv to figure out what to do about entirely unmapped VT terms. These may need some careful planning and adjustments of the alignment code.
"},{"location":"editors-guide/#adding-measured-in-annotations","title":"Adding \"measured in\" annotations","text":"
  1. Go to Google sheet for \"measured in\" annotations and add annotations
  2. In your terminal, go to the src/ontology directory (cd src/ontology); the full command sequence for steps 2-9 is sketched after this list
  3. Create a new branch with your favourite tool
  4. Run sh run.sh make sync_templates_google_sheets to sync templates from Google sheets
  5. Convince yourself in your favourite git diff tool (GitHub Desktop!) that the changed tables look as intended!
  6. In your terminal, run sh run.sh make recreate-measured_in
  7. When completed, the file src/ontology/components/measured_in.owl should have been updated. Look at the diff again to convince yourself that the changes look as intended. You may want to open oba-edit.obo in Protege to look at one or two changes!
  8. Make sure you are on your new branch created above and commit changes to branch.
  9. Publish branch (push to GitHub), make pull request, assign reviewer.
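
Putting the command-line portion of the steps above together, a minimal sketch might look as follows. The branch name and commit message are illustrative, and the git steps can equally be done with a visual tool such as GitHub Desktop:

cd src/ontology\ngit checkout -b measured-in-update\nsh run.sh make sync_templates_google_sheets\n# review the template diff before proceeding\nsh run.sh make recreate-measured_in\n# review the diff of components/measured_in.owl\ngit commit -a -m \"update measured in annotations\"\ngit push --set-upstream origin measured-in-update\n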
"},{"location":"editors-guide/#adding-synonym","title":"Adding synonym","text":"
  1. Follow the instructions for adding \"measured in\" annotations above, except:
  2. Add the synonyms in this sheet here
  3. Instead of sh run.sh make recreate-measured_in, run sh run.sh make recreate-synonyms
"},{"location":"editors-guide/#importing-terms-and-updating-dosdp-patterns","title":"Importing terms and updating DOSDP patterns","text":"

When creating new OBA terms using DOSDP patterns, for example the entity-attribute pattern, it may be necessary to import terms from other ontologies like CHEBI or PRO, the PRotein Ontology. However, CHEBI, NCBITAXON and PRO are too large to be managed easily as standard imports. To mitigate this situation, they can be managed as slims, which are located here:

  • NCBITAXON: https://github.com/obophenotype/ncbitaxon/tree/master/subsets
  • PRO: https://github.com/obophenotype/pro_obo_slim
  • CHEBI: https://github.com/obophenotype/chebi_obo_slim

Sometimes, a new term you are using in a DOSDP pattern is not yet in a slim. So you will have to refresh the slim first.

"},{"location":"editors-guide/#refresh-lipid-maps","title":"Refresh LIPID Maps","text":"

LIPID MAPS is currently (03.06.2023) not imported but curated manually, because the SPARQL endpoint at https://www.lipidmaps.org/resources/sparql does not work. To update the LIPID MAPS terms, you have to:

  • Add a LIPID term to https://github.com/obophenotype/bio-attribute-ontology/blob/master/src/templates/external.tsv
  • When refreshing the imports in the usual way, this TSV file (a ROBOT template) is built in place of a proper LIPID MAPS mirror (see the sketch after this list).
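
After editing the template, the imports can be refreshed in the usual ODK way; a minimal sketch, assuming the per-import refresh targets described later in this document (if your repository uses the merged-import approach, refresh imports/merged_import.owl instead):

cd src/ontology\nsh run.sh make refresh-lipidmaps\n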
"},{"location":"editors-guide/#refresh-pro-slim","title":"Refresh PRO Slim:","text":"

Note: you will need at least 32 GB of RAM for this.

git clone https://github.com/obophenotype/pro_obo_slim\ncd pro_obo_slim\ngit checkout -b refresh20230312\n# Add your terms to seed.txt, and then SORT THE FILE and check that there are no duplicated terms.\n# Make sure that DOCKER is running. To set up DOCKER refer to https://oboacademy.github.io/obook/howto/odk-setup/\nsh odk.sh make all\ngit commit -a -m \"refresh slim after adding terms for OBA\"\ngit push --set-upstream origin refresh20230312\n

When this is done, make a pull request.

"},{"location":"editors-guide/#refresh-chebi-slim","title":"Refresh CHEBI Slim","text":"
git clone https://github.com/obophenotype/chebi_obo_slim\ncd chebi_obo_slim\n# Follow the instructions for the PRO slim from here.\n

The full process of refreshing the DOSDP patterns (see the sketch after this list):

  1. Check whether the new PRO/CHEBI terms are in the slims; if they are not, add them as described above.
  2. Run sh run.sh make IMP=false MIR=false ../patterns/definitions.owl to generate a new pattern ontology component.
  3. Run sh run.sh make refresh-merged to import the new terms.
  4. Run sh run.sh make IMP=false MIR=false ../patterns/definitions.owl again to generate the labels correctly where new terms are concerned.
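
The same sequence as commands, run from src/ontology:

sh run.sh make IMP=false MIR=false ../patterns/definitions.owl\nsh run.sh make refresh-merged\nsh run.sh make IMP=false MIR=false ../patterns/definitions.owl\n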

"},{"location":"history/","title":"A brief history of OBA","text":"

The following page gives an overview of the history of OBA.

TBD.

"},{"location":"metrics/","title":"Metrics","text":"

There are currently two sets of metrics for this ontology:

  • ROBOT metrics
  • OAK metrics
"},{"location":"oak-metrics/","title":"Metrics for OBA (based on OAK)","text":"

Ontologies:

  • obo:oba/oba-baseplus.owl (obo:oba/releases/2023-01-23/oba-baseplus.owl)

The raw data (ontology metrics) can be found on GitHub.
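
If you want to recompute comparable statistics yourself, the OAK command line (runoak) is one plausible route; this is a sketch assuming you have oaklib installed locally, and is not necessarily how the published metrics were produced:

runoak -i sqlite:obo:oba statistics\n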

"},{"location":"oba-gwas-sop/","title":"How to add new GWAS requested trait terms to OBA and EFO?","text":""},{"location":"oba-gwas-sop/#1-check-if-an-appropriate-oba-trait-pattern-already-exists","title":"1. Check if an appropriate OBA trait pattern already exists","text":"

Look into bio-attribute-ontology/src/patterns/dosdp-patterns/ and check if the GWAS trait term would fit into any of the existing patterns.

  • [ ] If yes, skip to the next step.
  • [ ] If none of the existing OBA trait patterns look appropriate, then create a new pattern. In some cases, the requested GWAS term may not fit the scope of OBA. In that case, a new EFO term can be created without an equivalent OBA trait term.

"},{"location":"oba-gwas-sop/#2-create-new-oba-terms","title":"2. Create new OBA term(s)","text":"
  • Create a new GitHub branch for the edits.

  • Fill in the appropriate DOS-DP template data table to add any new terms to OBA in bio-attribute-ontology/src/patterns/data/default/.

For example, for a trait involving the 'age at which disease manifestations first appear', fill in the table disease_onset.tsv. Create a unique OBA identifier by using the next available ID from your assigned range.

Also fill in the appropriate columns for the variable fields as specified in the actual DOS-DP yaml template file. For example, in the case of the disease_onset.tsv table, you must use MONDO disease or disorder terms in the disease column. A hypothetical example row is sketched after the note below.

NOTE: Keep track of the IDs from your range that you have already assigned.
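
To illustrate, a new row in disease_onset.tsv might look like this; the OBA identifier and the column names shown are hypothetical examples - take the real ID from your assigned range and the real columns from the actual pattern file:

# hypothetical row for src/patterns/data/default/disease_onset.tsv (tab-separated)\n# defined_class    disease\n# OBA:2050001      MONDO:0004975\n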

  • Create a pull request (PR) with the edits. Request other people to review your PR.
  • If approved, merge the PR after the review(s) into the 'master' branch.
"},{"location":"oba-gwas-sop/#3-oba-release","title":"3. OBA release","text":"

The newly created trait terms can be imported into EFO from a publicly released version of OBA.

To run the OBA release pipeline, follow the instructions in the document Release Workflow for the Ontology of Biological Attributes (OBA).

"},{"location":"oba-gwas-sop/#4-provide-the-new-oba-terms-to-efo","title":"4. Provide the new OBA terms to EFO","text":"
  • [ ] Add the newly created OBA term IRI and also all its component term IRIs to oba_terms.txt so that they get included in EFO dynamic imports. Component terms are all those terms that are used in the DOS-DP data filler table to compose the OBA term (terms from MONDO, UBERON, PATO, etc.), as specified in the corresponding DOS-DP pattern file.

    • NOTE: use full IRIs, i.e.:

    http://purl.obolibrary.org/obo/OBA_2040167
    http://purl.obolibrary.org/obo/MONDO_0000481

  • [ ] This step depends on a new public OBA release.

  • [ ] You need to accomplish this in an EFO PR.
"},{"location":"oba-release-sop/","title":"Release Workflow for the Ontology of Biological Attributes (OBA)","text":"
  • Make sure you have the latest ODK installed by pulling the latest Docker image:
docker pull obolibrary/odkfull\n
  • Merge as many open PRs as possible.
  • Start with a fresh copy of the master branch. For the next steps you can use GitHub Desktop or the command line instructions below.
git pull\n

Create a new branch:

git checkout -b release-202X-XX-XX\n

In a terminal window, start the release pipeline:

sh run.sh make prepare_release_fast\n

NOTE: It is recommended to run the release pipeline uncoupled from refreshing imports. However, in case you need to refresh all the imports, you can achieve that by:

sh run.sh make prepare_release -B\n
  • If everything went all right, you should see a message similar to the one below in your terminal window:

... Release files are now in ../.. - now you should commit, push and make a release on your git hosting site such as GitHub or GitLab\nmake[1]: Leaving directory '/work/src/ontology'\nPlease remember to update your ODK image from time to time: https://oboacademy.github.io/obook/howto/odk-update/\n

"},{"location":"oba-release-sop/#check-and-package-the-release-artefacts-for-oba","title":"Check and package the release artefacts for OBA","text":"
  • You should also check in Protege that the new terms you just added look fine.
  • Open some of the OBA release artefacts in Protege and check for any potential errors. For example, check if there are any unsatisfiable classes in oba.obo.

  • Create a pull request and get another pair of eyes to review it.

  • Merge your release-202X-XX-XX branch into the master branch once approved by a reviewer and all the automatic quality control checks passed.
"},{"location":"oba-release-sop/#make-a-release-and-include-all-the-release-files-including-oba-baseowl-and-obaobo-as-binary-files","title":"Make a release and include all the release files (including oba-base.owl and oba.obo) as binary files","text":"

Use the GitHub web interface to create a new OBA release.

  • There should be 15 recently modified files in the root directory of the local copy of the repo:

    1. oba-base.json
    2. oba-base.obo
    3. oba-base.owl
    4. oba-baseplus.json
    5. oba-baseplus.obo
    6. oba-baseplus.owl
    7. oba-basic.json
    8. oba-basic.obo
    9. oba-basic.owl
    10. oba-full.json
    11. oba-full.obo
    12. oba-full.owl
    13. oba.json
    14. oba.obo
    15. oba.owl
  • NOTE: GitHub imposes size constraints on repositories. The combined size of the OBA artefacts exceeds the GitHub-imposed size limit. For this reason, some of the large release artefact files are not under GitHub version control. However, all 15 files need to be included in the public release as binary files. For background information on release artefacts, see

    • OWL, OBO, JSON? Base, simple, full, basic? What should you use, and why?
    • Release artefacts
  • Navigate to the 'Releases' page of OBA

  • Click Draft a new release. Click Choose a tag, and create a new tag based on the date on which your ontologies were built. You can find this, for example, by looking into the oba.obo file and checking the data-version: property. The date needs to be prefixed with a v, so, for example, v2022-10-17.
  • For the title, you can use the date of the ontology build again, for example 2022-10-17 release
  • Drag and drop the files listed above into the binaries box, or manually select them using the GitHub web user interface.
  • You can automatically generate release notes.
  • Click Publish release. Done.
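
If you prefer the command line, the same release can in principle be created with the GitHub CLI; this is a sketch using gh (which the workflow above does not require), with the tag, title and file list matching the example above:

gh release create v2022-10-17 --title \"2022-10-17 release\" --generate-notes oba.owl oba.obo oba.json oba-base.* oba-basic.* oba-baseplus.* oba-full.*\n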
"},{"location":"robot-metrics/","title":"Metrics OBA/OBA-BASEPLUS","text":"

IRI: http://purl.obolibrary.org/obo/oba/oba-baseplus.owl

Version IRI: http://purl.obolibrary.org/obo/oba/releases/2023-01-23/oba-baseplus.owl

"},{"location":"robot-metrics/#entities-and-axioms","title":"Entities and axioms","text":"Metric Value Annotation properties 25 Axioms 66080 Logical axioms 29270 Classes 11947 Object properties 6 Data properties 0 Individuals 0"},{"location":"robot-metrics/#expressivity","title":"Expressivity","text":"Metric Value Expressivity ALE OWL2 True OWL2 DL False OWL2 EL False OWL2 QL False OWL2 RL False"},{"location":"robot-metrics/#axiom-types","title":"Axiom types","text":"Metric Value AnnotationAssertion 24843 EquivalentClasses 7769 Declaration 11967 SubClassOf 21501"},{"location":"robot-metrics/#entity-namespaces-axiom-counts-by-namespace","title":"Entity namespaces: axiom counts by namespace","text":"Metric Value PR 69 oboInOwl 12 owl 2 GO 708 HP 1 xsd 2 CL 346 PATO 139 BFO 2 rdfs 2 OBA 7854 dc11 4 MONDO 40 CHEBI 302 IAO 3 UBERON 2369 NBO 109 SO 4 RO 4 PO 6 dc 2"},{"location":"robot-metrics/#class-expressions-used","title":"Class expressions used","text":"Metric Value Class 79152 ObjectSomeValuesFrom 15797 ObjectIntersectionOf 8683

The raw data (ontology metrics) can be found on GitHub.
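
To regenerate metrics like these locally, ROBOT's measure command is one plausible option; this sketch assumes a local copy of the release file and is not necessarily how the published numbers were produced:

robot measure --input oba-baseplus.owl --metrics extended --output oba-metrics.tsv\n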

"},{"location":"odk-workflows/","title":"Default ODK Workflows","text":"
  • Daily Editors Workflow
  • Release Workflow
  • Manage your ODK Repository
  • Setting up Docker for ODK
  • Imports management
  • Managing the documentation
  • Managing your Automated Testing
"},{"location":"odk-workflows/ContinuousIntegration/","title":"Introduction to Continuous Integration Workflows with ODK","text":"

Historically, most repos have been using Travis CI for continuous integration testing and building, but due to runtime restrictions, we recently switched a lot of our repos to GitHub Actions. You can set up your repo with CI by adding this to your configuration file (src/ontology/oba-odk.yaml):

ci:\n  - github_actions\n

When updating your repo, you will notice a new file being added: .github/workflows/qc.yml.

This file contains your CI logic, so if you need to change, or add anything, this is the place!

Alternatively, if your repo is in GitLab instead of GitHub, you can set up your repo with GitLab CI by adding this to your configuration file (src/ontology/oba-odk.yaml):

ci:\n  - gitlab-ci\n

This will add a file called .gitlab-ci.yml in the root of your repo.

"},{"location":"odk-workflows/EditorsWorkflow/","title":"Editors Workflow","text":"

The editors workflow is one of the formal workflows to ensure that the ontology is developed correctly according to ontology engineering principles. There are a few different editors workflows:

  1. Local editing workflow: Editing the ontology in your local environment by hand, using tools such as Prot\u00e9g\u00e9, ROBOT templates or DOSDP patterns.
  2. Completely automated data pipeline (GitHub Actions)
  3. DROID workflow

This document only covers the first editing workflow, but more will be added in the future.

"},{"location":"odk-workflows/EditorsWorkflow/#local-editing-workflow","title":"Local editing workflow","text":"

Workflow requirements:

  • git
  • github
  • docker
  • editing tool of choice, e.g. Prot\u00e9g\u00e9, your favourite text editor, etc
"},{"location":"odk-workflows/EditorsWorkflow/#1-create-issue","title":"1. Create issue","text":"

Ensure that there is a ticket on your issue tracker that describes the change you are about to make. While this seems optional, this is a very important part of the social contract of building an ontology - no change to the ontology should be performed without a good ticket, describing the motivation and nature of the intended change.

"},{"location":"odk-workflows/EditorsWorkflow/#2-update-main-branch","title":"2. Update main branch","text":"

In your local environment (e.g. your laptop), make sure you are on the main (prev. master) branch and ensure that you have all the upstream changes, for example:

git checkout master\ngit pull\n
"},{"location":"odk-workflows/EditorsWorkflow/#3-create-feature-branch","title":"3. Create feature branch","text":"

Create a new branch. Per convention, we try to use meaningful branch names such as:

  • issue23removeprocess (where issue 23 is the related issue on GitHub)
  • issue26addcontributor
  • release20210101 (for releases)

On your command line, this looks like this:

git checkout -b issue23removeprocess\n
"},{"location":"odk-workflows/EditorsWorkflow/#4-perform-edit","title":"4. Perform edit","text":"

Using your editor of choice, perform the intended edit. For example:

Prot\u00e9g\u00e9

  1. Open src/ontology/oba-edit.owl in Prot\u00e9g\u00e9
  2. Make the change
  3. Save the file

TextEdit

  1. Open src/ontology/oba-edit.owl in TextEdit (or Sublime, Atom, Vim, Nano)
  2. Make the change
  3. Save the file

Consider the following when making the edit.

  1. According to our development philosophy, the only places that should be manually edited are:
    • src/ontology/oba-edit.owl
    • Any ROBOT templates you chose to use (the TSV files only)
    • Any DOSDP data tables you chose to use (the TSV files, and potentially the associated patterns)
    • components (anything in src/ontology/components), see here.
  2. Imports should not be edited (any edits will be flushed out with the next update). However, refreshing imports is a potentially breaking change - and is discussed elsewhere.
  3. Changes should usually be small. Adding or changing 1 term is great. Adding or changing 10 related terms is ok. Adding or changing 100 or more terms at once should be considered very carefully.
"},{"location":"odk-workflows/EditorsWorkflow/#4-check-the-git-diff","title":"4. Check the Git diff","text":"

This step is very important. Rather than simply trusting your change had the intended effect, we should always use a git diff as a first pass for sanity checking.

In our experience, having a visual git client like GitHub Desktop or Sourcetree is really helpful for this part. In case you prefer the command line:

git status\ngit diff\n
"},{"location":"odk-workflows/EditorsWorkflow/#5-quality-control","title":"5. Quality control","text":"

Now it's time to run your quality control checks. This can either happen locally (5a) or through your continuous integration system (7/5b).

"},{"location":"odk-workflows/EditorsWorkflow/#5a-local-testing","title":"5a. Local testing","text":"

If you chose to run your test locally:

sh run.sh make IMP=false test\n

This will run the whole set of configured ODK tests on your ontology, including your change. If you have a complex DOSDP pattern pipeline, you may want to add PAT=false to skip the potentially lengthy process of rebuilding the patterns.

sh run.sh make IMP=false PAT=false test\n
"},{"location":"odk-workflows/EditorsWorkflow/#6-pull-request","title":"6. Pull request","text":"

When you are happy with the changes, you commit your changes to your feature branch, push them upstream (to GitHub) and create a pull request. For example:

git add NAMEOFCHANGEDFILES\ngit commit -m \"Added biological process term #12\"\ngit push -u origin issue23removeprocess\n

Then you go to your project on GitHub, and create a new pull request from the branch, for example: https://github.com/INCATools/ontology-development-kit/pulls

There is a lot of great advice on how to write pull requests, but at the very least you should:

  • mention the tickets affected: see #23 to link to a related ticket, or fixes #23 if, by merging this pull request, the ticket is fixed. Tickets in the latter case will be closed automatically by GitHub when the pull request is merged.
  • summarise the changes in a few sentences. Consider the reviewer: what would they want to know right away?
  • if the diff is large, provide instructions on how best to review the pull request (sometimes, there are many changed files, but only one important change).

"},{"location":"odk-workflows/EditorsWorkflow/#75b-continuous-integration-testing","title":"7/5b. Continuous Integration Testing","text":"

If you didn't run any local quality control checks (see 5a), you should have Continuous Integration (CI) set up, for example:

  • Travis
  • GitHub Actions

More on how to set this up here. Once the pull request is created, the CI will automatically trigger. If all is fine, it will show up green, otherwise red.

"},{"location":"odk-workflows/EditorsWorkflow/#8-community-review","title":"8. Community review","text":"

Once all the automatic tests have passed, it is important to put a second set of eyes on the pull request. Ontologies are inherently social - they represent some kind of community consensus on how a domain is organised conceptually. This may sound like high-brow talk, but it is very important that, as an ontology editor, you have your work validated by the community you are trying to serve (e.g. your colleagues, other contributors etc.). In our experience, it is hard to get more than one review on a pull request - two is great. You can set up GitHub branch protection to actually require a review before a pull request can be merged! We recommend this.

This step seems daunting to some hopelessly under-resourced ontologies, but we recommend to put this high up on your list of priorities - train a colleague, reach out!

"},{"location":"odk-workflows/EditorsWorkflow/#9-merge-and-cleanup","title":"9. Merge and cleanup","text":"

When the QC is green and the reviews are in (approvals), it is time to merge the pull request. After the pull request is merged, remember to delete the branch as well (this option will show up as a big button right after you have merged the pull request). If you have not done so, close all the associated tickets fixed by the pull request.

"},{"location":"odk-workflows/EditorsWorkflow/#10-changelog-optional","title":"10. Changelog (Optional)","text":"

It is sometimes difficult to keep track of changes made to an ontology. Some ontology teams opt to document changes in a changelog (simply a text file in your repository) so that when release day comes, you know everything you have changed. This is advisable at least for major changes (such as a new release system, a new pattern or template etc.).

"},{"location":"odk-workflows/ManageAutomatedTest/","title":"ManageAutomatedTest","text":""},{"location":"odk-workflows/ManageAutomatedTest/#constraint-violation-checks","title":"Constraint violation checks","text":"

We can define custom checks using SPARQL. SPARQL queries define bad modelling patterns (missing labels, misspelt URIs, and many more) in the ontology. If these queries return any results, then the build will fail. Custom checks are designed to be run as part of GitHub Actions Continuous Integration testing, but they can also be run locally.

"},{"location":"odk-workflows/ManageAutomatedTest/#steps-to-add-a-constraint-violation-check","title":"Steps to add a constraint violation check:","text":"
  1. Add the SPARQL query in src/sparql. The name of the file should end with -violation.sparql. Please give a name that helps to understand which violation the query wants to check.
  2. Add the name of the new file to the ODK configuration file src/ontology/oba-odk.yaml:
    1. Include the name of the file (without the -violation.sparql part) to the list inside the key custom_sparql_checks that is inside robot_report key.
    2. If the robot_report or custom_sparql_checks keys are not available, please add this code block to the end of the file.

      robot_report:\n  release_reports: False\n  fail_on: ERROR\n  use_labels: False\n  custom_profile: True\n  report_on:\n    - edit\n  custom_sparql_checks:\n    - name-of-the-file-check\n

  3. Update the repository so your new SPARQL check will be included in the QC.

sh run.sh make update_repo\n
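
As an illustration, creating such a check from the command line (run from src/ontology) might look like the following; the file name and the query, which would flag classes missing an rdfs:label, are hypothetical examples rather than an existing OBA check:

cat > ../sparql/missing-label-violation.sparql <<'EOF'\nPREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\nPREFIX owl: <http://www.w3.org/2002/07/owl#>\nSELECT ?entity WHERE {\n  ?entity a owl:Class .\n  FILTER(isIRI(?entity))\n  FILTER NOT EXISTS { ?entity rdfs:label ?label }\n}\nEOF\n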
"},{"location":"odk-workflows/ManageDocumentation/","title":"Updating the Documentation","text":"

The documentation for OBA is managed in two places (relative to the repository root):

  1. The docs directory contains all the files that pertain to the content of the documentation (more below)
  2. the mkdocs.yaml file contains the documentation config, in particular its navigation bar and theme.

The documentation is hosted using GitHub pages, on a special branch of the repository (called gh-pages). It is important that this branch is never deleted - it contains all the files GitHub pages needs to render and deploy the site. It is also important to note that the gh-pages branch should never be edited manually. All changes to the docs happen inside the docs directory on the main branch.

"},{"location":"odk-workflows/ManageDocumentation/#editing-the-docs","title":"Editing the docs","text":""},{"location":"odk-workflows/ManageDocumentation/#changing-content","title":"Changing content","text":"

All the documentation is contained in the docs directory, and is managed in Markdown. Markdown is a very simple and convenient way to produce text documents with formatting instructions, and is very easy to learn - it is also used, for example, in GitHub issues. This is a normal editing workflow:

  1. Open the .md file you want to change in an editor of choice (a simple text editor is often best). IMPORTANT: Do not edit any files in the docs/odk-workflows/ directory. These files are managed by the ODK system and will be overwritten when the repository is upgraded! If you wish to change these files, make an issue on the ODK issue tracker.
  2. Perform the edit and save the file
  3. Commit the file to a branch, and create a pull request as usual.
  4. If your development team likes your changes, merge the docs into the master branch.
  5. Deploy the documentation (see below)
"},{"location":"odk-workflows/ManageDocumentation/#deploy-the-documentation","title":"Deploy the documentation","text":"

The documentation is not automatically updated from the Markdown, and needs to be deployed deliberately. To do this, perform the following steps:

  1. In your terminal, navigate to the edit directory of your ontology, e.g.: cd oba/src/ontology
  2. Now you are ready to build the docs as follows: sh run.sh make update_docs Mkdocs now sets off to build the site from the markdown pages. You will be asked to
    • Enter your username
    • Enter your password (see here for using GitHub access tokens instead). IMPORTANT: Password-based authentication will be deprecated this year (2021). Make sure you read up on personal access tokens if that happens!

If everything was successful, you will see a message similar to this one:

INFO - Your documentation should shortly be available at: https://obophenotype.github.io/bio-attribute-ontology/\n

  3. Just to double-check, you can now navigate to your documentation pages (usually https://obophenotype.github.io/bio-attribute-ontology/). Just make sure you give GitHub 2-5 minutes to build the pages!

"},{"location":"odk-workflows/ReleaseWorkflow/","title":"The release workflow","text":"

The release workflow recommended by the ODK is based on GitHub releases and works as follows:

  1. Run a release with the ODK
  2. Review the release
  3. Merge to main branch
  4. Create a GitHub release

These steps are outlined in detail in the following.

"},{"location":"odk-workflows/ReleaseWorkflow/#run-a-release-with-the-odk","title":"Run a release with the ODK","text":"

Preparation:

  1. Ensure that all your pull requests are merged into your main (master) branch
  2. Make sure that all changes to master are committed to GitHub (git status should say that there are no modified files)
  3. Locally make sure you have the latest changes from master (git pull)
  4. Checkout a new branch (e.g. git checkout -b release-2021-01-01)
  5. You may or may not want to refresh your imports as part of your release strategy (see here)
  6. Make sure you have the latest ODK installed by running docker pull obolibrary/odkfull

To actually run the release, you:

  1. Open a command line terminal window and navigate to the src/ontology directory (cd oba/src/ontology)
  2. Run the release pipeline: sh run.sh make prepare_release -B. Note that for some ontologies, this process can take up to 90 minutes - especially if there are large ontologies you depend on, like PRO or CHEBI.
  3. If everything went well, you should see the following output on your machine: Release files are now in ../.. - now you should commit, push and make a release on your git hosting site such as GitHub or GitLab.

This will create all the specified release targets (OBO, OWL, JSON, and the variants, ont-full and ont-base) and copy them into your release directory (the top level of your repo).
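
Putting the preparation and release steps together, the full sequence might look roughly like this sketch (the branch name and date are illustrative):

cd oba\ngit checkout master\ngit pull\ngit checkout -b release-2021-01-01\ndocker pull obolibrary/odkfull\ncd src/ontology\nsh run.sh make prepare_release -B\n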

"},{"location":"odk-workflows/ReleaseWorkflow/#review-the-release","title":"Review the release","text":"
  1. (Optional) Rough check. This step is frequently skipped, but for the more paranoid among us (like the author of this doc), this is a 3-minute additional effort for some peace of mind. Open the main release (oba.owl) in your favourite development environment (e.g. Prot\u00e9g\u00e9) and eyeball the hierarchy. We recommend two simple checks:
    1. Does the very top level of the hierarchy look ok? This means that all new terms have been imported/updated correctly.
    2. Does at least one change that you know should be in this release appear? For example, a new class. This means that the release was actually based on the recent edit file.
  2. Commit your changes to the branch and make a pull request
  3. In your GitHub pull request, review the following three files in detail (based on our experience):
    1. oba.obo - this reflects a useful subset of the whole ontology (everything that can be covered by OBO format). OBO format has that speaking for it: it is very easy to review!
    2. oba-base.owl - this reflects the asserted axioms in your ontology that you have actually edited.
    3. Ideally also take a look at oba-full.owl, which may reveal interesting new inferences you did not know about. Note that the diff of this file is sometimes quite large.
  4. Like with every pull request, we recommend to always employ a second set of eyes when reviewing a PR!
"},{"location":"odk-workflows/ReleaseWorkflow/#merge-the-main-branch","title":"Merge the main branch","text":"

Once your CI checks have passed, and your reviews are completed, you can now merge the branch into your main branch (don't forget to delete the branch afterwards - a big button will appear after the merge is finished).

"},{"location":"odk-workflows/ReleaseWorkflow/#create-a-github-release","title":"Create a GitHub release","text":"
  1. Go to your releases page on GitHub by navigating to your repository, and then clicking on releases (usually on the right, for example: https://github.com/obophenotype/bio-attribute-ontology/releases). Then click \"Draft new release\"
  2. As the tag version, you need to choose the date on which your ontologies were built. You can find this, for example, by looking at the oba.obo file and checking the data-version: property. The date needs to be prefixed with a v, so, for example, v2020-02-06.
  3. You can write whatever you want in the release title, but we typically write the date again. The description underneath should contain a concise list of changes or term additions.
  4. Click \"Publish release\". Done.
"},{"location":"odk-workflows/ReleaseWorkflow/#debugging-typical-ontology-release-problems","title":"Debugging typical ontology release problems","text":""},{"location":"odk-workflows/ReleaseWorkflow/#problems-with-memory","title":"Problems with memory","text":"

When you are dealing with large ontologies, you need a lot of memory. When you see error messages relating to large ontologies such as CHEBI, PRO, NCBITAXON, or Uberon, you should think of memory first, see here.

"},{"location":"odk-workflows/ReleaseWorkflow/#problems-when-using-obo-format-based-tools","title":"Problems when using OBO format based tools","text":"

Sometimes you will get cryptic error messages when using legacy tools based on OBO format, such as the ontology release tool (OORT), which is also available as part of the ODK docker container. In these cases, you need to track down which axiom or annotation actually caused the breakdown. In our experience (in about 60% of the cases), the problem lies with duplicate annotations (def, comment), which are illegal in OBO. Here is an example recipe for how to deal with such a problem:

  1. If you get a message like make: *** [cl.Makefile:84: oort] Error 255 you might have an OORT error.
  2. To debug this, in your terminal enter sh run.sh make IMP=false PAT=false oort -B (assuming you are already in the ontology folder in your directory)
  3. This should show you where the error is in the log (e.g. multiple different definitions). WARNING: THE FIX BELOW IS NOT IDEAL, YOU SHOULD ALWAYS TRY TO FIX UPSTREAM IF POSSIBLE
  4. Open oba-edit.owl in Prot\u00e9g\u00e9, find the offending term and delete all offending axioms (e.g. delete ALL definitions, if the problem was \"multiple def tags not allowed\") and save. While this is not ideal, as it will remove all definitions from that term, they will be added back again when the term is fixed in the ontology it was imported from.
  5. Rerun sh run.sh make IMP=false PAT=false oort -B and if it all passes, commit your changes to a branch and make a pull request as usual.
"},{"location":"odk-workflows/RepoManagement/","title":"Managing your ODK repository","text":""},{"location":"odk-workflows/RepoManagement/#updating-your-odk-repository","title":"Updating your ODK repository","text":"

Your ODK repository's configuration is managed in src/ontology/oba-odk.yaml. The ODK Project Configuration Schema defines all possible parameters that can be used in this config YAML. Once you have made your changes, you can run the following to apply them to the repository:

sh run.sh make update_repo\n

There are a large number of options that can be set to configure your ODK, but we will only discuss a few of them here.

NOTE for Windows users:

You may get a cryptic failure such as Set Illegal Option - if the update script located in src/scripts/update_repo.sh was saved using Windows Line endings. These need to change to unix line endings. In Notepad++, for example, you can click on Edit->EOL Conversion->Unix LF to change this.
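
If you prefer to fix the line endings on the command line, something like the following should work; this is a generic fix, assuming dos2unix or GNU sed is available, not an ODK-specific command:

dos2unix src/scripts/update_repo.sh\n# or, equivalently:\nsed -i 's/\r$//' src/scripts/update_repo.sh\n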

"},{"location":"odk-workflows/RepoManagement/#managing-imports","title":"Managing imports","text":"

You can use the update repository workflow described on this page to perform the following operations to your imports:

  1. Add a new import
  2. Modify an existing import
  3. Remove an import you no longer want
  4. Customise an import

We will discuss all these workflows in the following.

"},{"location":"odk-workflows/RepoManagement/#add-new-import","title":"Add new import","text":"

To add a new import, you first edit your odk config as described above, adding an id to the product list in the import_group section (for the sake of this example, we assume you already import RO, and your goal is to also import GO):

import_group:\n  products:\n    - id: ro\n    - id: go\n

Note: your ODK file should only have one import_group, which can contain multiple imports (in the products section). Next, you run the update repo workflow to apply these changes. Note that, by default, this module is going to be an SLME Bottom module, see here. To change that or customise your module, see the section "Customise an import". To finalise the addition of your import, perform the following steps:

  1. Add an import statement to your src/ontology/oba-edit.owl file. We suggest to do this using a text editor, by simply copying an existing import declaration and renaming it to the new ontology import, for example as follows:

...\nOntology(<http://purl.obolibrary.org/obo/oba.owl>\nImport(<http://purl.obolibrary.org/obo/oba/imports/ro_import.owl>)\nImport(<http://purl.obolibrary.org/obo/oba/imports/go_import.owl>)\n...\n
  2. Add your imports redirect to your catalog file src/ontology/catalog-v001.xml, for example: <uri name=\"http://purl.obolibrary.org/obo/oba/imports/go_import.owl\" uri=\"imports/go_import.owl\"/>
  3. Test whether everything is in order:
    1. Refresh your import
    2. Open in your Ontology Editor of choice (Protege) and ensure that the expected terms are imported.

Note: The catalog file src/ontology/catalog-v001.xml has one purpose: redirecting imports from URLs to local files. For example, if you have

Import(<http://purl.obolibrary.org/obo/oba/imports/go_import.owl>)\n

in your editors file (the ontology) and

<uri name=\"http://purl.obolibrary.org/obo/oba/imports/go_import.owl\" uri=\"imports/go_import.owl\"/>\n

in your catalog, tools like robot or Prot\u00e9g\u00e9 will recognize the statement in the catalog file to redirect the URL http://purl.obolibrary.org/obo/oba/imports/go_import.owl to the local file imports/go_import.owl (which is in your src/ontology directory).

"},{"location":"odk-workflows/RepoManagement/#modify-an-existing-import","title":"Modify an existing import","text":"

If you simply wish to refresh your import in light of new terms, see here. If you wish to change the type of your module see section \"Customise an import\".

"},{"location":"odk-workflows/RepoManagement/#remove-an-existing-import","title":"Remove an existing import","text":"

To remove an existing import, perform the following steps:

  1. remove the import declaration from your src/ontology/oba-edit.owl.
  2. remove the id from your src/ontology/oba-odk.yaml, e.g. - id: go from the list of products in the import_group.
  3. run update repo workflow
  4. delete the associated files manually (see the sketch after this list):
    • src/imports/go_import.owl
    • src/imports/go_terms.txt
  5. Remove the respective entry from the src/ontology/catalog-v001.xml file.
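
For the go example, the manual part of this cleanup might look like the following sketch (the update_repo call corresponds to step 3 above; double-check the paths in your own repository before deleting anything):

sh run.sh make update_repo\nrm src/imports/go_import.owl src/imports/go_terms.txt\n# then remove the go entry from src/ontology/catalog-v001.xml in a text editor\n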
"},{"location":"odk-workflows/RepoManagement/#customise-an-import","title":"Customise an import","text":"

By default, an import module extracted from a source ontology will be a SLME module, see here. There are various options to change the default.

The following change to your repo config (src/ontology/oba-odk.yaml) will switch the go import from an SLME module to a simple ROBOT filter module:

import_group:\n  products:\n    - id: ro\n    - id: go\n      module_type: filter\n

A ROBOT filter module is, essentially, importing all external terms declared by your ontology (see here on how to declare external terms to be imported). Note that the filter module does not consider terms/annotations from namespaces other than the base-namespace of the ontology itself. For example, in the example of GO above, only annotations / axioms related to the GO base IRI (http://purl.obolibrary.org/obo/GO_) would be considered. This behaviour can be changed by adding additional base IRIs as follows:

import_group:\n  products:\n    - id: go\n      module_type: filter\n      base_iris:\n        - http://purl.obolibrary.org/obo/GO_\n        - http://purl.obolibrary.org/obo/CL_\n        - http://purl.obolibrary.org/obo/BFO\n

If you wish to customise your import entirely, you can specify your own ROBOT command to do so. To do that, add the following to your repo config (src/ontology/oba-odk.yaml):

import_group:\n  products:\n    - id: ro\n    - id: go\n      module_type: custom\n

Now add a new goal in your custom Makefile (src/ontology/oba.Makefile, not src/ontology/Makefile).

imports/go_import.owl: mirror/ro.owl imports/ro_terms_combined.txt\n    if [ $(IMP) = true ]; then $(ROBOT) query  -i $< --update ../sparql/preprocess-module.ru \\\n        extract -T imports/ro_terms_combined.txt --force true --individuals exclude --method BOT \\\n        query --update ../sparql/inject-subset-declaration.ru --update ../sparql/postprocess-module.ru \\\n        annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) --output $@.tmp.owl && mv $@.tmp.owl $@; fi\n

Now feel free to change this goal to do whatever you wish it to do! It probably makes some sense (albeit not being a strict necessity) to leave most of the goal intact and replace only:

extract -T imports/ro_terms_combined.txt --force true --individuals exclude --method BOT \\\n

to another ROBOT pipeline.

"},{"location":"odk-workflows/RepoManagement/#add-a-component","title":"Add a component","text":"

A component is an import which belongs to your ontology, e.g. is managed by you and your team.

  1. Open src/ontology/oba-odk.yaml
  2. If you don't have it yet, add a new top-level section components
  3. Under the components section, add a new section called products. This is where all your components are specified
  4. Under the products section, add a new component, e.g. - filename: mycomp.owl

Example

components:\n  products:\n    - filename: mycomp.owl\n

When running sh run.sh make update_repo, a new file src/ontology/components/mycomp.owl will be created which you can edit as you see fit. Typical ways to edit:

  1. Using a ROBOT template to generate the component (see below)
  2. Manually curating the component separately with Prot\u00e9g\u00e9 or any other editor
  3. Providing a components/mycomp.owl: make target in src/ontology/oba.Makefile and provide a custom command to generate the component
    • WARNING: Note that the custom rule to generate the component MUST NOT depend on any other ODK-generated file such as seed files and the like (see issue).
  4. Providing an additional attribute for the component in src/ontology/oba-odk.yaml, source, to specify that this component should simply be downloaded from somewhere on the web.
"},{"location":"odk-workflows/RepoManagement/#adding-a-new-component-based-on-a-robot-template","title":"Adding a new component based on a ROBOT template","text":"

Since ODK 1.3.2, it is possible to simply link a ROBOT template to a component without having to specify any of the import logic. In order to add a new component that is connected to one or more template files, follow these steps:

  1. Open src/ontology/oba-odk.yaml.
  2. Make sure that use_templates: TRUE is set in the global project options. You should also make sure that use_context: TRUE is set in case you are using prefixes in your templates that are not known to robot, such as OMOP:, CPONT: and more. All non-standard prefixes you are using should be added to config/context.json.
  3. Add another component to the products section.
  4. To activate this component to be template-driven, simply say: use_template: TRUE. This will create an empty template for you in the templates directory, which will automatically be processed when recreating the component (e.g. run.bat make recreate-mycomp).
  5. If you want to use more than one template, use the templates field to add as many template names as you wish. ODK will look for them in the src/templates directory.
  6. Advanced: If you want to provide additional processing options, you can use the template_options field. This should be a string with option from robot template. One typical example for additional options you may want to provide is --add-prefixes config/context.json to ensure the prefix map of your context is provided to robot, see above.

Example:

components:\n  products:\n    - filename: mycomp.owl\n      use_template: TRUE\n      template_options: --add-prefixes config/context.json\n      templates:\n        - template1.tsv\n        - template2.tsv\n

Note: if your mirror is particularly large and complex, read this ODK recommendation.

"},{"location":"odk-workflows/RepositoryFileStructure/","title":"Repository structure","text":"

The main kinds of files in the repository:

  1. Release files
  2. Imports
  3. Components
"},{"location":"odk-workflows/RepositoryFileStructure/#release-files","title":"Release files","text":"

Release files are the files that are considered part of the official ontology release, to be used by the community. A detailed description of the release artefacts can be found here.

"},{"location":"odk-workflows/RepositoryFileStructure/#imports","title":"Imports","text":"

Imports are subsets of external ontologies that contain terms and axioms you would like to re-use in your ontology. These are considered \"external\", like dependencies in software development, and are not included in your \"base\" product, which is the release artefact which contains only those axioms that you personally maintain.

These are the current imports in OBA

Import URL Type
ro http://purl.obolibrary.org/obo/ro.owl None
chebi https://raw.githubusercontent.com/obophenotype/chebi_obo_slim/main/chebi_slim.owl None
goplus http://purl.obolibrary.org/obo/go/go-base.owl None
go http://purl.obolibrary.org/obo/go.owl None
pato http://purl.obolibrary.org/obo/pato.owl None
omo http://purl.obolibrary.org/obo/omo.owl None
hp http://purl.obolibrary.org/obo/hp.owl None
mondo http://purl.obolibrary.org/obo/mondo.owl None
ncbitaxon http://purl.obolibrary.org/obo/ncbitaxon/subsets/taxslim.owl None
uberon http://purl.obolibrary.org/obo/uberon.owl None
cl http://purl.obolibrary.org/obo/cl.owl None
nbo http://purl.obolibrary.org/obo/nbo.owl None
pr https://raw.githubusercontent.com/obophenotype/pro_obo_slim/master/pr_slim.owl None
so http://purl.obolibrary.org/obo/so.owl None
po http://purl.obolibrary.org/obo/po.owl None
bfo http://purl.obolibrary.org/obo/bfo.owl None
swisslipids http://purl.obolibrary.org/obo/swisslipids.owl None
lipidmaps http://purl.obolibrary.org/obo/lipidmaps.owl None

"},{"location":"odk-workflows/RepositoryFileStructure/#components","title":"Components","text":"

Components, in contrast to imports, are considered full members of the ontology. This means that any axiom in a component is also included in the ontology base - which means it is considered native to the ontology. While this sounds complicated, consider this: conceptually, no component should be part of more than one ontology. If that seems to be the case, we are most likely talking about an import. Components are often not needed for ontologies, but there are some use cases:

  1. There is an automated process that generates and re-generates a part of the ontology
  2. A part of the ontology is managed in ROBOT templates
  3. The expressivity of the component is higher than the format of the edit file. For example, people still choose to manage their ontology in OBO format (they should not), missing out on a lot of OWL features. They may choose to manage logic that is beyond OBO in a specific OWL component.

These are the components in OBA

Filename URL
obsoletes.owl None
synonyms.owl None

"},{"location":"odk-workflows/SettingUpDockerForODK/","title":"Setting up your Docker environment for ODK use","text":"

One of the most frequent problems with running the ODK for the first time is failure because of lack of memory. This can look like a Java OutOfMemory exception, but more often than not it will appear as something like an Error 137. There are two places you need to consider to set your memory:

  1. Your src/ontology/run.sh (or run.bat) file. You can set the memory in there by adding robot_java_args: '-Xmx8G' to your src/ontology/oba-odk.yaml file, see for example here.
  2. Set your Docker memory. By default, it should be about 10-20% more than your robot_java_args variable. You can manage your memory settings by right-clicking on the Docker whale in your system bar --> Preferences --> Resources --> Advanced.

"},{"location":"odk-workflows/UpdateImports/","title":"Update Imports Workflow","text":"

This page discusses how to update the contents of your imports, like adding or removing terms. If you are looking to customise imports, like changing the module type, see here.

"},{"location":"odk-workflows/UpdateImports/#importing-a-new-term","title":"Importing a new term","text":"

Note: some ontologies now use a merged-import system to manage dynamic imports; for these, please follow the instructions in the section titled "Using the Base Module approach".

Importing a new term is split into two sub-phases:

  1. Declaring the terms to be imported
  2. Refreshing imports dynamically
"},{"location":"odk-workflows/UpdateImports/#declaring-terms-to-be-imported","title":"Declaring terms to be imported","text":"

There are three ways to declare terms that are to be imported from an external ontology. Choose the appropriate one for your particular scenario (all three can be used in parallel if need be):

  1. Prot\u00e9g\u00e9-based declaration
  2. Using term files
  3. Using the custom import template
"},{"location":"odk-workflows/UpdateImports/#protege-based-declaration","title":"Prot\u00e9g\u00e9-based declaration","text":"

This workflow is to be avoided, but may be appropriate if the editor does not have access to the ODK docker container. This approach also applies to ontologies that use base module import approach.

  1. Open your ontology (edit file) in Prot\u00e9g\u00e9 (5.5+).
  2. Select 'owl:Thing'
  3. Add a new class as usual.
  4. Paste the full iri in the 'Name:' field, for example, http://purl.obolibrary.org/obo/CHEBI_50906.
  5. Click 'OK'

Now you can use this term, for example, to construct logical definitions. The next time the imports are refreshed (see how to refresh here), the metadata (labels, definitions, etc.) for this term is imported from the respective external source ontology and becomes visible in your ontology.

"},{"location":"odk-workflows/UpdateImports/#using-term-files","title":"Using term files","text":"

Every import has, by default, a term file associated with it, which can be found in the imports directory. For example, if you have a GO import in src/ontology/imports/go_import.owl, you will also have an associated term file src/ontology/imports/go_terms.txt. You can add terms in there simply as a list:

GO:0008150\nGO:0008151\n

Now you can run the refresh imports workflow, and the two terms will be imported.

"},{"location":"odk-workflows/UpdateImports/#using-the-custom-import-template","title":"Using the custom import template","text":"

This workflow is appropriate if:

  1. You prefer to manage all your imported terms in a single file (rather than multiple files like in the \"Using term files\" workflow above).
  2. You wish to augment your imported ontologies with additional information. This requires a cautionary discussion.

To enable this workflow, you add the following to your ODK config file (src/ontology/oba-odk.yaml), and update the repository:

use_custom_import_module: TRUE\n

Now you can manage your imported terms directly in the custom external terms template, which is located at src/templates/external_import.owl. Note that this file is a ROBOT template, and can, in principle, be extended to include any axioms you like. Before extending the template, however, read the following carefully.

The main purpose of the custom import template is to enable the management of all terms to be imported in a centralised place. To enable that, you do not have to do anything other than maintain the template. So if you, say, currently import APOLLO_SV:00000480, and you wish to import APOLLO_SV:00000532, you simply add a row like this:

ID  Entity Type\nID  TYPE\nAPOLLO_SV:00000480  owl:Class\nAPOLLO_SV:00000532  owl:Class\n

When the imports are refreshed (see the imports refresh workflow), the term(s) will simply be imported from the configured ontologies.

Now, if you wish to extend the Makefile (which is beyond these instructions) and add, say, synonyms to the imported terms, you can do that, but you need to (a) preserve the ID and ENTITY columns and (b) ensure that the ROBOT template remains valid (see here).

WARNING. Note that doing this is a widespread antipattern (see related issue). You should not change the axioms of terms that do not belong into your ontology unless necessary - such changes should always be pushed into the ontology where they belong. However, since people are doing it, whether the OBO Foundry likes it or not, at least using the custom imports module as described here localises the changes to a single simple template and ensures that none of the annotations added this way are merged into the base file.

"},{"location":"odk-workflows/UpdateImports/#refresh-imports","title":"Refresh imports","text":"

If you want to refresh the import yourself (this may be necessary to pass the Travis tests), and you have the ODK installed, you can do the following (using go as an example):

First, you navigate in your terminal to the ontology directory (underneath src in your oba root directory).

cd src/ontology\n

Then, you regenerate the import that will now include any new terms you have added. Note: You must have docker installed.

sh run.sh make PAT=false imports/go_import.owl -B\n

Since ODK 1.2.27, it is also possible to simply run the following, which is the same as the above:

sh run.sh make refresh-go\n

Note that in case you changed the defaults, you need to add IMP=true and/or MIR=true to the command below:

sh run.sh make IMP=true MIR=true PAT=false imports/go_import.owl -B\n

If you wish to skip refreshing the mirror, i.e. skip downloading the latest version of the source ontology for your import (e.g. go.owl for your go import) you can set MIR=false instead, which will do the exact same thing as the above, but is easier to remember:

sh run.sh make IMP=true MIR=false PAT=false imports/go_import.owl -B\n
"},{"location":"odk-workflows/UpdateImports/#using-the-base-module-approach","title":"Using the Base Module approach","text":"

Since ODK 1.2.31, we support an entirely new approach to generating modules: using base files. The idea is to only import axioms from an ontology that actually belong to it. A base file is a subset of the ontology that only contains those axioms that nominally belong there. In other words, the base file does not contain any axioms that belong to another ontology. An example would be this:

Imagine this being the full Uberon ontology:

Axiom 1: BFO:123 SubClassOf BFO:124\nAxiom 2: UBERON:123 SubClassOf BFO:123\nAxiom 3: UBERON:124 SubClassOf UBERON:123\n

The base file is the set of all axioms that are about UBERON terms:

Axiom 2: UBERON:123 SubClassOf BFO:123\nAxiom 3: UBERON:124 SubClassOf UBERON:123\n

I.e.

Axiom 1: BFO:123 SubClassOf BFO:124\n

Gets removed.

The base file pipeline is a bit more complex than the normal pipelines because of the logical interactions between the imported ontologies. This is solved by first merging all mirrors into one huge file and then extracting one mega module from it.

Example: let's say we are importing terms from Uberon, GO and RO in our ontologies. When we use the base pipelines, we:

  1. first obtain the base (usually by simply downloading it, but there is also an option now to create it with ROBOT)
  2. merge all base files into one big pile
  3. then extract a single module imports/merged_import.owl

The first implementation of this pipeline is PATO, see https://github.com/pato-ontology/pato/blob/master/src/ontology/pato-odk.yaml.

To check if your ontology uses this method, check src/ontology/oba-odk.yaml to see if use_base_merging: TRUE is declared under import_group.

If your ontology uses Base Module approach, please use the following steps:

First, add the term to be imported to the term file associated with it (see the "Using term files" section above if this is not clear to you).

Next, you navigate in your terminal to the ontology directory (underneath src in your oba root directory).

cd src/ontology\n

Then refresh imports by running

sh run.sh make imports/merged_import.owl\n

Note: if your mirrors are already up to date, you can run sh run.sh make no-mirror-refresh-merged instead, which skips refreshing the mirrors.

This requires quite a bit of memory on your local machine, so if you encounter an error, it might be due to a lack of memory on your computer. A solution would be to create a ticket in an issue tracker requesting the term to be imported, and one of the local devs should pick this up and run the import for you.

Lastly, restart Prot\u00e9g\u00e9, and the term should be imported and ready to be used.

"},{"location":"odk-workflows/components/","title":"Adding components to an ODK repo","text":"

For details on what components are, please see component section of repository file structure document.

To add custom components to an ODK repo, please follow the following steps:

1) Locate your ODK yaml file (src/ontology/oba-odk.yaml) and open it with your favourite text editor.

2) Check whether there is already a components section in the yaml file; if not, add it accordingly, including the name of your component:

components:\n  products:\n    - filename: your-component-name.owl\n

3) Add the component to your catalog file (src/ontology/catalog-v001.xml)

  <uri name=\"http://purl.obolibrary.org/obo/oba/components/your-component-name.owl\" uri=\"components/your-component-name.owl\"/>\n

4) Add the component to the edit file (src/ontology/oba-edit.obo) for .obo formats:

import: http://purl.obolibrary.org/obo/oba/components/your-component-name.owl\n

for .owl formats:

Import(<http://purl.obolibrary.org/obo/oba/components/your-component-name.owl>)\n

5) Refresh your repo by running sh run.sh make update_repo - this should create a new file in src/ontology/components.

6) In your custom Makefile (src/ontology/oba.Makefile), add a goal for your custom component. In this example, the goal is a ROBOT template.

$(COMPONENTSDIR)/your-component-name.owl: $(SRC) ../templates/your-component-template.tsv \n    $(ROBOT) template --template ../templates/your-component-template.tsv \\\n  annotate --ontology-iri $(ONTBASE)/$@ --output $(COMPONENTSDIR)/your-component-name.owl\n

(If using a ROBOT template, do not forget to add your template tsv in src/templates/)

7) Make the file by running sh run.sh make components/your-component-name.owl

"}]} \ No newline at end of file diff --git a/sitemap.xml b/sitemap.xml new file mode 100644 index 00000000..15318eea --- /dev/null +++ b/sitemap.xml @@ -0,0 +1,108 @@ + + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + + None + 2024-04-04 + daily + + \ No newline at end of file diff --git a/sitemap.xml.gz b/sitemap.xml.gz new file mode 100644 index 00000000..cbc29542 Binary files /dev/null and b/sitemap.xml.gz differ