diff --git a/.paket/Paket.Restore.targets b/.paket/Paket.Restore.targets
index b4f593eb..0df24f94 100644
--- a/.paket/Paket.Restore.targets
+++ b/.paket/Paket.Restore.targets
@@ -62,6 +62,11 @@
 true true + + + True + + $(BaseIntermediateOutputPath.TrimEnd('\').TrimEnd('\/'))
@@ -102,24 +107,28 @@
 true - + + true - - + + - +
- $(MSBuildProjectDirectory)\obj\$(MSBuildProjectFile).paket.references.cached
+ $(PaketIntermediateOutputPath)\$(MSBuildProjectFile).paket.references.cached
 $(MSBuildProjectFullPath).paket.references
@@ -154,8 +163,8 @@
 - - + +
@@ -183,11 +192,12 @@
 runtime runtime true + true
- $(MSBuildProjectDirectory)/obj/$(MSBuildProjectFile).paket.clitools
+ $(PaketIntermediateOutputPath)/$(MSBuildProjectFile).paket.clitools
@@ -206,12 +216,12 @@
 - + false $(MSBuildVersion)
@@ -219,9 +229,9 @@
 - +
- <_NuspecFilesNewLocation Include="$(BaseIntermediateOutputPath)$(Configuration)\*.nuspec"/>
+ <_NuspecFilesNewLocation Include="$(PaketIntermediateOutputPath)\$(Configuration)\*.nuspec"/>
@@ -229,14 +239,16 @@
 $(MSBuildProjectDirectory)/$(MSBuildProjectFile) true + false + true false - true + true false - true + true false - true
- $(BaseIntermediateOutputPath)$(Configuration)
- $(BaseIntermediateOutputPath)
+ true
+ $(PaketIntermediateOutputPath)\$(Configuration)
+ $(PaketIntermediateOutputPath)
@@ -250,6 +262,53 @@ + + ]
+module MessagePrompts =
+
+    let prompt (msg:string) =
+        System.Console.Write(msg)
+        System.Console.ReadLine().Trim()
+        |> function | "" -> None | s -> Some s
+        |> Option.map (fun s -> s.Replace ("\"","\\\""))
+
+    let rec promptYesNo msg =
+        match prompt (sprintf "%s [Yn]: " msg) with
+        | Some "Y" | Some "y" -> true
+        | Some "N" | Some "n" -> false
+        | _ -> System.Console.WriteLine("Sorry, invalid answer"); promptYesNo msg
+
+    let releaseMsg = """This will stage all uncommitted changes, push them to the origin and bump the release version to the latest number in the RELEASE_NOTES.md file.
+        Do you want to continue?"""
+
+    let releaseDocsMsg = """This will push the docs to gh-pages. Remember to build the docs prior to this. Do you want to continue?"""
+
 // --------------------------------------------------------------------------------------
 // START TODO: Provide project-specific details below
 // --------------------------------------------------------------------------------------
@@ -352,10 +372,14 @@ Target.create "ReferenceDocs" (fun _ ->
             DirectoryInfo.getSubDirectories d |> Array.filter(fun x -> x.FullName.ToLower().Contains("net45"))
         let net47Bin =
             DirectoryInfo.getSubDirectories d |> Array.filter(fun x -> x.FullName.ToLower().Contains("net47"))
+        let netstandardBin =
+            DirectoryInfo.getSubDirectories d |> Array.filter(fun x -> x.FullName.ToLower().Contains("netstandard"))
         if net45Bin.Length > 0 then
             d.Name, net45Bin.[0]
-        else
+        elif net47Bin.Length > 0 then
             d.Name, net47Bin.[0]
+        else
+            d.Name, netstandardBin.[0]
         dInfo.GetFiles()
         |> Array.filter (fun x ->
@@ -440,7 +464,7 @@ Target.create "ReleaseDocs" (fun _ ->
 )

 Target.create "ReleaseLocal" (fun _ ->
-    let tempDocsDir = "temp/gh-pages"
+    let tempDocsDir = "temp/localDocs"
     Shell.cleanDir tempDocsDir |> ignore
     Shell.copyRecursive "docs" tempDocsDir true |> printfn "%A"
     Shell.replaceInFiles
@@ -506,6 +530,11 @@ Target.create "GitReleaseNuget" (fun _ ->
     Shell.copy tempNugetDir files
 )

+//Confirmation Targets (Ugly because an error is thrown.
+//Maybe there is a better way of handling this using the cancellation tokens in the target context, but I was not able to figure that out)
+
+Target.create "ReleaseConfirmation" (fun _ -> match promptYesNo releaseMsg with | true -> () |_ -> failwith "Release canceled")
+Target.create "ReleaseDocsConfirmation" (fun _ -> match promptYesNo releaseDocsMsg with | true -> () |_ -> failwith "Release canceled")
+
 // --------------------------------------------------------------------------------------
 // Run all targets by default. Invoke 'build <Target>' to override
@@ -540,7 +569,8 @@ Target.create "Linux" ignore
 "Clean"
   ==> "Release"

-"BuildPackage"
+"ReleaseConfirmation"
+  ==> "BuildPackage"
   ==> "PublishNuget"
   ==> "Release"
@@ -555,6 +585,9 @@ Target.create "Linux" ignore
 "GenerateDocs"
   ==> "ReleaseDocs"

+"ReleaseDocsConfirmation"
+  ==> "ReleaseDocs"
+
 "All"
   ==> "ReleaseLocal"

diff --git a/docsrc/content/BioTools-tmhmm.fsx b/docsrc/content/BioTools-tmhmm.fsx
new file mode 100644
index 00000000..031f9f32
--- /dev/null
+++ b/docsrc/content/BioTools-tmhmm.fsx
@@ -0,0 +1,78 @@
+(*** hide ***)
+// This block of code is omitted in the generated HTML documentation. Use
+// it to define helpers that you do not want to show in the documentation.
+#r "netstandard"
+#r "../../packages/Newtonsoft.Json.10.0.3/lib/netstandard1.3/Newtonsoft.Json.dll"
+#r "../../packages/System.Buffers/lib/netstandard2.0/System.Buffers.dll"
+#r "../../packages/Docker.DotNet/lib/netstandard2.0/Docker.DotNet.dll"
+#r "../../packages/SharpZipLib/lib/netstandard2.0/ICSharpCode.SharpZipLib.dll"
+#r "../../packages/FSharpAux.IO/lib/netstandard2.0/FSharpAux.dll"
+#r "../../packages/FSharpAux.IO/lib/netstandard2.0/FSharpAux.IO.dll"
+#r @"../../lib/Formatting/FSharp.Plotly.dll"
+
+#I @"../../bin/BioFSharp/netstandard2.0/"
+#I @"../../bin/BioFSharp.IO/netstandard2.0/"
+#I @"../../bin/BioFSharp.BioTools/netstandard2.0/"
+#r "BioFSharp.dll"
+#r "BioFSharp.IO.dll"
+#r "BioFSharp.BioTools.dll"
+
+
+open System.IO
+open BioFSharp.BioTools
+
+
+let client = Docker.connect "npipe://./pipe/docker_engine"
+
+
+
+//let tmhmm = Docker.ImageName "tmhmm"
+
+//let bcContextUbuntu =
+//    BioContainer.initBcContextWithMountAsync client ubuntu "C:/tmp"
+//    |> Async.RunSynchronously
+
+
+
+//BioContainer.disposeAsync bcContextUbuntu
+//|> Async.Start
+
+
+let bcContext =
+    BioContainer.initBcContextLocalDefaultAsync Tmhmm.ImageTmhmm
+    |> Async.RunSynchronously
+
+
+let stream = new FileStream("C:/tmp/seq.fasta",FileMode.Open)
+
+let res = Tmhmm.run bcContext stream
+
+
+BioContainer.disposeAsync bcContext
+|> Async.Start
+
+
+
+res |> Seq.head |> fun tp -> tp.Topology
+
+
+
+
+
+//let bcContext =
+//    BioContainer.initBcContextLocalDefaultAsync TargetP.ImageTagetP
+//    |> Async.RunSynchronously
+
+
+//let stream = new FileStream("C:/tmp/twelve.fsa",FileMode.Open)
+
+//let res = TargetP.run bcContext (TargetP.NonPlant) stream
+
+
+
+//BioContainer.disposeAsync bcContext
+//|> Async.Start
+
+
+
+//res |> Seq.head |> fun tp -> tp.Mtp
\ No newline at end of file
diff --git a/docsrc/content/BlastWrapper.fsx b/docsrc/content/BlastWrapper.fsx
index ab7937c4..ef72c731 100644
--- a/docsrc/content/BlastWrapper.fsx
+++ b/docsrc/content/BlastWrapper.fsx
@@ -202,11 +202,10 @@ let outputPath = (__SOURCE_DIRECTORY__ + "/data/Output.txt")
 (*** do-not-eval ***)
 BlastWrapper(ncbiPath).blastP inputFile queryFastaPath outputPath ([customOutputFormat;] |> seq)
-(**
- As you can see in the result file, the format is tab separated and contains the fields we specified in our our `customOutputFormat`.
-*)
 (**
+As you can see in the result file, the format is tab-separated and contains the fields we specified in our `customOutputFormat`.
 |# BLASTP 2.2.31+
 |# Query: >gi|7525013|ref|NP_051039.1| photosystem II protein D1 (chloroplast) [Arabidopsis thaliana]
diff --git a/docsrc/content/GFF3.fsx b/docsrc/content/GFF3.fsx
index 89ee7cfa..26c5ccec 100644
--- a/docsrc/content/GFF3.fsx
+++ b/docsrc/content/GFF3.fsx
@@ -101,7 +101,7 @@ _Note: The order of key value pairs in field 9 (attributes) may be changed._
 
 ///Takes a seq>, a FASTA converter and a destination filepath and writes it into a .gff. Hint: Use converter = id if no FastA sequence is included.
 (*** do-not-eval ***)
-let gffExampleWrite = GFF3.write features BioItem.symbol (__SOURCE_DIRECTORY__ + "/data/gffExampleWrite.gff")
+let gffExampleWrite = GFF3.write BioItem.symbol (__SOURCE_DIRECTORY__ + "/data/gffExampleWrite.gff") features
 
 (**
 ##Example: Sequence of CDS
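Reviewer note on the corrected `GFF3.write` call above, not part of the patch: a one-line sketch of the same call with converter = id, as the hint suggests for GFF data that carries no FastA block; `featuresWithoutSequence` is an assumed placeholder value.

    (*** do-not-eval ***)
    let gffExampleWriteNoFasta = GFF3.write id (__SOURCE_DIRECTORY__ + "/data/gffExampleWriteNoFasta.gff") featuresWithoutSequence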
diff --git a/docsrc/content/release-notes.md b/docsrc/content/release-notes.md
index ead8fd79..bb6fa510 100644
--- a/docsrc/content/release-notes.md
+++ b/docsrc/content/release-notes.md
@@ -1,3 +1,81 @@
+#### 0.1.0 - Thursday, May 30, 2019
+Several bugfixes and additions to multiple sub projects:
+
+* **BioFSharp** (core):
+    * Additional functionality:
+      * [GravyScore](https://github.com/CSBiology/BioFSharp/commit/209c3497e3cdb1db56a0675e0f2a76634a6dbe7a) (Grand Average of Hydropathy) as additional amino acid property
+
+* **BioFSharp.IO**:
+    * Additional functionality:
+      * [GAF Parser](https://github.com/CSBiology/BioFSharp/commit/cbba6a41a9b239e26467f32048aaec3335373faf) for GO Annotation file format: [Gene Association File](http://geneontology.org/docs/go-annotation-file-gaf-format-2.0/)
+      * [Uniprot style Fasta header parser](https://github.com/CSBiology/BioFSharp/commit/f2a16aaa2456b0c431f6d50d0f78a12834671e97)
+      * [FastA to GFF3 converter functions](https://github.com/CSBiology/BioFSharp/commit/2cdd3537398040e1508736bd734b22a67a7c46e7)
+      * [GFF3 Pretty Printer](https://github.com/CSBiology/BioFSharp/commit/eaaa59fbd382721e75fbb9c6558b0ba2ff6afb00) 
+
+    * BugFixes:
+      * [Fix OboParser](https://github.com/CSBiology/BioFSharp/commit/0354c9c13e7a4692f2ab61b80ef86ac8f5bd83c3) taking only the last occurrence of the alt_id keyword and discarding previous ones.
+      * Fix [Fasta](https://github.com/CSBiology/BioFSharp/commit/aff8eff849deb1cca411faf3c640d53f6e410497) and [GFF3](https://github.com/CSBiology/BioFSharp/commit/d0f059ab899c715a37b7f50318292c8a81f18dd9) writers appending to files instead of recreating them
+
+* **BioFSharp.BioTools**:
+    * Additional functionality:
+      * [TMHMM biocontainer API wrapper](https://github.com/CSBiology/BioFSharp/commit/f11cb122df29ccaa0809d3c3c951294a1b645e0f) ([TMHMM](http://www.cbs.dtu.dk/services/TMHMM/) 2.0c predicts transmembrane helices in proteins)
+      * [FastP and Hera biocontainer API wrapper](https://github.com/CSBiology/BioFSharp/commit/28b7654d57824bcdfdb8bae16af2f0f706ed60ad)
+      * [IntaRNA biocontainer API wrapper](https://github.com/CSBiology/BioFSharp/commit/a659496179cd754fbea2fe9ef4030544a35eb68b) ([IntaRNA](https://github.com/BackofenLab/IntaRNA) is a tool for prediction of various nucleotide interactions)
+
+    * BugFixes:
+      * Fix Stream entry closed error in [BioContainer.tarOfStream()](https://github.com/CSBiology/BioFSharp/commit/20f8973ea717208627ef5a7ea0b72cbaecb4103c)
+
+* **BioFSharp.ImgP**:
+    * Additional functionality:
+      * [update 3DCWT correlation calculation to loop](https://github.com/CSBiology/BioFSharp/commit/0b4ffe93755d915da64f4231199b0ec54d4d6c4d)
+
+    * Bugfixes:
+      * [fix height adjustment in ricker](https://github.com/CSBiology/BioFSharp/commit/abab82be1ac9fa0c540acfb5f3ccc6bd5143df1f)
+      * [fix Ricker values for discrete time points](https://github.com/CSBiology/BioFSharp/commit/2bb6bb2b67ea43df2d9fe970bd1445e568df53d4)
+
+#### 0.0.16 - Thursday, March 7, 2019
+ * Fix template files for all nuget packages: 
+   * use correct paths for binaries
+   * update project descriptions
+   * use type project to infer dependencies
+
+#### 0.0.15 - Thursday, March 7, 2019
+ * BioFSharp.ML - project introduction:
+   * Usage of Microsoft's CNTK library with a biological focus:
+   * This project comes with working CNTK integration:
+      * necessary dependencies to use CNTK and its unmanaged libraries
+      * CNTK loadscript: an easy way to load all dependencies of CNTK. Load the script and use the resolveCNTKDependencies() function.
+   * Built-in deep neural network 'd::ppop' ([publication](https://www.frontiersin.org/articles/10.3389/fpls.2018.01559/full))
+     * predict peptide observability in mass spectrometry
+     * Classification: functions to determine peptide feature vectors as input for dppop
+     * Prediction: functions to predict peptide observability using the feature vectors prepared in Classification.
+     * NonPlant and Plant models as embedded resources: the two models used in the original Web API. Additionally, custom models can be loaded.
+ * Other additions:
+   * BioFSharp.BioTools:
+     * Integration of new tools as biocontainer APIs:
+       * Hera
+       * FastP
+       * ClustalO
+       * HMMER (experimental):
+         * hmmbuild
+         * hmmalign
+         * hmmsearch
+         * hmmscan
+         * hmmemit
+         * hmmpress
+         * hmmconvert
+
+#### 0.0.14 - Tuesday, February 12, 2019
+ * Addition of blast biocontainer support for makeblastdb, blastp, blastn with full parameter wrapping
+ * Extension of BioContainer functionality:
+   * Add Windows/Unix path conversions and subpath matching to MountInfo
+   * Add execReturnAsync (returns stdout of docker container) and execAsync (redirects stdout/stderr of container)
+
+#### 0.0.13 - Friday, February 8, 2019
+* Addition of the BioTools project, which uses Docker.DotNet to work with docker images and stream their output to fsi
+* Low level wrapping of multiple docker.dotnet functions for F#
+* Basic functionality for using docker images from fsi
+
 #### 0.0.12 - Friday, December 28, 2018
 * Addition of Pretty Printers, SOFT Parser, GEOFTP functions
 * Improvement and refactoring of Modification functionality
diff --git a/paket.dependencies b/paket.dependencies
index 8874ee40..34c78cc4 100644
--- a/paket.dependencies
+++ b/paket.dependencies
@@ -1,4 +1,3 @@
-
 framework: auto-detect
 
 source https://nuget.org/api/v2
@@ -17,6 +16,11 @@ nuget Microsoft.Xaml
 nuget SwaggerProvider
 nuget YamlDotNet
 
+nuget docker.dotnet
+nuget SharpZipLib
+
+nuget CNTK.CPUOnly
+
 git https://github.com/CSBiology/FSharpAux.git nuget Packages: /
 nuget FSharpAux
 nuget FSharpAux.IO
@@ -28,6 +32,7 @@ nuget FSharp.Stats.MSF
 
 clitool dotnet-fake
 
+
 group BioDB
     framework:net45
     source https://nuget.org/api/v2
diff --git a/paket.lock b/paket.lock
index 70afd277..80c8fb82 100644
--- a/paket.lock
+++ b/paket.lock
@@ -2,7 +2,7 @@ RESTRICTION: || (== net45) (== net47) (== netstandard2.0)
 NUGET
   remote: https://www.nuget.org/api/v2
     Alea (3.0.4)
-    Argu (5.1)
+    Argu (5.2)
       FSharp.Core (>= 4.0.0.1) - restriction: || (== net45) (== net47) (&& (== netstandard2.0) (>= net45))
       FSharp.Core (>= 4.3.2) - restriction: || (&& (== net47) (< net45) (>= netstandard2.0)) (== netstandard2.0)
       System.Configuration.ConfigurationManager (>= 4.4) - restriction: || (&& (== net47) (< net45) (>= netstandard2.0)) (== netstandard2.0)
@@ -21,32 +21,30 @@ NUGET
       System.ValueTuple (>= 4.4) - restriction: || (&& (== net45) (>= net46)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
       System.Xml.XmlSerializer (>= 4.3) - restriction: || (&& (== net45) (>= net46)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
       System.Xml.XPath.XmlDocument (>= 4.3) - restriction: || (&& (== net45) (>= net46)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
-    CommandLineParser (2.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
-      System.Collections (>= 4.0.11-rc2-24027) - restriction: || (&& (== net45) (>= netstandard1.5)) (== net47) (== netstandard2.0)
-      System.Console (>= 4.0.0-rc2-24027) - restriction: || (&& (== net45) (>= netstandard1.5)) (== net47) (== netstandard2.0)
-      System.Diagnostics.Debug (>= 4.0.11-rc2-24027) - restriction: || (&& (== net45) (>= netstandard1.5)) (== net47) (== netstandard2.0)
-      System.Globalization (>= 4.0.11-rc2-24027) - restriction: || (&& (== net45) (>= netstandard1.5)) (== net47) (== netstandard2.0)
-      System.IO (>= 4.1.0-rc2-24027) - restriction: || (&& (== net45) (>= netstandard1.5)) (== net47) (== netstandard2.0)
-      System.Linq (>= 4.1.0-rc2-24027) - restriction: || (&& (== net45) (>= netstandard1.5)) (== net47) (== netstandard2.0)
-      System.Linq.Expressions (>= 4.0.11-rc2-24027) - restriction: || (&& (== net45) (>= netstandard1.5)) (== net47) (== netstandard2.0)
-      System.Reflection (>= 4.1.0-rc2-24027) - restriction: || (&& (== net45) (>= netstandard1.5)) (== net47) (== netstandard2.0)
-      System.Reflection.Extensions (>= 4.0.1-rc2-24027) - restriction: || (&& (== net45) (>= netstandard1.5)) (== net47) (== netstandard2.0)
-      System.Reflection.TypeExtensions (>= 4.1.0-rc2-24027) - restriction: || (&& (== net45) (>= netstandard1.5)) (== net47) (== netstandard2.0)
-      System.Resources.ResourceManager (>= 4.0.1-rc2-24027) - restriction: || (&& (== net45) (>= netstandard1.5)) (== net47) (== netstandard2.0)
-      System.Runtime (>= 4.1.0-rc2-24027) - restriction: || (&& (== net45) (>= netstandard1.5)) (== net47) (== netstandard2.0)
-      System.Runtime.Extensions (>= 4.1.0-rc2-24027) - restriction: || (&& (== net45) (>= netstandard1.5)) (== net47) (== netstandard2.0)
-    dotnet-fake (5.10.1) - clitool: true
-    Expecto (8.6)
-      Argu (>= 5.1) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
-      Mono.Cecil (>= 0.10) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
+    CNTK.CPUOnly (2.6)
+      CNTK.Deps.MKL (2.6)
+      CNTK.Deps.OpenCV.Zip (2.6)
+    CNTK.Deps.MKL (2.6)
+    CNTK.Deps.OpenCV.Zip (2.6)
+    CommandLineParser (2.4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
+    Docker.DotNet (3.125.2)
+      Newtonsoft.Json (>= 9.0.1)
+      System.Buffers (>= 4.3) - restriction: || (&& (== net45) (>= net46)) (== net47) (&& (== netstandard2.0) (>= net46))
+      System.Buffers (>= 4.4) - restriction: || (&& (== net47) (< net45) (>= netstandard2.0)) (== netstandard2.0)
+      System.Runtime (>= 4.3) - restriction: || (&& (== net45) (>= net46)) (== net47) (&& (== netstandard2.0) (>= net46))
+      System.ValueTuple (>= 4.4)
+    dotnet-fake (5.12) - clitool: true
+    Expecto (8.8)
+      Argu (>= 5.2) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
+      Mono.Cecil (>= 0.10.1) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
       System.Diagnostics.FileVersionInfo (>= 4.3) - restriction: || (&& (== net45) (>= netstandard2.0)) (&& (== net47) (< net461) (>= netstandard2.0)) (== netstandard2.0)
-    Expecto.BenchmarkDotNet (8.6)
+    Expecto.BenchmarkDotNet (8.8)
       BenchmarkDotNet (>= 0.10.14) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
       FSharp.Core (>= 4.3.4) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
-    Expecto.FsCheck (8.6)
-      Expecto (>= 8.6) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
-      FsCheck (>= 2.10.4) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
-    Expecto.VisualStudio.TestAdapter (10.0.0) - version_in_path: true
+    Expecto.FsCheck (8.8)
+      Expecto (>= 8.8) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
+      FsCheck (>= 2.13) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
+    Expecto.VisualStudio.TestAdapter (10.0.1) - version_in_path: true
       Expecto (>= 8.0 < 9.0) - restriction: || (&& (== net45) (>= net461)) (== net47) (&& (== netstandard2.0) (>= net461))
       FSharp.Core (>= 4.0 < 5.0) - restriction: || (&& (== net45) (>= net461)) (== net47) (&& (== netstandard2.0) (>= net461))
       Microsoft.TestPlatform.ObjectModel (>= 15.0 < 16.0) - restriction: || (&& (== net45) (>= net461)) (== net47) (&& (== netstandard2.0) (>= net461))
@@ -54,11 +52,11 @@ NUGET
       Newtonsoft.Json (>= 10.0 < 11.0) - restriction: || (&& (== net45) (>= net461)) (== net47) (&& (== netstandard2.0) (>= net461))
     FsCheck (2.13) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
       FSharp.Core (>= 4.2.3) - restriction: || (&& (== net45) (>= net452)) (&& (== net45) (>= netstandard1.6)) (== net47) (== netstandard2.0)
-    FSharp.Core (4.5.4)
+    FSharp.Core (4.6.1)
     FSharp.Data.TypeProviders (5.0.0.6)
       FSharp.Core (>= 3.1.2.5)
     FSharp.Plotly (1.1.21)
-    Microsoft.CodeAnalysis.Analyzers (2.6.2) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
+    Microsoft.CodeAnalysis.Analyzers (2.6.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
     Microsoft.CodeAnalysis.Common (2.10) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
       Microsoft.CodeAnalysis.Analyzers (>= 2.6.1) - restriction: || (&& (== net45) (>= netstandard1.3)) (== net47) (== netstandard2.0)
       System.AppContext (>= 4.3) - restriction: || (&& (== net45) (>= netstandard1.3)) (== net47) (== netstandard2.0)
@@ -104,7 +102,7 @@ NUGET
       Microsoft.CodeAnalysis.Common (2.10)
     Microsoft.DotNet.PlatformAbstractions (2.1) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
       System.Runtime.InteropServices.RuntimeInformation (>= 4.0)
-    Microsoft.NETCore.Platforms (2.1.2) - restriction: || (&& (== net45) (>= netstandard2.0)) (&& (== net47) (< net46) (>= netstandard2.0)) (&& (== net47) (< netstandard1.3) (>= netstandard2.0)) (== netstandard2.0)
+    Microsoft.NETCore.Platforms (2.2) - restriction: || (&& (== net45) (>= netstandard2.0)) (&& (== net47) (< net46) (>= netstandard2.0)) (&& (== net47) (< netstandard1.3) (>= netstandard2.0)) (== netstandard2.0)
     Microsoft.NETCore.Targets (2.1) - restriction: || (&& (== net45) (>= netstandard2.0) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45) (>= netstandard2.0)) (&& (== net47) (>= netstandard2.0) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
     Microsoft.TestPlatform.ObjectModel (15.9.0) - version_in_path: true, restriction: || (&& (== net45) (>= net461)) (== net47) (&& (== netstandard2.0) (>= net461))
       System.Reflection.Metadata (>= 1.3) - restriction: || (&& (== net45) (>= net451)) (&& (== net45) (>= netstandard1.5)) (== net47) (== netstandard2.0)
@@ -113,13 +111,14 @@ NUGET
       System.Security.AccessControl (>= 4.5) - restriction: || (&& (== net45) (>= monoandroid)) (&& (== net45) (>= monotouch)) (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (&& (== net45) (>= netstandard2.0)) (&& (== net45) (>= xamarinios)) (&& (== net45) (>= xamarinmac)) (&& (== net45) (>= xamarintvos)) (&& (== net45) (>= xamarinwatchos)) (== net47) (== netstandard2.0)
       System.Security.Principal.Windows (>= 4.5) - restriction: || (&& (== net45) (>= monoandroid)) (&& (== net45) (>= monotouch)) (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (&& (== net45) (>= netstandard2.0)) (&& (== net45) (>= xamarinios)) (&& (== net45) (>= xamarinmac)) (&& (== net45) (>= xamarintvos)) (&& (== net45) (>= xamarinwatchos)) (== net47) (== netstandard2.0)
     Microsoft.Xaml (4.0.0.1)
-    Mono.Cecil (0.10.1) - version_in_path: true, restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
+    Mono.Cecil (0.10.3) - version_in_path: true, restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
     Newtonsoft.Json (10.0.3) - version_in_path: true
+    SharpZipLib (1.1)
     SwaggerProvider (0.8.2)
       FSharp.Core (>= 4.0)
       Newtonsoft.Json (>= 10.0)
     System.AppContext (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
-    System.Buffers (4.5) - restriction: || (&& (== net47) (>= monoandroid) (>= netstandard2.0)) (&& (== net47) (>= monotouch) (>= netstandard2.0)) (&& (== net47) (< net45) (>= netstandard2.0)) (&& (== net47) (< netstandard1.1) (>= netstandard2.0)) (&& (== net47) (>= netstandard2.0) (>= wpa81)) (&& (== net47) (>= netstandard2.0) (>= xamarinmac)) (&& (== net47) (>= netstandard2.0) (>= xamarintvos)) (&& (== net47) (>= netstandard2.0) (>= xamarinwatchos)) (&& (== net47) (>= xamarinios)) (== netstandard2.0)
+    System.Buffers (4.5) - restriction: || (&& (== net45) (>= net46)) (== net47) (== netstandard2.0)
     System.Collections (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
     System.Collections.Concurrent (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
     System.Collections.Immutable (1.5) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
@@ -166,10 +165,10 @@ NUGET
     System.Linq (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
     System.Linq.Expressions (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
     System.Management (4.5) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
-    System.Memory (4.5.1) - restriction: || (&& (== net47) (< net45) (>= netstandard2.0)) (== netstandard2.0)
+    System.Memory (4.5.2) - restriction: || (&& (== net47) (< net45) (>= netstandard2.0)) (== netstandard2.0)
       System.Buffers (>= 4.4)
       System.Numerics.Vectors (>= 4.4) - restriction: || (&& (== net45) (>= net461)) (== net47) (== netstandard2.0)
-      System.Runtime.CompilerServices.Unsafe (>= 4.5)
+      System.Runtime.CompilerServices.Unsafe (>= 4.5.2)
     System.Numerics.Vectors (4.5) - restriction: || (&& (== net47) (< net45) (>= netstandard2.0)) (== netstandard2.0)
     System.Reflection (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp1.1) (>= netstandard2.0)) (&& (== net45) (>= netcoreapp2.0)) (&& (== net45) (>= netstandard2.0) (< portable-net45+win8+wpa81)) (== net47) (== netstandard2.0)
       Microsoft.NETCore.Platforms (>= 1.1) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
@@ -178,18 +177,14 @@ NUGET
       System.Reflection.Primitives (>= 4.3) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
       System.Runtime (>= 4.3) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
     System.Reflection.Emit.Lightweight (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
-    System.Reflection.Extensions (4.3) - restriction: || (&& (== net45) (>= net461) (>= netstandard1.5)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
     System.Reflection.Metadata (1.6) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
       System.Collections.Immutable (>= 1.5)
     System.Reflection.Primitives (4.3) - restriction: || (&& (== net45) (>= netcoreapp1.1) (>= netstandard2.0)) (&& (== net45) (>= netstandard2.0) (< portable-net45+win8+wpa81)) (&& (== net47) (< net45) (>= netstandard2.0)) (&& (== net47) (>= netcoreapp1.1) (>= netstandard2.0)) (&& (== net47) (>= netstandard2.0) (< portable-net45+win8+wpa81)) (== netstandard2.0)
       Microsoft.NETCore.Platforms (>= 1.1) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
       Microsoft.NETCore.Targets (>= 1.1) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
       System.Runtime (>= 4.3) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
-    System.Reflection.TypeExtensions (4.5.1) - restriction: || (&& (== net45) (>= net461) (>= netstandard1.5)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
     System.Resources.ResourceManager (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
-    System.Runtime (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
-      Microsoft.NETCore.Platforms (>= 1.1) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
-      Microsoft.NETCore.Targets (>= 1.1) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
+    System.Runtime (4.3) - restriction: || (&& (== net45) (>= net46)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
     System.Runtime.CompilerServices.Unsafe (4.5.2) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (== netstandard2.0)
     System.Runtime.Extensions (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
       Microsoft.NETCore.Platforms (>= 1.1) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
@@ -229,19 +224,19 @@ NUGET
       Microsoft.NETCore.Platforms (>= 1.1) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
       Microsoft.NETCore.Targets (>= 1.1) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
       System.Runtime (>= 4.3) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
-    System.Text.Encoding.CodePages (4.5) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
-      System.Runtime.CompilerServices.Unsafe (>= 4.5) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
+    System.Text.Encoding.CodePages (4.5.1) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
+      System.Runtime.CompilerServices.Unsafe (>= 4.5.2) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (&& (== net45) (>= netstandard2.0)) (== net47) (== netstandard2.0)
     System.Text.Encoding.Extensions (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
     System.Threading (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
     System.Threading.Tasks (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (&& (== net45) (>= netstandard2.0) (< portable-net45+win8+wp8+wpa81)) (== net47) (== netstandard2.0)
       Microsoft.NETCore.Platforms (>= 1.1) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
       Microsoft.NETCore.Targets (>= 1.1) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
       System.Runtime (>= 4.3) - restriction: || (&& (== net45) (< portable-net45+win8+wp8+wpa81)) (&& (== net47) (< net45)) (&& (== net47) (< portable-net45+win8+wp8+wpa81)) (== netstandard2.0)
-    System.Threading.Tasks.Extensions (4.5.1) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
-      System.Runtime.CompilerServices.Unsafe (>= 4.5)
+    System.Threading.Tasks.Extensions (4.5.2) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
+      System.Runtime.CompilerServices.Unsafe (>= 4.5.2)
     System.Threading.Tasks.Parallel (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
     System.Threading.Thread (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
-    System.ValueTuple (4.5) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
+    System.ValueTuple (4.5)
     System.Xml.ReaderWriter (4.3.1) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
     System.Xml.XDocument (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
     System.Xml.XmlDocument (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
@@ -252,19 +247,19 @@ NUGET
     System.Xml.XPath.XmlDocument (4.3) - restriction: || (&& (== net45) (>= net461)) (&& (== net45) (>= netcoreapp2.0)) (== net47) (&& (== netstandard2.0) (>= net461)) (&& (== netstandard2.0) (>= netcoreapp2.0))
       System.Xml.XmlDocument (>= 4.3) - restriction: || (&& (== net45) (>= net46)) (&& (== net45) (>= netstandard1.3)) (== net47) (== netstandard2.0)
       System.Xml.XPath (>= 4.3) - restriction: || (&& (== net45) (>= net46)) (&& (== net45) (>= netstandard1.3)) (== net47) (== netstandard2.0)
-    YamlDotNet (5.2.1)
+    YamlDotNet (5.3)
   remote: paket-files/github.com/CSBiology/FSharp.Stats
-    FSharp.Stats (0.0.1)
-    FSharp.Stats.MSF (0.0.1)
+    FSharp.Stats (0.0.13)
+    FSharp.Stats.MSF (0.0.13)
   remote: paket-files/github.com/CSBiology/FSharpAux
-    FSharpAux (0.0.12)
-    FSharpAux.IO (0.0.12)
+    FSharpAux (0.0.13)
+    FSharpAux.IO (0.0.13)
 GIT
   remote: https://github.com/CSBiology/FSharpAux.git
-     (8b77b4f32aaa1f09c5cf436a61f6c446c27ba28e)
+     (9b35d33d818bdea8da54b75a060c309b3ed509bf)
       path: /
   remote: https://github.com/CSBiology/FSharp.Stats.git
-     (4bf70fc0ddfb288cb7f166382b4c3d41b14bf415)
+     (e2ab6cf7746aadf50e01aa98a6666fdac5aefbc1)
       path: /
 GROUP BioDB
 RESTRICTION: == net45
diff --git a/paket.references b/paket.references
index 999041f6..2492401e 100644
--- a/paket.references
+++ b/paket.references
@@ -1 +1,2 @@
-dotnet-fake
\ No newline at end of file
+dotnet-fake
+docker.dotnet
\ No newline at end of file
diff --git a/src/BioFSharp.BioDB/AssemblyInfo.fs b/src/BioFSharp.BioDB/AssemblyInfo.fs
index 4d0a8f68..0357f92e 100644
--- a/src/BioFSharp.BioDB/AssemblyInfo.fs
+++ b/src/BioFSharp.BioDB/AssemblyInfo.fs
@@ -5,8 +5,8 @@ open System.Reflection
 []
 []
 [")>]
-[]
-[]
+[]
+[]
 []
 do ()
 
@@ -14,6 +14,6 @@ module internal AssemblyVersionInformation =
     let [] AssemblyTitle = "BioFSharp.BioDB"
     let [] AssemblyProduct = "BioFSharp"
     let [] AssemblyDescription = "An open source bioinformatics toolbox written in F#. "
-    let [] AssemblyVersion = "0.0.12"
-    let [] AssemblyFileVersion = "0.0.12"
+    let [] AssemblyVersion = "0.1.0"
+    let [] AssemblyFileVersion = "0.1.0"
     let [] AssemblyConfiguration = "Release"
diff --git a/src/BioFSharp.BioDB/BioFSharp.BioDB.fsproj b/src/BioFSharp.BioDB/BioFSharp.BioDB.fsproj
index 096015ae..01fc5fc3 100644
--- a/src/BioFSharp.BioDB/BioFSharp.BioDB.fsproj
+++ b/src/BioFSharp.BioDB/BioFSharp.BioDB.fsproj
@@ -1,4 +1,4 @@
-
+
   
     net45
     BioFSharp.BioDB
@@ -20,6 +20,7 @@
     
     
     
+    
     
     
     
diff --git a/src/BioFSharp.BioDB/paket.template b/src/BioFSharp.BioDB/paket.template
index ccfea2ea..7b61152d 100644
--- a/src/BioFSharp.BioDB/paket.template
+++ b/src/BioFSharp.BioDB/paket.template
@@ -1,9 +1,10 @@
 type project
+id BioFSharp.BioDB
 title
     BioFSharp.BioDB
 owners
     Timo Mühlhaus
-authors
+authors 
     Timo Mühlhaus
 projectUrl
     https://github.com/CSBiology/BioFSharp
@@ -13,11 +14,17 @@ licenseUrl
     https://github.com/CSBiology/BioFSharp/blob/master/LICENSE.txt
 requireLicenseAcceptance
     false
+language
+    F#
 copyright
-    Copyright 2018
+    Copyright 2019
 tags
-    F# FSharp bioinformatics
+    bioinformatics F# fsharp database-access
 summary
-    F# FSharp bioinformatics
+    APIs for querying common biological databases
 description
-    F# FSharp bioinformatics
+    APIs for querying common biological databases
+include-referenced-projects 
+    true
+files
+    ../../bin/BioFSharp.BioDB ==> lib
diff --git a/src/BioFSharp.BioTools/AssemblyInfo.fs b/src/BioFSharp.BioTools/AssemblyInfo.fs
new file mode 100644
index 00000000..851e6769
--- /dev/null
+++ b/src/BioFSharp.BioTools/AssemblyInfo.fs
@@ -0,0 +1,19 @@
+// Auto-Generated by FAKE; do not edit
+namespace System
+open System.Reflection
+
+[]
+[]
+[")>]
+[]
+[]
+[]
+do ()
+
+module internal AssemblyVersionInformation =
+    let [] AssemblyTitle = "BioFSharp.BioTools"
+    let [] AssemblyProduct = "BioFSharp"
+    let [] AssemblyDescription = "An open source bioinformatics toolbox written in F#. "
+    let [] AssemblyVersion = "0.1.0"
+    let [] AssemblyFileVersion = "0.1.0"
+    let [] AssemblyConfiguration = "Release"
diff --git a/src/BioFSharp.BioTools/BioContainer.fs b/src/BioFSharp.BioTools/BioContainer.fs
new file mode 100644
index 00000000..78ad1c0f
--- /dev/null
+++ b/src/BioFSharp.BioTools/BioContainer.fs
@@ -0,0 +1,350 @@
+namespace BioFSharp.BioTools
+
+open System
+open System.Threading
+open Docker.DotNet
+open Docker.DotNet.Models
+
+/// BioContainer helper
+module BioContainer =
+    
+    open Docker
+    open System.IO
+
+    //[]
+    type MountInfo =        
+        | NoMount        
+        | HostDir of string
+        
+        override this.ToString() =
+            match this with
+            | NoMount              -> "NoMount"
+            | HostDir _ -> sprintf "%s:%s" (MountInfo.getHostDir this) (MountInfo.getContainerPath this)      
+         
+        ///get the full mounted unix path used in the container 
+        static member getContainerPath (hd:MountInfo) =
+            match hd with
+            | NoMount               -> failwithf "No mount directory set."
+            | HostDir hostdirectory -> 
+                if hostdirectory.Contains(" ") then 
+                    failwithf "paths mounted to docker cannot contain spaces.\r\nThe path %s contains spaces." hostdirectory
+                else
+                    sprintf "/data/%s" ((Path.GetFullPath(hostdirectory).Replace(":","")) |> BioContainerIO.toUnixDirectorySeparator )
+
+        ///get the path of the windows host directory used to mount in the container
+        static member getHostDir (hd:MountInfo) =
+            match hd with
+            | NoMount               -> failwithf "No mount directory set."
+            | HostDir hostdirectory -> Path.GetFullPath (hostdirectory)
+
+        ///get the container full mounted unix path of a file in a subfolder of the mounted host directory
+        static member containerPathOf (m:MountInfo) (filePath:string) =
+            let winDir          = MountInfo.getHostDir m
+            let containerBase   = MountInfo.getContainerPath m
+
+            //spaces not supported in unix paths
+            if filePath.Contains(" ") then
+                failwithf "paths mounted to docker cannot contain spaces.\r\nThe path %s contains spaces." filePath
+            else
+                //the given path is relative
+                if filePath.StartsWith(".") then
+                    let fullFilePath = 
+                        //get absolute combined path
+                        Path.Combine(containerBase,filePath)
+                        |> Path.GetFullPath
+                        |> BioContainerIO.toUnixDirectorySeparator
+
+                    //check that combined path does not go above base (eg base/../../)
+                    if (fullFilePath.StartsWith(containerBase)) then
+                        fullFilePath |> BioContainerIO.toUnixDirectorySeparator
+                    else
+                        failwithf ("the relative path \r\n%s\r\n escapes the scope of the container base path \r\n%s\r\n. the combined path is:\r\n%s\r\n") filePath containerBase fullFilePath
+
+                else
+                    //Path is not relative. Use Path functions to resolve ../ and check if absolute path is a subpath of the windows base path
+                    // TO-DO: make subpath matching case-insensitive because that is how it works on the Windows side
+                    let fullFilePath = filePath |> Path.GetFullPath
+                    if fullFilePath.StartsWith(winDir) then
+                        //if absolute windows path is correct, replace it with the containerbase
+                        fullFilePath.Replace(winDir,containerBase)
+                        |> fun x -> x.Replace(":","")
+                        |> BioContainerIO.toUnixDirectorySeparator
+                    else 
+                        failwithf "The given path \r\n%s\r\n is not a subpath of the mounted host directory \r\n%s\r\n. If you want to use relative paths start them with ./" fullFilePath winDir
+                        
+
+
+            
+
+
+            
+
+
+    type BcContext = {
+        Id          : Guid
+        Connection  : DockerClient
+        ImageName   : string
+        ContainerId : string
+        Mount       : MountInfo
+        }
+        
+   
+    /// Connect to docker engine (docker daemon)
+    let connect str =
+        (new DockerClientConfiguration(new Uri(str)) ).CreateClient()
+
+
+    /// Connect to default local docker engine (docker daemon: "npipe://./pipe/docker_engine")
+    let connectLocalDefault () =
+        // TODO: Use System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(OSPlatform.Linux)
+        connect "npipe://./pipe/docker_engine"
+        
+
+
+    /// Runs a container of a specified image and keeps it running
+    let initBcContextAsync (connection:DockerClient)  (image: DockerId) =
+        if not (Docker.Image.exists connection image) then failwithf "Image %s does not exist! Please pull the image first." (string image)
+        async {
+            let! container =
+                let param = Docker.Container.ContainerParams.InitCreateContainerParameters(User="root",Image=string image,OpenStdin=true)
+                Docker.Container.createContainerWithAsync connection param      
+        
+            let! isRunning =
+                let param = 
+                    Docker.Container.ContainerParams.InitContainerStartParameters()
+
+                Docker.Container.startContainerWithAsync connection param container.ID
+                
+            return {Id=Guid.NewGuid();Connection=connection;ImageName=string image;ContainerId=container.ID;Mount=MountInfo.NoMount}
+            } 
+
+
+    /// Runs a container of a specified image and keeps it running on the local default docker engine
+    let initBcContextLocalDefaultAsync  (image: DockerId) =
+        let client = connectLocalDefault () 
+        initBcContextAsync client image
+
+
+    /// Runs a container of a specified image and keeps it running. Bind mounts the host directory under /data/ (with ':' removed, according to BioContainer standards).
+    let initBcContextWithMountAsync (connection:DockerClient) (image: DockerId) (hostdirectory:string) =
+        if not (Docker.Image.exists connection image) then failwithf "Image %s does not exist! Please pull the image first." (string image)
+        let hd = MountInfo.HostDir hostdirectory
+        async {
+            let! container = // volume  bind
+                
+                let hostdirectory' = MountInfo.getHostDir hd 
+                let target = MountInfo.getContainerPath hd  //sprintf "/data/%s" (hostdirectory'.ToLower().Replace(":",""))
+                let mount = Docker.Container.ContainerParams.InitMount(Type="bind",Source=hostdirectory',Target=target,ReadOnly=false)
+                let hc    = Docker.Container.ContainerParams.InitHostConfig(Mounts=[mount])
+                let param = Docker.Container.ContainerParams.InitCreateContainerParameters(User="root",HostConfig=hc,Image=string image,OpenStdin=true)
+                Docker.Container.createContainerWithAsync connection param      
+        
+            let! isRunning =
+                let param = 
+                    Docker.Container.ContainerParams.InitContainerStartParameters()
+
+                Docker.Container.startContainerWithAsync connection param container.ID
+                
+            return {Id=Guid.NewGuid();Connection=connection;ImageName=string image;ContainerId=container.ID;Mount=hd}
+            } 
+
+    /// Executes a command in the biocontainer context and returns either the standard output of the container or, if stdout is empty, its standard error
+    let execReturnAsync (bc:BcContext) cmd =
+        async {
+        
+            let! execContainer =
+                let param = 
+                    Docker.Container.ContainerParams.InitContainerExecCreateParameters(                                        
+                        AttachStderr=true,
+                        AttachStdout=true,                
+                        AttachStdin=false,
+                        Cmd=cmd,
+                        Detach=false                    
+                        )
+
+                Docker.Container.execCreateContainerAsync bc.Connection param (bc.ContainerId)
+
+            let! stream =
+                let param = 
+                    Docker.Container.ContainerParams.InitContainerExecStartParameters(
+                        AttachStderr=true,
+                        AttachStdout=true,                
+                        AttachStdin=false,                   
+                        Cmd=cmd
+                        )                
+                Docker.Container.startContainerWithExecConfigAsync bc.Connection param execContainer.ID
+
+
+            let stdOutputStream = new System.IO.MemoryStream()
+            let stdErrStream = new System.IO.MemoryStream()
+            let streamTask =
+                stream.CopyOutputToAsync(null,stdOutputStream,stdErrStream,CancellationToken.None)             
+                
+            do! streamTask |> Async.AwaitTask
+
+
+            let result =        
+                if stdOutputStream.Length < 1L then
+                    stdErrStream.Position <- 0L
+                    BioContainerIO.readFrom stdErrStream
+                else
+                    stdOutputStream.Position <- 0L
+                    BioContainerIO.readFrom stdOutputStream
+                    
+            if stdErrStream.Length > 0L then
+                stdErrStream.Position <- 0L
+                System.Console.Error.Write(BioContainerIO.readFrom stdErrStream)
+
+            return result
+    
+        } 
+  
+    /// Executes a command in the biocontainer context. Passes stdout and stderr of the container to the host's stdout/stderr.
+    let execAsync (bc:BcContext) cmd =
+        async {
+        
+            let! execContainer =
+                let param = 
+                    Docker.Container.ContainerParams.InitContainerExecCreateParameters(                                        
+                        AttachStderr=true,
+                        AttachStdout=true,                
+                        AttachStdin=false,
+                        Cmd=cmd,
+                        Detach=false                    
+                        )
+
+                Docker.Container.execCreateContainerAsync bc.Connection param (bc.ContainerId)
+
+            let! stream =
+                let param = 
+                    Docker.Container.ContainerParams.InitContainerExecStartParameters(
+                        AttachStderr=true,
+                        AttachStdout=true,                
+                        AttachStdin=false,                   
+                        Cmd=cmd
+                        )                
+                Docker.Container.startContainerWithExecConfigAsync bc.Connection param execContainer.ID
+
+
+            let stdOutputStream = new System.IO.MemoryStream()
+            let stdErrStream = new System.IO.MemoryStream()
+            let streamTask =
+                stream.CopyOutputToAsync(null,stdOutputStream,stdErrStream,CancellationToken.None)             
+                
+            do! streamTask |> Async.AwaitTask
+
+
+
+            if stdErrStream.Length > 0L then
+                stdErrStream.Position <- 0L
+                System.Console.Error.Write(BioContainerIO.readFrom stdErrStream)
+
+            if stdOutputStream.Length > 0L then
+                stdOutputStream.Position <- 0L
+                System.Console.Write(BioContainerIO.readFrom stdOutputStream)
+                    
+            return ()
+    
+        } 
+        
+
+    /// Disposes the biocontainer context (stops and removes the underlying container)
+    let disposeAsync (bc:BcContext) =
+        let param = Docker.Container.ContainerParams.InitContainerRemoveParameters(Force=true)
+        Docker.Container.removeContainerWithAsync bc.Connection param (Docker.DockerId.ContainerId bc.ContainerId)
+
+
+    /// Copies a file from a container (only a single file is supported)
+    let getFileAsync (bc:BcContext) (filePath) =
+        async {
+            let param = Docker.Container.ContainerParams.InitGetArchiveFromContainerParameters(Path=filePath)
+            let! res = Docker.Container.getArchiveFromContainerAsync  bc.Connection param false bc.ContainerId 
+            return BioContainerIO.tarToStream res.Stream
+            }
+    
+    /// Puts a stream into a container (only a single file is supported)
+    let putStreamAsync (bc:BcContext) (sourceStream:System.IO.Stream) targetFileName  =
+        async {
+            let targetPath = BioContainerIO.directoryName targetFileName
+            let targetName = BioContainerIO.fileName targetFileName
+
+            // ! Set the target filename as tar-entry name to make renaming possible
+            let stream = BioContainerIO.tarOfStream targetName sourceStream
+    
+            let param = Docker.Container.ContainerParams.InitContainerPathStatParameters(AllowOverwriteDirWithFile=true, Path=targetPath)
+            do!
+                Docker.Container.extractArchiveToContainerAsync bc.Connection param (bc.ContainerId ) stream
+        
+            sourceStream.Close()
+            }
+
+    /// Copies a file into a container (only a single file is supported)
+    let putFileAsync (bc:BcContext) (sourceFileName:string) targetFileName  =
+        async {
+            let fileStream = new System.IO.FileStream(sourceFileName,System.IO.FileMode.Open)
+            do!
+                putStreamAsync bc fileStream targetFileName
+            }
+
+
+    //let runCmdAsync (connection:DockerClient) (dockerid: DockerId) cmd =
+    //    // Function creates and deletes new container all the time 
+    //    // !maybe use  Containers.StartWithConfigContainerExecAsync (Docker.DotNet.Models.ContainerExecStartParameters()) in the future
+
+    //    if not (Docker.Image.exists connection dockerid) then failwithf "Image %s does not exists! Please pull the image first." (dockerid.ToString())
+    //    async {
+    //        let! container =
+    //            let param = Docker.Container.ContainerParams.InitCreateContainerParameters(Image=dockerid.ToString(),Cmd=cmd)
+    //            Docker.Container.createContainerWithAsync connection param              
+
+    //        let! isRunning =
+    //            let param = Docker.Container.ContainerParams.InitContainerStartParameters()
+    //            Docker.Container.startContainerWithAsync connection param container.ID
+                
+    //        let! wait = 
+    //            Docker.Container.waitContainerAsync connection container.ID
+        
+    //        let! logs =
+    //            let param = Docker.Container.ContainerParams.InitContainerLogsParameters(ShowStdout=true)
+    //            Docker.Container.getContainerLogsAsync connection param container.ID
+                    
+    //        do! Docker.Container.removeContainerAsync connection (DockerId.ContainerId container.ID)
+                
+    //        return logs
+    //    } 
+
+
+
+
+    ///// Run = create + start (or only start if available)
+    //let tryRunCmd (connection:DockerClient) (dockerid: DockerId) cmd =
+    //    if Docker.Image.exists connection dockerid then
+    //        let res = Docker.Container.createContainerByImage connection (dockerid.ToString())
+    //        if Docker.Container.startContainer connection res.ID then
+    //            Some 
+    //        else
+    //            None
+
+    //    else
+    //        None
+    
+    ///// Run = create + start (or only start if available)
+    //let tryRun (connection:DockerClient) (dockerid: DockerId) =
+    //    if Docker.Image.exists connection dockerid then
+            
+    //        if Docker.Container.existsByImage connection dockerid then
+                
+    //        else
+    //            if Docker.Container.startContainer connection (dockerid.ToString()) then
+    //                Some DockerId
+    //            else
+    //                None
+    //    else
+    //        None
+        
+        
+
+
+
+
+
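Reviewer note, not part of the patch: a minimal sketch of how the BioContainer functions above are meant to compose, modeled on the commented examples in docsrc/content/BioTools-tmhmm.fsx. The image name "tmhmm", the host directory "C:/tmp" and the "cat" command are illustrative assumptions only.

    open BioFSharp.BioTools

    // connect to the local docker daemon and start a container with C:/tmp bind-mounted under /data/
    let client = BioContainer.connect "npipe://./pipe/docker_engine"
    let bcContext =
        BioContainer.initBcContextWithMountAsync client (Docker.ImageName "tmhmm") "C:/tmp"
        |> Async.RunSynchronously

    // translate a host path below the mounted directory into the unix path seen inside the container
    let inputPath = BioContainer.MountInfo.containerPathOf bcContext.Mount "C:/tmp/seq.fasta"

    // run a command in the running container and capture its stdout (stderr is returned if stdout stays empty)
    let output =
        BioContainer.execReturnAsync bcContext ["cat"; inputPath]
        |> Async.RunSynchronously

    // stop and remove the container again
    BioContainer.disposeAsync bcContext |> Async.Start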
diff --git a/src/BioFSharp.BioTools/BioContainerIO.fs b/src/BioFSharp.BioTools/BioContainerIO.fs
new file mode 100644
index 00000000..0be7c35f
--- /dev/null
+++ b/src/BioFSharp.BioTools/BioContainerIO.fs
@@ -0,0 +1,78 @@
+namespace BioFSharp.BioTools
+
+open System
+open System.Threading
+open Docker.DotNet
+open Docker.DotNet.Models
+open System.IO
+
+/// BioContainer helper
+module BioContainerIO =
+
+    open ICSharpCode.SharpZipLib.GZip
+    open ICSharpCode.SharpZipLib.Tar
+    open System.IO
+ 
+    let toUnixDirectorySeparator (filename:string) = 
+        let dirSep = "/"
+        filename.Replace("\\", dirSep)
+            .TrimEnd(Path.DirectorySeparatorChar)
+
+    let directoryName (filename:string) = 
+        let dirSep = Path.DirectorySeparatorChar
+        let tmp    = filename.Split([|dirSep;'/'|])
+        tmp
+        |> Seq.take (tmp.Length-1)
+        |> String.concat ("/")
+        |> fun s -> s + "/"
+
+    let fileName (filename:string) = 
+        let dirSep = Path.DirectorySeparatorChar
+        let tmp    = filename.Split([|dirSep;'/'|])
+        let last = tmp.Length-1
+        if last > 0 then tmp.[last] else ""
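+
+    // For illustration: directoryName "C:/tmp/seq.fasta" yields "C:/tmp/" and
+    // fileName "C:/tmp/seq.fasta" yields "seq.fasta" (both accept '\' or '/' separators).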
+
+    let readFrom (stream:System.IO.Stream) =
+        let length = (stream.Length) |> int
+        let tmp : array<byte> = Array.zeroCreate length
+        stream.Read(tmp,0,length) |> ignore
+
+        System.Text.Encoding.UTF8.GetString(tmp,0,length)
+
+    /// Returns the first file entry of a tar archive as a stream
+    let tarToStream inStream =
+        let tarIn = new TarInputStream(inStream)
+        if tarIn.GetNextEntry().IsDirectory then 
+            tarIn.GetNextEntry() |> ignore
+            tarIn :> Stream
+        else
+            tarIn :> Stream
+   
+
+    /// Returns a tar-archive MemoryStream (only one entry supported) 
+    let tarOfStream (tarEntryName:string) (inputStream:Stream) =
+        let outStream = new MemoryStream()
+        let tarOutputStream = new TarOutputStream(outStream) 
+
+        let fileSize = inputStream.Length
+        let entry = TarEntry.CreateTarEntry(tarEntryName)
+        // Must set size; otherwise TarOutputStream will fail when the written data exceeds the declared entry size.
+        entry.Size <- fileSize
+        // Add the entry to the tar stream, before writing the data.
+        tarOutputStream.PutNextEntry(entry)
+        // this is copied from TarArchive.WriteEntryCore
+        let localBuffer : byte [] = Array.zeroCreate (32 * 1024)
+        let rec loop () =
+            let numRead = inputStream.Read(localBuffer, 0, localBuffer.Length)
+            if (numRead <= 0) then
+                tarOutputStream.CloseEntry()
+            else
+                tarOutputStream.Write(localBuffer, 0, numRead)
+                loop ()
+    
+        loop ()
+        tarOutputStream.IsStreamOwner <- false
+        tarOutputStream.Close()
+        outStream.Position <- 0L
+        outStream
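+
+    // Minimal sketch (illustrative only): wrap a host file into a single-entry tar
+    // archive; the resulting MemoryStream can be handed to the Docker API (e.g.
+    // extractArchiveToContainerAsync) or unpacked again with tarToStream.
+    //
+    //     use fs  = new FileStream("C:/tmp/seq.fasta", FileMode.Open)
+    //     let tar = tarOfStream "seq.fasta" fs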
+
diff --git a/src/BioFSharp.BioTools/BioFSharp.BioTools.fsproj b/src/BioFSharp.BioTools/BioFSharp.BioTools.fsproj
new file mode 100644
index 00000000..6fa10fb9
--- /dev/null
+++ b/src/BioFSharp.BioTools/BioFSharp.BioTools.fsproj
@@ -0,0 +1,40 @@
+
+
+  
+    net45;net47;netstandard2.0
+    BioFSharp.BioTools
+    BioFSharp.BioTools
+    BioFSharp.BioTools
+    Library
+    true
+    
+    true
+    
+    true
+    
+    $(AllowedOutputExtensionsInPackageBuildOutputFolder);.pdb
+    Debug;Release;Mono
+  
+  
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+  
+  
+    
+    
+  
+  
+
+
diff --git a/src/BioFSharp.BioTools/BioFSharp.BioTools.fsx b/src/BioFSharp.BioTools/BioFSharp.BioTools.fsx
new file mode 100644
index 00000000..ffea26d0
--- /dev/null
+++ b/src/BioFSharp.BioTools/BioFSharp.BioTools.fsx
@@ -0,0 +1,495 @@
+#r "netstandard"
+#r "../../packages/Newtonsoft.Json.10.0.3/lib/netstandard1.3/Newtonsoft.Json.dll"
+#r "../../packages/System.Buffers/lib/netstandard2.0/System.Buffers.dll"
+#r "../../packages/Docker.DotNet/lib/netstandard2.0/Docker.DotNet.dll"
+
+#r "../../packages/SharpZipLib/lib/netstandard2.0/ICSharpCode.SharpZipLib.dll"
+
+#r "../../packages/SharpZipLib/lib/netstandard2.0/ICSharpCode.SharpZipLib.dll"
+#r "../../packages/FSharpAux.IO/lib/netstandard2.0/FSharpAux.dll"
+#r "../../packages/FSharpAux.IO/lib/netstandard2.0/FSharpAux.IO.dll"
+
+#load "Docker.fs"
+#load "BioContainerIO.fs"
+#load "BioContainer.fs"
+#load "TargetP.fs"
+#load "Blast.fs"
+#load "ClustalO.fs"
+#load "HMMER.fs"
+
+open System.Threading
+open Docker.DotNet
+open System.Buffers
+open System.Threading.Tasks
+
+open BioFSharp.BioTools
+open System.Collections.Generic
+open Docker.DotNet.Models
+open System.IO
+
+
+open ICSharpCode.SharpZipLib.GZip
+open ICSharpCode.SharpZipLib.Tar
+open Newtonsoft.Json.Serialization
+open System
+
+
+
+let client = Docker.connect "npipe://./pipe/docker_engine"
+
+
+
+let ubuntu = Docker.ImageName "ubuntu"
+
+let bcContextUbuntu =
+    BioContainer.initBcContextWithMountAsync client ubuntu "C:/tmp" 
+    |> Async.RunSynchronously
+
+
+
+BioContainer.disposeAsync bcContextUbuntu
+|> Async.Start
+
+
+let bcContext =
+    BioContainer.initBcContextLocalDefaultAsync TargetP.ImageTagetP
+    |> Async.RunSynchronously
+
+
+let stream = new FileStream("C:/tmp/twelve.fsa",FileMode.Open)
+
+let res = TargetP.run bcContext (TargetP.NonPlant) stream
+
+
+
+BioContainer.disposeAsync bcContext
+|> Async.Start
+
+
+
+res |> Seq.head |> fun tp -> tp.Mtp
+
+
+
+
+
+//// https://github.com/Microsoft/Docker.DotNet/issues/223 -> write
+//// https://github.com/Microsoft/Docker.DotNet/issues/212 -> read
+
+////-i, --interactive=false Keep STDIN open even if not attached
+////-t, --tty=false Allocate a pseudo-TTY
+
+//let exe = 
+//    async {
+//        //let! container =
+//        //    let param = Docker.Container.ContainerParams.InitCreateContainerParameters(Image=dockerid.ToString(),Cmd=cmd,OpenStdin=true)
+//        //    Docker.Container.createContainerWithAsync connection param      
+        
+//        //let! isRunning =
+//        //    let param = 
+//        //        Docker.Container.ContainerParams.InitContainerStartParameters()
+
+//        //    Docker.Container.startContainerWithAsync connection param container.ID
+
+//        let! execContainer =
+//            let param = 
+//                Docker.Container.ContainerParams.InitContainerExecCreateParameters(                                        
+//                    AttachStderr=true,
+//                    AttachStdout=true,                
+//                    AttachStdin=false,
+//                    Cmd=cmd',
+//                    Detach=false
+//                    //Tty=false
+//                    )
+
+//            Docker.Container.execCreateContainerAsync connection param (cont)
+//        return tmp
+//        }
+
+//    |> Async.RunSynchronously
+
+
+//////docker stop $(docker ps -a -q)
+
+////Docker.Container.removeContainerAsync connection (Docker.DockerId.ContainerId (container.ID))  
+////|> Async.RunSynchronously
+
+
+//let ms = 
+//    async {
+        
+//        let! execContainer =
+//            let param = 
+//                Docker.Container.ContainerParams.InitContainerExecCreateParameters(                                        
+//                    AttachStderr=true,
+//                    AttachStdout=true,                
+//                    AttachStdin=false,
+//                    Cmd=cmd,
+//                    Detach=false                    
+//                    )
+
+//            Docker.Container.execCreateContainerAsync connection param (cont)
+
+//        let! stream =
+//            let param = 
+//                Docker.Container.ContainerParams.InitContainerExecStartParameters(
+//                    AttachStderr=true,
+//                    AttachStdout=true,                
+//                    AttachStdin=false,                   
+//                    Cmd=cmd
+//                    )                
+//            Docker.Container.startContainerWithExecConfigAsync connection param cont // startContainerExecAsync connection exe.ID // 
+            
+//        printfn "Start Exec"
+        
+//        //let stopParam = new ContainerStopParameters()        
+//        //let! st =  connection.Containers.StopContainerAsync(cont,stopParam) |> Async.AwaitTask
+        
+//        //printfn "Stop: %b" st
+        
+//        let stdOutputStream = new System.IO.MemoryStream()
+//        let streamTask =
+//            stream.CopyOutputToAsync(null,stdOutputStream,null,CancellationToken.None)             
+
+                
+//        do! streamTask |> Async.AwaitTask
+
+//        printfn "Streamed"
+
+//        //let! wait = 
+//        //    Docker.Container.waitContainerAsync connection container.ID
+
+//        let result =        
+//            stdOutputStream.Position <- 0L
+//            readFrom stdOutputStream
+                    
+//        //do! Docker.Container.removeContainerAsync connection (Docker.DockerId.ContainerId container.ID)  
+    
+//        return result
+    
+//    } 
+//    |> Async.RunSynchronously
+
+
+
+//let ms = 
+//    async {
+//        let! container =
+//            let param = 
+//                Docker.Container.ContainerParams.InitCreateContainerParameters(
+//                    ArgsEscaped=false,
+//                    AttachStderr=true,
+//                    AttachStdout=true,                
+//                    AttachStdin=false,
+//                    Image=string dockerid,
+//                    Cmd=cmd
+//                    )
+
+//            Docker.Container.createContainerWithAsync connection param              
+
+//        //let! isRunning =
+//        //    let param = Docker.Container.ContainerParams.InitContainerStartParameters()
+//        //    Docker.Container.startContainerWithAsync connection param container.ID
+
+//        let! stream = 
+//            let param = Docker.Container.ContainerParams.InitContainerAttachParameters (Stdout=true,Stderr=true,Stdin=false,Stream=true)
+//            connection.Containers.AttachContainerAsync(container.ID,false,param)
+//            |> Async.AwaitTask
+    
+//        let stdOutputStream = new System.IO.MemoryStream()
+//        let streamTask =
+//            stream.CopyOutputToAsync(null,stdOutputStream,null,CancellationToken.None) 
+
+//        let! isRunning =
+//            let param = 
+//                Docker.Container.ContainerParams.InitContainerExecStartParameters(
+//                    AttachStderr=true,
+//                    AttachStdout=true,                
+//                    AttachStdin=false,                   
+//                    Cmd=cmd
+//                    )                
+//            Docker.Container.startContainerWithExecConfigAsync connection param container.ID
+                
+//        do! streamTask |> Async.AwaitTask
+
+//        let! wait = 
+//            Docker.Container.waitContainerAsync connection container.ID
+
+//        let result =        
+//            stdOutputStream.Position <- 0L
+//            readFrom stdOutputStream
+                    
+//        do! Docker.Container.removeContainerAsync connection (Docker.DockerId.ContainerId container.ID)  
+    
+//        return result
+    
+//    } 
+//    |> Async.RunSynchronously
+
+
+
+////let tmp =
+////    BioContainer.runCmdAsync client (Docker.DockerId.ImageName "ubuntu") ["echo"; "hello world"]
+////    |> Async.RunSynchronously
+////    |> readFrom
+
+
+
+//Docker.Image.exists client (Docker.DockerId.ImageName "targetp_image")
+
+
+//Docker.Image.listImages client
+//|> Seq.map (fun i -> i.ID )
+//|> Seq.toArray
+
+
+//Docker.Container.existsByImage client (Docker.DockerId.ImageName "targetp_image")
+
+
+////ancestor=(<image-name>[:<tag>], <image id> or <image@digest>)
+
+//let filters = 
+//    Docker.Container.ContainerParams.InitContainerListParameters(All=true,Filters=Docker.Filters.InitContainerFilters(Ancestor=Docker.DockerId.ImageName "ubuntu"))
+
+
+//Docker.Container.listContainersWithAsync client filters
+//|> Async.RunSynchronously
+//|> Seq.map (fun x -> x.Command,x.Image,x.Labels)
+//|> Seq.toArray
+
+////client.Containers.StartWithConfigContainerExecAsync
+//let p = Docker.DotNet.Models.ContainerExecStartParameters()
+
+
+//let ap = Docker.DotNet.Models.ContainerAttachParameters()
+
+
+
+
+//Docker.Container.existsByImage client (Docker.DockerId.ImageName "targetp_image")
+
+
+//let idtp = "61fbfbc30382e83dd585c99583c036ef8c5ced4eb10e1b274f199da6b6969588"
+
+////let pipe = System.Uri("npipe://./pipe/docker_engine")
+
+////let config = new DockerClientConfiguration(pipe)
+////let client = config.CreateClient()
+
+////let createByImage (client:DockerClient) imageName =
+////    async {
+////        let param = Models.CreateContainerParameters()
+////        param.Image <- imageName
+////        param.Cmd <- System.Collections.Generic.List(["echo"; "hello world"])
+////        let! container =  
+////            client.Containers.CreateContainerAsync (param,CancellationToken.None)
+////            |> Async.AwaitTask
+////        return container.ID
+////    }
+
+
+////let result =
+////    async {
+////        let paramLog = Models.ContainerLogsParameters() // (Stdout = System.Nullable(true),Stdin = System.Nullable(true))
+////        paramLog.ShowStdout <- System.Nullable(true)
+////        let paramRun = Models.ContainerStartParameters ()
+        
+////        //let id = 
+////        //    "4243adc7f3832ea35bdaad79aabe86f8e1c54f5c3a799cc72e060a8402bc24cb"
+        
+////        let! id = createByImage client "ubuntu"
+
+////        let! isRunnig =  
+////            client.Containers.StartContainerAsync(id,paramRun,CancellationToken.None)
+////            |> Async.AwaitTask
+        
+////        let! wait = 
+////            client.Containers.WaitContainerAsync(id,CancellationToken.None)
+////            |> Async.AwaitTask
+        
+////        let! logs =
+////            client.Containers.GetContainerLogsAsync (id,paramLog,CancellationToken.None)
+////            |> Async.AwaitTask
+
+            
+        
+////        return logs
+////    } 
+////    |> Async.RunSynchronously
+
+
+////let tmp : array<byte> = Array.zeroCreate 1024
+////result.Read(tmp,0,1024)
+
+////System.Text.Encoding.UTF8.GetString(tmp,0,1024)
+
+// Include CsbScaffold
+//#load "../../.env/CsbScaffold.fsx"
+//#r @"C:\Users\Kevin\source\repos\CSBiology\BioFSharp\bin\BioFSharp.BioTools\net47\BioFSharp.BioTools.dll"
+
+//open BioFSharp.BioTools
+
+//open BioFSharp.IO
+//open BioFSharp.IO.BlastNCBI
+//open BioFSharp.IO.BlastNCBI.Parameters
+
+//let typeOfDatabase = Parameters.MakeDbParams.DbType Parameters.Protein
+
+
+//BlastWrapper(@"C:\Users\Kevin\source\repos\CSBiology\BioFSharp\lib\ncbi-blast\bin").makeblastdb @"C:\Users\Kevin\Source\Repos\CsbScaffold\Docker\data\Chlamy_Cp.fastA"  ([typeOfDatabase;] |> seq)
+
+//let outputFormat= 
+    
+//    [   
+//        OutputCustom.Query_SeqId; 
+//        OutputCustom.Subject_SeqId;
+//        OutputCustom.Query_Length;
+//        OutputCustom.Subject_Length;
+//        OutputCustom.AlignmentLength;
+//        OutputCustom.MismatchCount;
+//        OutputCustom.IdentityCount;
+//        OutputCustom.PositiveScoringMatchCount;
+//        OutputCustom.Evalue;
+//        OutputCustom.Bitscore;
+//    ] 
+//    |> List.toSeq
+
+//let outputType = OutputType.TabularWithComments
+
+//let customOutputFormat = OutputTypeCustom(outputType , outputFormat)
+
+
+
+//BlastNCBI.BlastWrapper(@"C:\Users\Kevin\source\repos\CSBiology\BioFSharp\lib\ncbi-blast\bin")
+//    .blastP 
+//        @"C:\Users\Kevin\Source\Repos\CsbScaffold\Docker\data\Chlamy_Cp.fastA" 
+//        @"C:\Users\Kevin\Source\Repos\CsbScaffold\Docker\data\testQuery.fastA"
+//        @"C:\Users\Kevin\Source\Repos\CsbScaffold\Docker\data/Output.txt"
+//        ([customOutputFormat;] |> seq)
+
+
+
+open FSharpAux
+open FSharpAux.IO
+open FSharpAux.IO.SchemaReader.Attribute
+open System.IO
+open BioFSharp.BioTools.BioContainer
+open BioFSharp.BioTools.BioContainerIO
+open Blast
+
+let client = Docker.connect "npipe://./pipe/docker_engine"
+
+let ImageBlast = Docker.DockerId.ImageId "blast"
+
+let blastContext = 
+    BioContainer.initBcContextWithMountAsync client ImageBlast @"C:\Users\Kevin\source\repos\CsbScaffold\Docker\data"
+    |> Async.RunSynchronously
+
+let paramz =
+    [
+        MakeDbParams.DbType Protein
+        MakeDbParams.Input @"C:\Users\Kevin\source\repos\CsbScaffold\Docker\data\Chlamy_Cp.fastA"
+        MakeDbParams.Output @"C:\Users\Kevin\source\repos\CsbScaffold\Docker\data\Chlamy_Cp.fastA"
+    ]
+
+let outputFormat= 
+    
+    [   
+        OutputCustom.Query_SeqId; 
+        OutputCustom.Subject_SeqId;
+        OutputCustom.Query_Length;
+        OutputCustom.Subject_Length;
+        OutputCustom.AlignmentLength;
+        OutputCustom.MismatchCount;
+        OutputCustom.IdentityCount;
+        OutputCustom.PositiveScoringMatchCount;
+        OutputCustom.Evalue;
+        OutputCustom.Bitscore;
+    ] 
+
+let blastPParamz = [
+    BlastParams.SearchDB @"C:\Users\Kevin\source\repos\CsbScaffold\Docker\data\Chlamy_Cp.fastA"
+    BlastParams.Query @"C:\Users\Kevin\source\repos\CsbScaffold\Docker\data\testQuery.fastA"
+    BlastParams.Output @"C:\Users\Kevin\source\repos\CsbScaffold\Docker\data\Output.txt"
+    OutputTypeCustom
+        (
+             OutputType.TabularWithComments,
+             [   
+                OutputCustom.Query_SeqId; 
+                OutputCustom.Subject_SeqId;
+                OutputCustom.Query_Length;
+                OutputCustom.Subject_Length;
+                OutputCustom.AlignmentLength;
+                OutputCustom.MismatchCount;
+                OutputCustom.IdentityCount;
+                OutputCustom.PositiveScoringMatchCount;
+                OutputCustom.Evalue;
+                OutputCustom.Bitscore;
+             ] 
+        )
+]
+
+runMakeBlastDBAsync blastContext paramz
+|> Async.RunSynchronously
+
+runMakeBlastDB blastContext paramz
+
+runBlastPAsync blastContext blastPParamz
+|> Async.RunSynchronously
+
+
+BioContainer.execAsync blastContext ["makeblastdb"; "-dbtype"; "prot" ;"-in"; "/data/C/Users/Kevin/Source/Repos/CsbScaffold/Docker/data/Chlamy_Cp.fastA"; "-out"; "/data/C/Users/Kevin/Source/Repos/CsbScaffold/Docker/data/Chlamy_Cp.fastA"]
+|> Async.RunSynchronously
+
+BioContainer.disposeAsync blastContext
+|> Async.RunSynchronously
+
+
+open ClustalO
+
+let clustalImage = Docker.ImageName "clustal-omega"
+
+let clustalContext = 
+    BioContainer.initBcContextWithMountAsync client clustalImage @"C:\Users\Kevin\source\repos\CsbScaffold\Docker\data"
+    |> Async.RunSynchronously
+
+// ClustalO tests
+let clustalOParamz = [
+    ClustalOParams.Input 
+        (
+            FileInput.SequenceFile @"C:\Users\Kevin\source\repos\CsbScaffold\Docker\data\Chlamy_Cp.fastA",
+            [
+                InputCustom.Format FileFormat.FastA
+            ]
+        )
+    ClustalOParams.Output 
+        (
+            @"C:\Users\Kevin\source\repos\CsbScaffold\Docker\data\Chlamy_Cp.aln",
+            []
+        )
+    ClustalOParams.Miscellaneous 
+        [
+            MiscellaneousCustom.Force
+        ]
+]
+
+runClustalO clustalContext clustalOParamz
+
+open HMMER
+open HMMER.HMMbuild
+
+let HMMERImage =  Docker.ImageName "hmmer"
+
+let hmmerContext = 
+    BioContainer.initBcContextWithMountAsync client HMMERImage @"C:\Users\Kevin\source\repos\CsbScaffold\Docker\data"
+    |> Async.RunSynchronously
+
+let hmmbuildParamz = 
+    [
+        InputMSAFile @"C:\Users\Kevin\source\repos\CsbScaffold\Docker\data\hmmer_testfiles\globins4.sto"
+        OutputHMMFile @"C:\Users\Kevin\source\repos\CsbScaffold\Docker\data\hmmer_testfiles\testOutput.hmm"
+    ]
+
+runHMMbuild hmmerContext hmmbuildParamz
\ No newline at end of file
diff --git a/src/BioFSharp.BioTools/Blast.fs b/src/BioFSharp.BioTools/Blast.fs
new file mode 100644
index 00000000..1cd46b23
--- /dev/null
+++ b/src/BioFSharp.BioTools/Blast.fs
@@ -0,0 +1,245 @@
+namespace BioFSharp.BioTools
+
+module Blast =
+
+    open FSharpAux
+    open FSharpAux.IO
+    open FSharpAux.IO.SchemaReader.Attribute
+    open BioContainer
+    open BioContainerIO
+     
+    type DbType =
+        | Protein 
+        | Nucleotide
+
+        static member make = function
+            | Protein       -> "prot"
+            | Nucleotide    -> "nucl"
+
+
+    type MakeDbParams =
+        | Input  of string
+        | Output of string
+        | DbType of DbType
+        | MaskData of string
+        | ParseSeqIds    
+
+        static member makeCmdWith (m: MountInfo) = function
+            | Input  (path)     -> ["-in"  ;(MountInfo.containerPathOf m path)]
+            | Output (path)     -> ["-out" ;(MountInfo.containerPathOf m path)]
+            | DbType (dbt)      -> ["-dbtype"; (DbType.make dbt)]
+            | MaskData (path)   -> ["-mask_data"; sprintf "%s.asnb" (MountInfo.containerPathOf m path)]
+            | ParseSeqIds       -> ["-parse_seqids"] 
+
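+    // For illustration: runMakeBlastDBAsync (below) translates
+    //     [Input @"C:\data\db.fasta"; DbType Protein]
+    // into the command ["makeblastdb"; "-in"; "<container path of db.fasta>"; "-dbtype"; "prot"],
+    // where the container path is produced by MountInfo.containerPathOf.
+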
+
+    type OutputType = 
+        | Pairwise                        
+        | Query_anchored                  
+        | Query_anchored_NoIdentities     
+        | Query_anchored_Flat             
+        | Query_anchored_Flat_NoIdentities
+        | XML                             
+        | Tabular                         
+        | TabularWithComments             
+        | TextASN1                        
+        | BinaryASN1                      
+        | CSV                             
+        | BLAST_ArchiveFormat             
+        | JSON_Seqalign                   
+        | JSON_Blast                      
+        | XML2_Blast                      
+
+            static member make = function 
+                | Pairwise                          ->  0
+                | Query_anchored                    ->  1
+                | Query_anchored_NoIdentities       ->  2
+                | Query_anchored_Flat               ->  3
+                | Query_anchored_Flat_NoIdentities  ->  4
+                | XML                               ->  5
+                | Tabular                           ->  6
+                | TabularWithComments               ->  7
+                | TextASN1                          ->  8
+                | BinaryASN1                        ->  9
+                | CSV                               -> 10
+                | BLAST_ArchiveFormat               -> 11
+                | JSON_Seqalign                     -> 12
+                | JSON_Blast                        -> 13
+                | XML2_Blast                        -> 14
+
+    //When not provided, the default value is:
+    //'qseqid sseqid pident length mismatch gapopen qstart qend sstart send
+    //evalue bitscore', which is equivalent to the keyword 'std'
+    type OutputCustom = 
+        | Query_SeqId               
+        | Query_GI                  
+        | Query_Accesion            
+        | Query_Accesion_Version    
+        | Query_Length              
+        | Subject_SeqId             
+        | Subject_All_SeqIds        
+        | Subject_GI                
+        | Subject_All_GIs           
+        | Subject_Accession         
+        | Subject_Accession_Version 
+        | Subject_All_Accession     
+        | Subject_Length            
+        | Query_StartOfAlignment    
+        | Query_EndOfAlignment      
+        | Subject_StartOfAlignment  
+        | Subject_EndOfAlignment    
+        | Query_AlignedPartOf       
+        | Subject_AlignedPartOf     
+        | Evalue                    
+        | Bitscore                  
+        | RawScore                  
+        | AlignmentLength           
+        | Identity                  
+        | IdentityCount             
+        | MismatchCount             
+        | PositiveScoringMatchCount 
+        | GapOpeningCount           
+        | GapCount                  
+        | PositiveScoringMatch      
+        //means Query and subject frames separated by a '/'
+        | Frames                   
+        | Query_Frames             
+        | Subject_Frames           
+        //means Blast traceback operations (BTOP)
+        | BTOP                      
+        | Subject_TaxonomyIDs       
+        | Subject_Scientific_Names  
+        | Subject_Common_Names      
+        | Subject_Blast_Names       
+        | Subject_Super_Kingdoms    
+        | Subject_Title             
+        | Subject_All_Titles        
+        | Subject_Strand            
+        | Query_CoveragePerSubject  
+        | Query_CoveragePerHSP    
+
+        static member make = function
+            | Query_SeqId               -> "qseqid"
+            | Query_GI                  -> "qgi"
+            | Query_Accesion            -> "qacc"
+            | Query_Accesion_Version    -> "qaccver"
+            | Query_Length              -> "qlen"
+            | Subject_SeqId             -> "sseqid"
+            | Subject_All_SeqIds        -> "sallseqid"
+            | Subject_GI                -> "sgi"
+            | Subject_All_GIs           -> "sallgi"
+            | Subject_Accession         -> "sacc"
+            | Subject_Accession_Version -> "saccver"
+            | Subject_All_Accession     -> "sallacc"
+            | Subject_Length            -> "slen"
+            | Query_StartOfAlignment    -> "qstart"
+            | Query_EndOfAlignment      -> "qend"
+            | Subject_StartOfAlignment  -> "sstart"
+            | Subject_EndOfAlignment    -> "send"
+            | Query_AlignedPartOf       -> "qseq"
+            | Subject_AlignedPartOf     -> "sseq" 
+            | Evalue                    -> "evalue"
+            | Bitscore                  -> "bitscore"
+            | RawScore                  -> "score"
+            | AlignmentLength           -> "length"
+            | Identity                  -> "pident" 
+            | IdentityCount             -> "nident"
+            | MismatchCount             -> "mismatch"
+            | PositiveScoringMatchCount -> "positive"
+            | GapOpeningCount           -> "gapopen"
+            | GapCount                  -> "gaps"
+            | PositiveScoringMatch      -> "ppos"
+            //means Query and subject frames separated by a '/'
+            | Frames                    -> "frames" 
+            | Query_Frames              -> "qframe"
+            | Subject_Frames            -> "sframe"
+            //means Blast traceback operations (BTOP)
+            | BTOP                      -> "btop" 
+            | Subject_TaxonomyIDs       -> "staxids" 
+            | Subject_Scientific_Names  -> "sscinames"
+            | Subject_Common_Names      -> "scomnames"
+            | Subject_Blast_Names       -> "sblastnames"
+            | Subject_Super_Kingdoms    -> "sskingdoms"
+            | Subject_Title             -> "stitle"
+            | Subject_All_Titles        -> "salltitles"
+            | Subject_Strand            -> "sstrand"
+            | Query_CoveragePerSubject  -> "qcovs"
+            | Query_CoveragePerHSP      -> "qcovhsp"
+
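+    // For illustration: [Query_SeqId; Evalue; Bitscore] |> Seq.map OutputCustom.make
+    // yields ["qseqid"; "evalue"; "bitscore"], i.e. the column keywords passed to -outfmt.
+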
+
+    type BlastParams =
+        | SearchDB of string
+        | Query    of string
+        | Output   of string
+        | OutputType of OutputType
+        | OutputTypeCustom of OutputType * seq<OutputCustom>
+        | Num_threads of int
+        | Max_Hits of int
+
+        static member makeCmdWith (m: MountInfo) = function
+            | SearchDB  (path)      -> ["-db"    ; (MountInfo.containerPathOf m path)]
+            | Query     (path)      -> ["-query" ; (MountInfo.containerPathOf m path)]
+            | Output    (path)      -> ["-out"   ; (MountInfo.containerPathOf m path)]
+            | OutputType(format)    -> ["-outfmt"; string (format |> OutputType.make)]
+            | OutputTypeCustom(t,p) ->  let tmp = 
+                                            p 
+                                            |> Seq.map OutputCustom.make 
+                                            |> String.concat " "
+                                        match t with
+                                        | OutputType.Tabular             -> ["-outfmt"; sprintf "%s %s" (string (t |> OutputType.make)) tmp]
+                                        | OutputType.TabularWithComments -> ["-outfmt"; sprintf "%s %s" (string (t |> OutputType.make)) tmp]
+                                        | OutputType.CSV                 -> ["-outfmt"; sprintf "%s %s" (string (t |> OutputType.make)) tmp]
+                                        | _ -> failwithf "Output format %A does not support custom columns." t                                
+            | Num_threads(i)        -> ["-num_threads"; string i]
+            | Max_Hits (i)          -> ["-max_target_seqs"; string i]
+
+
+    let runMakeBlastDBAsync (bcContext:BioContainer.BcContext) (opt:MakeDbParams list) = 
+
+        let cmds = (opt |> List.map (MakeDbParams.makeCmdWith bcContext.Mount))
+        let tp = "makeblastdb"::(cmds |> List.concat)
+
+        printfn "Starting process makeblastdb\r\nparameters:"
+        cmds |> List.iter (fun op -> printfn "\t%s" (String.concat " " op))
+
+        async {
+                let! res = BioContainer.execAsync bcContext tp           
+                return res
+        }
+
+    let runMakeBlastDB (bcContext:BioContainer.BcContext) (opt:MakeDbParams list) =
+
+        runMakeBlastDBAsync bcContext opt
+        |> Async.RunSynchronously
+
+    let runBlastPAsync (bcContext:BioContainer.BcContext) (opt:BlastParams list) = 
+        let cmds = (opt |> List.map (BlastParams.makeCmdWith bcContext.Mount))
+        let tp = "blastp"::(cmds |> List.concat)
+
+        printfn "Starting process blastp\r\nparameters:"
+        cmds |> List.iter (fun op -> printfn "\t%s" (String.concat " " op))
+
+        async {
+                let! res = BioContainer.execAsync bcContext tp           
+                return res
+        }
+
+    let runBlastP (bcContext:BioContainer.BcContext) (opt:BlastParams list) = 
+        runBlastPAsync bcContext opt
+        |> Async.RunSynchronously
+
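+    // Illustrative end-to-end sketch (image name, host paths and the connected
+    // `client` are assumptions; mirrors the example script): build a protein
+    // database and query it with blastp.
+    //
+    //     let context =
+    //         BioContainer.initBcContextWithMountAsync client (Docker.DockerId.ImageName "blast") @"C:\data"
+    //         |> Async.RunSynchronously
+    //     runMakeBlastDB context [MakeDbParams.Input @"C:\data\db.fasta"; MakeDbParams.DbType Protein]
+    //     runBlastP context [BlastParams.SearchDB @"C:\data\db.fasta"; BlastParams.Query @"C:\data\query.fasta"; BlastParams.Output @"C:\data\out.txt"]
+    //     BioContainer.disposeAsync context |> Async.RunSynchronously
+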
+    let runBlastNAsync (bcContext:BioContainer.BcContext) (opt:BlastParams list) = 
+        let cmds = (opt |> List.map (BlastParams.makeCmdWith bcContext.Mount))
+        let tp = "blastn"::(cmds |> List.concat)
+
+        printfn "Starting process blastn\r\nparameters:"
+        cmds |> List.iter (fun op -> printfn "\t%s" (String.concat " " op))
+
+        async {
+                let! res = BioContainer.execAsync bcContext tp           
+                return res
+ 
+        }
+
+    let runBlastN (bcContext:BioContainer.BcContext) (opt:BlastParams list) =
+        runBlastNAsync bcContext opt
+        |> Async.RunSynchronously
\ No newline at end of file
diff --git a/src/BioFSharp.BioTools/ClustalO.fs b/src/BioFSharp.BioTools/ClustalO.fs
new file mode 100644
index 00000000..423229d0
--- /dev/null
+++ b/src/BioFSharp.BioTools/ClustalO.fs
@@ -0,0 +1,401 @@
+namespace BioFSharp.BioTools
+
+module ClustalO =
+
+    open FSharpAux
+    open BioContainer
+
+//    SEQUENCE INPUT
+//        -i, --in, --infile={<file>,-}
+//	        Multiple sequence input file (- for stdin)
+//
+//        --hmm-in=<file>
+//	        HMM input files
+//
+//        --dealign
+//	        Dealign input sequences
+//
+//        --profile1, --p1=<file>
+//	        Pre-aligned multiple sequence file (aligned columns will be kept fixed)
+//
+//        --profile2, --p2=<file>
+//	        Pre-aligned multiple sequence file (aligned columns will be kept fixed)
+//
+//        --is-profile
+//	        disable check if profile, force profile (default no)
+//
+//        -t, --seqtype={Protein, RNA, DNA} 
+//	        Force a sequence type (default: auto)
+//
+//        --infmt={a2m=fa[sta],clu[stal],msf,phy[lip],selex,st[ockholm],vie[nna]} 
+//	        Forced sequence input file format (default: auto)
+
+
+    type FileFormat = 
+        ///FastA file format
+        | FastA
+        ///Clustal file format
+        | Clustal
+        ///MSF file format
+        | MSF
+        ///Phylip file format
+        | Phylip
+        ///Selex file format
+        | Selex
+        ///Stockholm file format
+        | Stockholm
+        ///Vienna file format
+        | Vienna
+        static member make = function
+            | FastA     -> "fa"
+            | Clustal   -> "clu"
+            | MSF       -> "msf"
+            | Phylip    -> "phy"
+            | Selex     -> "selex"
+            | Stockholm -> "st"
+            | Vienna    -> "vie"
+
+    ///Types of sequences
+    type SeqType = 
+        | Protein
+        | DNA
+        | RNA
+
+        static member make = function
+            | Protein   -> "--seqtype=Protein"
+            | RNA       -> "--seqtype=RNA"
+            | DNA       -> "--seqtype=DNA"
+
+    type InputCustom =
+        ///Forced sequence input file format (default: auto)
+        | Format of FileFormat
+        ///Dealign input sequences
+        | Dealign
+        ///Disable check if profile, force profile (default no)
+        | IsProfile
+        ///Force a sequence type (default: auto)
+        | SeqType of SeqType    
+    
+        static member make = function
+            | Format f  -> sprintf "--infmt=%s" (FileFormat.make f)
+            | Dealign   -> "--dealign "
+            | IsProfile -> "--is-profile "
+            | SeqType s -> SeqType.make s
+
+    ///Specify the type of input and assign file path
+    type FileInput = 
+        ///Use this option to make a multiple alignment from a set of sequences. A sequence file must contain more than one sequence (at least two sequences).
+        | SequenceFile of string 
+        ///Use this option to align two alignments (profiles) together.
+        | TwoProfiles of string * string 
+        /// Use this option to add new sequences to an existing alignment.
+        | SequenceFileAndProfile of string * string
+        /// Use this option to make a new multiple alignment of sequences from the input file and use the HMM as a guide (EPA).
+        | SequenceFileAndHMM of string * string
+    
+        static member make = function
+            | SequenceFile path                     -> ["-i"; path]
+            | TwoProfiles (path1,path2)             -> [sprintf "--p1=%s" path1 ; sprintf "--p2=%s" path2]
+            | SequenceFileAndProfile (path1,path2)  -> ["-i"; path1; sprintf "--p1=%s" path2]
+            | SequenceFileAndHMM (path1,path2)      -> ["-i"; path1; sprintf "--hmm-in=%s" path2]
+
+        static member makeWith (m: MountInfo) = 
+            let cPath p = (MountInfo.containerPathOf m p)
+            function
+            | SequenceFile path                     -> ["-i"; (MountInfo.containerPathOf m path)]
+            | TwoProfiles (path1,path2)             -> [sprintf "--p1=%s" (cPath path1) ; sprintf "--p2=%s" (cPath path2)]
+            | SequenceFileAndProfile (path1,path2)  -> ["-i"; (cPath path1); sprintf "--p1=%s" (cPath path2)]
+            | SequenceFileAndHMM (path1,path2)      -> ["-i"; (cPath path1); sprintf "--hmm-in=%s" (cPath path2)]
+
+//    ALIGNMENT OUTPUT
+//  -o, --out, --outfile={<file>,-} 
+//	    Multiple sequence alignment output file (default: stdout)
+//
+//  --outfmt={a2m=fa[sta],clu[stal],msf,phy[lip],selex,st[ockholm],vie[nna]} 
+//	    MSA output file format (default: fasta)
+//
+//  --residuenumber, --resno  
+//	    in Clustal format print residue numbers (default no)
+//
+//  --wrap=<n>  
+//	    number of residues before line-wrap in output
+//
+//  --output-order={input-order,tree-order} 
+//	    MSA output order like in input/guide-tree
+
+    ///Optional modifiers for input
+    type OutputCustom =
+        ///	MSA output file format (default: fasta)
+        | Format of FileFormat
+        ///	in Clustal format print residue numbers (default no)
+        | ResidueNumber 
+        ///	number of residues before line-wrap in output
+        | Wrap of int
+        /// Aligned sequences are ordered according to guide tree instead of input order
+        | OutputOrderAsTree
+
+        static member make = function
+            | Format f -> sprintf "--outfmt=%s" (FileFormat.make f)
+            | ResidueNumber -> "--residuenumber"
+            | Wrap i -> sprintf "--wrap=%i" i
+            | OutputOrderAsTree -> "--output-order=tree-order"
+
+
+        
+
+    ///Collection of parameters for specifying clustalo alignment
+//    CLUSTERING
+//  --distmat-in=<file>
+//	    Pairwise distance matrix input file (skips distance computation)
+//
+//  --distmat-out=<file>
+//	    Pairwise distance matrix output file
+//
+//  --guidetree-in=<file>
+//	    Guide tree input file
+//	    (skips distance computation and guide tree clustering step)
+//
+//  --guidetree-out=<file>
+//	    Guide tree output file
+//
+//  --full
+//	    Use full distance matrix for guide-tree calculation (slow; mBed is default)
+//
+//  --full-iter
+//	    Use full distance matrix for guide-tree calculation during iteration (mBed is default)
+//
+//  --cluster-size=<n>        
+//	    soft maximum of sequences in sub-clusters
+//
+//  --clustering-out=<file>   
+//	    Clustering output file
+//
+//  --use-kimura
+//      use Kimura distance correction for aligned sequences (default no)
+//
+//  --percent-id
+//	    convert distances into percent identities (default no)
+//
+
+    ///Optional modifiers to specify clustering
+    type ClusteringCustom =
+        ///Pairwise distance matrix input file (skips distance computation)
+        | DistanceMatrixInput of string
+        ///Pairwise distance matrix output file
+        | DistanceMatrixOutput of string
+        ///Guide tree input file (skips distance computation and guide tree clustering step)
+        | GuideTreeInput of string
+        ///Guide tree output file
+        | GuideTreeOutput of string
+        ///Use full distance matrix for guide-tree calculation (slow; mBed is default)
+        | Full
+        ///Use full distance matrix for guide-tree calculation during iteration (mBed is default)
+        | FullIter
+        /// Soft maximum of sequences in sub-clusters
+        | ClusterSize of int
+        ///	Clustering output file
+        | ClusteringOut of string
+        /// Use Kimura distance correction for aligned sequences (default no)
+        | UseKimura
+        /// convert distances into percent identities (default no)
+        | PercentID
+    
+        static member make = function
+            | DistanceMatrixInput path  -> [sprintf "--distmat-in=%s" path      ]
+            | DistanceMatrixOutput path -> [sprintf "--distmat-out=%s" path     ]
+            | GuideTreeInput path       -> [sprintf "--guidetree-in=%s" path    ]
+            | GuideTreeOutput path      -> [sprintf "--guidetree-out=%s" path   ]
+            | Full                      -> ["--full"                            ]
+            | FullIter                  -> ["--full-iter"                       ]
+            | ClusterSize i             -> [sprintf "--cluster-size=%i" i       ]
+            | ClusteringOut path        -> [sprintf "--clustering-out=%s" path  ]
+            | UseKimura                 -> ["--use-kimura"                      ]
+            | PercentID                 -> ["--percent-id"                      ]
+
+        static member makeWith (m: MountInfo) = 
+            let cPath p = (MountInfo.containerPathOf m p)
+            function
+            | DistanceMatrixInput path  -> [sprintf "--distmat-in=%s" (cPath path)      ]
+            | DistanceMatrixOutput path -> [sprintf "--distmat-out=%s" (cPath path)     ]
+            | GuideTreeInput path       -> [sprintf "--guidetree-in=%s" (cPath path)    ]
+            | GuideTreeOutput path      -> [sprintf "--guidetree-out=%s" (cPath path)   ]
+            | Full                      -> ["--full"]
+            | FullIter                  -> ["--full-iter"]
+            | ClusterSize i             -> [sprintf "--cluster-size=%i" i]
+            | ClusteringOut path        -> [sprintf "--clustering-out=%s" (cPath path)  ]
+            | UseKimura                 -> ["--use-kimura"]
+            | PercentID                 -> ["--percent-id"]
+
+//ITERATION:
+//
+//  --iterations, --iter=<n>  Number of (combined guide tree/HMM) iterations
+//
+//  --max-guidetree-iterations=<n> Maximum guide tree iterations
+//
+//  --max-hmm-iterations=<n>  Maximum number of HMM iterations
+
+    ///Specify maximum number of iterations for given step
+    type IterationCustom =
+        /// Number of (combined guide tree/HMM) iterations
+        | Iterations of int
+        /// Maximum guide tree iterations
+        | MaxGuideTreeIterations of int
+        ///  Maximum number of HMM iterations
+        | MaxHMMIterations of int
+    
+        static member make = function
+            | Iterations i              -> [sprintf "--iter=%i" i]
+            | MaxGuideTreeIterations i  -> [sprintf "--max-guidetree-iterations=%i" i]
+            | MaxHMMIterations i        -> [sprintf "--max-hmm-iterations=%i" i]
+
+
+//LIMITS (will exit early, if exceeded):
+//
+//  --maxnumseq=<n>           Maximum allowed number of sequences
+//
+//  --maxseqlen=<l>           Maximum allowed sequence length
+    /// Will exit early, if exceeded
+    type LimitsCustom =
+        /// Maximum allowed number of sequences
+        | MaxSeqNumber of int
+        /// Maximum allowed sequence length
+        | MaxSeqLength of int
+    
+        static member make = function
+            | MaxSeqNumber i -> [sprintf "--maxnumseq=%i" i]
+            | MaxSeqLength i -> [sprintf "--maxseqlen=%i" i]
+
+
+//MISCELLANEOUS:
+//
+//  --auto                    Set options automatically (might overwrite some of your options)
+//
+//  --threads=<n>             Number of processors to use
+//
+//  -l, --log=<file>          Log all non-essential output to this file
+//
+//  -h, --help                Print help and exit
+//
+//  -v, --verbose             Verbose output (increases if given multiple times)
+//
+//  --version                 Print version information and exit
+//
+//  --long-version            Print long version information and exit
+//
+//  --force                   Force file overwriting
+
+    ///Optional, miscallaneous modifiers 
+    type MiscellaneousCustom =
+        /// Set options automatically (might overwrite some of your options)
+        | Auto
+        /// Number of processors to use
+        | Threads of int
+        /// Log all non-essential output to this file
+        | Log of string
+        /// Print help and exit
+        //| Help
+        /// Verbose output (ranging from 0 [nonverbose,standard] to 3 [very verbose,everything above 3 is set to 3])
+        | VerboseLevel of int
+        /// Print version information and exit
+        | Version
+        /// Print long version information and exit
+        | LongVersion
+        /// Force file overwriting
+        | Force
+
+        static member make = function
+            | Auto          -> ["--auto"]
+            | Threads i     -> [sprintf "--threads=%i" i]
+            | Log s         -> [sprintf "--log=%s" s]
+            //| Help -> "--help "
+            | VerboseLevel i-> 
+                if i > 0 && i < 4 then
+                    [for n = 0 to i-1 do yield "-v"]
+                elif i > 3 then
+                    ["-v"; "-v"; "-v"]
+                else
+                    []
+            | Version       -> ["--version"]
+            | LongVersion   -> ["--long-version"]
+            | Force         -> ["--force"]
+
+        static member makeWith (m: MountInfo) = 
+            let cPath p = (MountInfo.containerPathOf m p)
+            function
+            | Auto          -> ["--auto"]
+            | Threads i     -> [sprintf "--threads=%i" i]
+            | Log path      -> [sprintf "--log=%s" (cPath path)]
+            //| Help -> "--help "
+            | VerboseLevel i-> 
+                if i > 0 then
+                    [for n = 0 to i-1 do yield "-v"]
+                else
+                    []
+            | Version       -> ["--version"]
+            | LongVersion   -> ["--long-version"]
+            | Force         -> ["--force"]
+
+    type ClustalOParams = 
+        /// Specify input parameters
+        | Input         of FileInput * InputCustom list
+        /// Specify output parameters
+        | Output        of string * OutputCustom list
+        /// Specify clustering parameters
+        | Clustering    of ClusteringCustom list
+        /// Specify iteration parameters
+        | Iteration     of IterationCustom list
+        /// Specify limits parameters
+        | Limits        of LimitsCustom list
+        /// Specify miscallaneous parameters
+        | Miscellaneous of MiscellaneousCustom list
+
+        static member makeCmd = function
+            | Input (i,p)       ->  let tmp = 
+                                        p 
+                                        |> List.map InputCustom.make
+                                    (FileInput.make i)@tmp
+            | Output (o,p)      ->  let tmp = 
+                                        p 
+                                        |> List.map OutputCustom.make
+                                    ["-o"; o]@tmp
+            | Clustering cl     -> cl   |> List.map ClusteringCustom.make   |> List.concat
+            | Iteration it      -> it   |> List.map IterationCustom.make    |> List.concat
+            | Limits l          -> l    |> List.map LimitsCustom.make       |> List.concat
+            | Miscellaneous misc-> misc |> List.map MiscellaneousCustom.make|> List.concat
+
+
+        static member makeCmdWith (m: MountInfo) = 
+            let cPath p = (MountInfo.containerPathOf m p)
+            function
+            | Input (i,p)       ->  let tmp = 
+                                        p 
+                                        |> List.map InputCustom.make
+                                    (FileInput.makeWith m i)@tmp
+            | Output (o,p)      ->  let tmp = 
+                                        p 
+                                        |> List.map OutputCustom.make
+                                    ["-o"; (cPath o)]@tmp
+            | Clustering cl     -> cl   |> List.map (ClusteringCustom.makeWith m)   |> List.concat
+            | Iteration it      -> it   |> List.map IterationCustom.make            |> List.concat
+            | Limits l          -> l    |> List.map LimitsCustom.make               |> List.concat
+            | Miscellaneous misc-> misc |> List.map (MiscellaneousCustom.makeWith m)|> List.concat
+
+
+    let runClustalOAsync (bcContext:BioContainer.BcContext) (opt:ClustalOParams list) = 
+
+        let cmds = (opt |> List.map (ClustalOParams.makeCmdWith bcContext.Mount))
+        let tp = "clustalo"::(cmds |> List.concat)
+
+        printfn "Starting process clustalo\r\nparameters:"
+        cmds |> List.iter (fun op -> printfn "\t%s" (String.concat " " op))
+
+        async {
+                let! res = BioContainer.execAsync bcContext tp           
+                return res
+        }
+
+    let runClustalO (bcContext:BioContainer.BcContext) (opt:ClustalOParams list) = 
+        runClustalOAsync bcContext opt
+        |> Async.RunSynchronously
+
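+    // Illustrative usage sketch (image name, host paths and the connected `client`
+    // are assumptions; mirrors the example script): align a FastA file and
+    // overwrite any existing output file.
+    //
+    //     let context =
+    //         BioContainer.initBcContextWithMountAsync client (Docker.ImageName "clustal-omega") @"C:\data"
+    //         |> Async.RunSynchronously
+    //     runClustalO context [
+    //         ClustalOParams.Input (FileInput.SequenceFile @"C:\data\sequences.fastA", [InputCustom.Format FileFormat.FastA])
+    //         ClustalOParams.Output (@"C:\data\aligned.aln", [])
+    //         ClustalOParams.Miscellaneous [MiscellaneousCustom.Force]
+    //     ]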
+
diff --git a/src/BioFSharp.BioTools/Docker.fs b/src/BioFSharp.BioTools/Docker.fs
new file mode 100644
index 00000000..818341ac
--- /dev/null
+++ b/src/BioFSharp.BioTools/Docker.fs
@@ -0,0 +1,945 @@
+namespace BioFSharp.BioTools
+
+open System
+open System.Threading
+open System.Collections.Generic
+open Docker.DotNet
+open Docker.DotNet.Models
+
+
+// https://docs.docker.com/engine/api/v1.24/
+
+/// Docker helper
+module Docker =
+
+
+    /// Dockerfile > (Build) > Image > (Create/Run) > Container <- start/stop
+    type DockerId =         
+        | ImageId of string         
+        | ImageName of string         
+        | ContainerId of string        
+        | ContainerName of string
+        | Tag of string*string
+    
+        override this.ToString() =
+            match this with
+            | ImageId  s      -> s
+            | ImageName  s    -> s
+            | ContainerId  s  -> s
+            | ContainerName s -> s
+            | Tag (s,t)       -> sprintf "%s:%s" s t
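+
+        // For illustration: (Tag ("ubuntu","latest")).ToString() yields "ubuntu:latest".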
+
+    /// Lifecycle status of a Docker container
+    type ContainerStatus =         
+        | Created
+        | Restarting
+        | Running
+        | Paused
+        | Exited
+        | Dead
+    
+    
+        override this.ToString() =
+            match this with
+            | Created       -> "created"    
+            | Restarting    -> "restarting"
+            | Running       -> "running"    
+            | Paused        -> "paused"     
+            | Exited        -> "exited"     
+            | Dead          -> "dead"       
+
+
+    /// Provides a set of static methods for creating filter parameters.
+    type Filters =
+    
+        static member private DicOfSeq (s:('k * 'v) seq) = new Dictionary<'k,'v>(s |> Map.ofSeq) :> IDictionary<'k,'v>
+
+        /// Creates container filters for pre-filtering list functions
+        static member InitContainerFilters
+            (
+                ?Ancestor:DockerId,
+                ?Status:ContainerStatus,
+                // Containers that exited with status code
+                ?Exited:int,                    
+                ?Label,                
+                ?Isolation,
+                ?Before:DockerId,
+                ?Since:DockerId,
+                ?Volume,
+                ?Network
+            ) = 
+
+            let filter = System.Collections.Generic.Dictionary<string, IDictionary<string, bool>>()            
+            Ancestor            |> Option.iter (fun nv -> filter.Add( "ancestor", [nv.ToString(),true] |> Filters.DicOfSeq) )
+            Status              |> Option.iter (fun nv -> filter.Add( "status", [nv.ToString(),true] |> Filters.DicOfSeq) )
+            Exited              |> Option.iter (fun nv -> filter.Add( "exited", [nv.ToString(),true] |> Filters.DicOfSeq) )                   
+            Label               |> Option.iter (fun nv -> filter.Add( "label", [nv.ToString(),true] |> Filters.DicOfSeq) )            
+            Isolation           |> Option.iter (fun nv -> filter.Add( "isolation", [nv.ToString(),true] |> Filters.DicOfSeq) )
+            Before              |> Option.iter (fun nv -> filter.Add( "before", [nv.ToString(),true] |> Filters.DicOfSeq) )
+            Since               |> Option.iter (fun nv -> filter.Add( "since", [nv.ToString(),true] |> Filters.DicOfSeq) )
+            Volume              |> Option.iter (fun nv -> filter.Add( "volume", [nv.ToString(),true] |> Filters.DicOfSeq) )
+            Network             |> Option.iter (fun nv -> filter.Add( "network", [nv.ToString(),true] |> Filters.DicOfSeq) )                
+        
+            filter
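+
+        // For illustration (mirrors the example script): filters that select all
+        // containers derived from the "ubuntu" image, usable together with
+        // Container.ContainerParams.InitContainerListParameters(All = true, Filters = ...):
+        //
+        //     Filters.InitContainerFilters(Ancestor = DockerId.ImageName "ubuntu")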
+
+    
+    /// Connect to the docker engine (docker daemon)
+    let connect str =
+        (new DockerClientConfiguration(new Uri(str)) ).CreateClient()
+    
+    module Image = 
+        
+        
+        /// Provides a set of static methods for creating Image parameters.
+        type ImagesParams =
+
+            /// Creates ImagesListParameters for pre-filtering list function
+            static member InitImagesListParameters
+                (
+                    ?All,
+                    ?Filters,
+                    ?MatchName
+                ) = 
+
+                let param = new ImagesListParameters()                
+                All       |> Option.iter (fun nv -> param.set_All (Nullable(nv)))
+                Filters   |> Option.iter param.set_Filters
+                MatchName |> Option.iter param.set_MatchName
+                
+                param
+
+            /// Creates ImagesCreateParameters
+            static member InitImagesCreateParameters
+                (
+                    ?FromImage,
+                    ?FromSrc,
+                    ?Repo,
+                    ?Tag
+                ) = 
+                
+                let param = new ImagesCreateParameters()                                
+                FromImage |> Option.iter param.set_FromImage
+                FromSrc   |> Option.iter param.set_FromSrc
+                Repo      |> Option.iter param.set_Repo
+                Tag       |> Option.iter param.set_Tag
+
+                param
+
+            ///// Creates ImageBuildParameters
+            //static member InitImageBuildParameters
+            //    (
+            //        ?FromImage,
+            //        ?FromSrc,
+            //        ?Repo,
+            //        ?Tag
+            //    ) = 
+                
+            //    let param = new ImageBuildParameters()    
+                
+            //    FromImage |> Option.iter param.set_FromImage
+            //    FromSrc   |> Option.iter param.set_FromSrc
+            //    Repo      |> Option.iter param.set_Repo
+            //    Tag       |> Option.iter param.set_Tag
+
+            //    param        
+        
+        /// Lists available images with ImagesListParameters for filtering (async)
+        let listImagesWithAsync (connection:DockerClient) (param:ImagesListParameters) =
+            //  ImagesListParameters are only for filtering
+            async {                
+                let! tmp = 
+                    connection.Images. ListImagesAsync(param)              
+                    |> Async.AwaitTask                    
+                return (tmp |> Seq.map id)
+                }
+
+
+        /// Lists all available images (async)
+        let listImagesAsync (connection:DockerClient) =
+            listImagesWithAsync connection (ImagesParams.InitImagesListParameters())
+        
+        
+        /// Lists all available images
+        let listImages (connection:DockerClient) =
+            listImagesAsync connection
+            |> Async.RunSynchronously      
+
+
+        /// Returns true if an image matches the dockerid (async) 
+        let existsAsync (connection:DockerClient) (dockerid:DockerId) =
+            let param = ImagesParams.InitImagesListParameters(MatchName=dockerid.ToString())
+            async {
+                let! tmp = listImagesWithAsync connection param                
+                 
+                return (Seq.length tmp > 0)
+                }
+        
+        
+        /// Returns true if an image matches the name 
+        let exists (connection:DockerClient) (dockerid:DockerId) =
+            existsAsync connection dockerid
+            |> Async.RunSynchronously 
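+
+        // For illustration (assumes a connected client; mirrors the example script):
+        //     Image.exists client (DockerId.ImageName "ubuntu")
+        // returns true only if an image with that name is available locally.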
+
+        /// Builds an image from a Dockerfile supplied as a (tar) stream, using the given build parameters (async)
+        let buildImageFromDockerfileAsync (connection:DockerClient) (param) (stream) =
+            async {                
+                let! stream = 
+                    connection.Images.BuildImageFromDockerfileAsync(stream,param)              
+                    |> Async.AwaitTask                    
+                return stream
+                }
+    
+    //#####################################################
+    //#####################################################
+    //#####################################################
+
+            
+    module Container =        
+        
+ 
+        /// Provides a set of static methods for creating Container parameters.
+        type ContainerParams =
+
+            /// Creates ContainersListParameters for pre-filtering list function
+            static member InitContainerListParameters
+                (
+                    ?All,
+                    ?Before,                    
+                    ?Filters,
+                    ?Limit,
+                    ?Since,
+                    ?Size
+                ) = 
+
+                let param = new ContainersListParameters()                
+                All       |> Option.iter (fun nv -> param.set_All (Nullable(nv)))
+                Before    |> Option.iter param.set_Before
+                Filters   |> Option.iter param.set_Filters
+                Limit     |> Option.iter (fun nv -> param.set_Limit (Nullable(nv)))
+                Since     |> Option.iter param.set_Since
+                Size      |> Option.iter (fun nv -> param.set_Size (Nullable(nv)))
+                
+                param
+
+            /// Creates ContainerStopParameters (e.g. for stopping containers)
+            static member InitContainerListParameters
+                (
+                    ?WaitBeforeKillSeconds
+                ) = 
+
+                let param = new ContainerStopParameters()
+                
+                WaitBeforeKillSeconds       |> Option.iter (fun nv -> param.set_WaitBeforeKillSeconds (Nullable(nv)))                
+                
+                param
+
+            /// Creates ContainerAttachParameters
+            static member InitContainerAttachParameters
+                (
+                    ?DetachKeys,
+                    ?Logs,
+                    ?Stderr,
+                    ?Stdin,
+                    ?Stdout,                    
+                    ?Stream
+                ) = 
+                
+                let param = new ContainerAttachParameters()                                
+                DetachKeys |> Option.iter param.set_DetachKeys
+                Logs       |> Option.iter param.set_Logs
+                Stderr     |> Option.iter (fun v -> param.set_Stderr (Nullable(v) ) )
+                Stdin      |> Option.iter (fun v -> param.set_Stdin  (Nullable(v) ) )
+                Stdout     |> Option.iter (fun v -> param.set_Stdout (Nullable(v) ) )                
+                Stream     |> Option.iter (fun v -> param.set_Stream (Nullable(v) ) )
+
+                param
+
+            /// Creates ContainerExecStartParameters
+            static member InitContainerExecStartParameters
+                (
+                    
+                    ?AttachStderr,
+                    ?AttachStdin,
+                    ?AttachStdout,                    
+                    ?Cmd:seq<string>,
+                    ?Detach,
+                    ?DetachKeys,
+                    ?Env:seq<string>,
+                    ?Privileged,
+                    ?Tty,
+                    ?User
+                ) = 
+                
+                let param = new ContainerExecStartParameters()                                
+                
+                AttachStderr     |> Option.iter param.set_AttachStderr
+                AttachStdin      |> Option.iter param.set_AttachStdin 
+                AttachStdout     |> Option.iter param.set_AttachStdout
+                Cmd              |> Option.iter (fun v -> param.set_Cmd (Collections.Generic.List(v)) )   
+                Detach           |> Option.iter param.set_Detach
+                DetachKeys       |> Option.iter param.set_DetachKeys
+                Env              |> Option.iter (fun v -> param.set_Env (Collections.Generic.List(v)) )   
+                Privileged       |> Option.iter param.set_Privileged
+                Tty              |> Option.iter param.set_Tty
+                User             |> Option.iter param.set_User
+
+                param
+
+            /// Creates ContainerExecStartParameters
+            static member InitContainerExecCreateParameters
+                (
+                    
+                    ?AttachStderr,
+                    ?AttachStdin,
+                    ?AttachStdout,                    
+                    ?Cmd:seq<string>,
+                    ?Detach,
+                    ?DetachKeys,
+                    ?Env:seq<string>,
+                    ?Privileged,
+                    ?Tty,
+                    ?User
+                ) = 
+                
+                let param = new ContainerExecCreateParameters()                                
+                
+                AttachStderr     |> Option.iter param.set_AttachStderr
+                AttachStdin      |> Option.iter param.set_AttachStdin 
+                AttachStdout     |> Option.iter param.set_AttachStdout
+                Cmd              |> Option.iter (fun v -> param.set_Cmd (Collections.Generic.List(v)) )   
+                Detach           |> Option.iter param.set_Detach
+                DetachKeys       |> Option.iter param.set_DetachKeys
+                Env              |> Option.iter (fun v -> param.set_Env (Collections.Generic.List(v)) )   
+                Privileged       |> Option.iter param.set_Privileged
+                Tty              |> Option.iter param.set_Tty
+                User             |> Option.iter param.set_User
+
+                param
+
+            /// Creates BindOptions
+            static member InitBindOptions
+                (
+                    ?Propagation
+                ) = 
+                
+                let param = new BindOptions()                                
+                Propagation |> Option.iter param.set_Propagation         
+
+                param
+
+            /// Creates VolumeOptions
+            static member InitVolumeOptions
+                (
+                    ?DriverConfig,
+                    ?Labels,
+                    ?NoCopy
+                ) = 
+                
+                let param = new VolumeOptions()                                
+                DriverConfig |> Option.iter param.set_DriverConfig         
+                Labels       |> Option.iter param.set_Labels
+                NoCopy       |> Option.iter param.set_NoCopy
+                
+                param
+
+            /// Creates TmpfsOptions
+            static member InitTmpfsOptions
+                (
+                    ?Mode,
+                    ?SizeBytes
+                ) = 
+                
+                let param = new TmpfsOptions()                                
+                Mode      |> Option.iter param.set_Mode         
+                SizeBytes |> Option.iter param.set_SizeBytes
+                
+                param
+
+            /// Creates Mount
+            static member InitMount
+                (
+                    ?Type          ,
+                    ?Source        ,
+                    ?Target        ,
+                    ?ReadOnly      ,
+                    ?BindOptions   ,
+                    ?VolumeOptions ,
+                    ?TmpfsOptions
+                ) = 
+                
+                let param = new Mount()                                
+                
+                Type          |> Option.iter param.set_Type         
+                Source        |> Option.iter param.set_Source       
+                Target        |> Option.iter param.set_Target       
+                ReadOnly      |> Option.iter param.set_ReadOnly     
+                BindOptions   |> Option.iter param.set_BindOptions  
+                VolumeOptions |> Option.iter param.set_VolumeOptions
+                TmpfsOptions  |> Option.iter param.set_TmpfsOptions 
+
+                param
+
+            /// Creates HostConfig
+            static member InitHostConfig
+                (
+                    ?ContainerIDFile     ,                        
+                    ?LogConfig           ,                  
+                    ?NetworkMode         ,                    
+                    ?PortBindings        ,                     
+                    ?RestartPolicy       ,                      
+                    ?AutoRemove          ,                   
+                    ?VolumeDriver        ,                     
+                    ?VolumesFrom         : seq<string>,
+                    ?CapAdd              ,
+                    ?CapDrop             : seq<string>,
+                    ?DNS                 : seq<string>,
+                    ?DNSOptions          : seq<string>,
+                    ?DNSSearch           : seq<string>,
+                    ?ExtraHosts          : seq<string>,
+                    ?GroupAdd            : seq<string>,
+                    ?IpcMode             ,
+                    ?Cgroup              ,
+                    ?Links               : seq<string>,
+                    ?OomScoreAdj         ,
+                    ?PidMode             ,
+                    ?Privileged          ,
+                    ?PublishAllPorts     ,
+                    ?ReadonlyRootfs      ,
+                    ?SecurityOpt         : seq<string>,
+                    ?StorageOpt          ,                   
+                    ?Tmpfs               ,              
+                    ?UTSMode             ,                
+                    ?UsernsMode          ,                   
+                    ?ShmSize             ,                
+                    ?Sysctls             ,                
+                    ?Runtime             ,                
+                    ?ConsoleSize         ,                    
+                    ?Isolation           ,                  
+                    ?CPUShares           ,                  
+                    ?Memory              ,               
+                    ?NanoCPUs            ,                 
+                    ?CgroupParent        ,                     
+                    ?BlkioWeight         ,                    
+                    ?BlkioWeightDevice   : seq<_>,                          
+                    ?BlkioDeviceReadBps  : seq<_>,                           
+                    ?BlkioDeviceWriteBps : seq<_>,                            
+                    ?BlkioDeviceReadIOps : seq<_>,                            
+                    ?BlkioDeviceWriteIOps: seq<_>,                             
+                    ?CPUPeriod           ,                  
+                    ?CPUQuota            ,                 
+                    ?CPURealtimePeriod   ,                          
+                    ?CPURealtimeRuntime  ,                           
+                    ?CpusetCpus          ,                   
+                    ?CpusetMems          ,                   
+                    ?Devices             : seq<_>,                
+                    ?DiskQuota           ,                  
+                    ?KernelMemory        ,                     
+                    ?MemoryReservation   ,                          
+                    ?MemorySwap          ,                   
+                    ?MemorySwappiness    ,                         
+                    ?OomKillDisable      ,                       
+                    ?PidsLimit           ,                  
+                    ?Ulimits             : seq<_>,
+                    ?CPUCount            ,                 
+                    ?CPUPercent          ,                   
+                    ?IOMaximumIOps       ,                      
+                    ?IOMaximumBandwidth  ,                           
+                    ?Mounts              : seq<_>,
+                    ?Init                ,             
+                    ?InitPath            
+                ) = 
+                
+                let param = new HostConfig()                                
+                
+                ContainerIDFile      |> Option.iter param.set_ContainerIDFile     
+                LogConfig            |> Option.iter param.set_LogConfig           
+                NetworkMode          |> Option.iter param.set_NetworkMode         
+                PortBindings         |> Option.iter param.set_PortBindings        
+                RestartPolicy        |> Option.iter param.set_RestartPolicy       
+                AutoRemove           |> Option.iter param.set_AutoRemove          
+                VolumeDriver         |> Option.iter param.set_VolumeDriver        
+                VolumesFrom          |> Option.iter (fun v -> param.set_VolumesFrom (Collections.Generic.List(v)) )        
+                CapAdd               |> Option.iter param.set_CapAdd              
+                CapDrop              |> Option.iter (fun v -> param.set_CapDrop (Collections.Generic.List(v)) )            
+                DNS                  |> Option.iter (fun v -> param.set_DNS (Collections.Generic.List(v)) )                
+                DNSOptions           |> Option.iter (fun v -> param.set_DNSOptions (Collections.Generic.List(v)) )         
+                DNSSearch            |> Option.iter (fun v -> param.set_DNSSearch (Collections.Generic.List(v)) )
+                ExtraHosts           |> Option.iter (fun v -> param.set_ExtraHosts (Collections.Generic.List(v)) )         
+                GroupAdd             |> Option.iter (fun v -> param.set_GroupAdd (Collections.Generic.List(v)) )
+                IpcMode              |> Option.iter param.set_IpcMode           
+                Cgroup               |> Option.iter param.set_Cgroup              
+                Links                |> Option.iter (fun v -> param.set_Links (Collections.Generic.List(v)) )               
+                OomScoreAdj          |> Option.iter param.set_OomScoreAdj         
+                PidMode              |> Option.iter param.set_PidMode             
+                Privileged           |> Option.iter param.set_Privileged          
+                PublishAllPorts      |> Option.iter param.set_PublishAllPorts     
+                ReadonlyRootfs       |> Option.iter param.set_ReadonlyRootfs      
+                SecurityOpt          |> Option.iter (fun v -> param.set_SecurityOpt (Collections.Generic.List(v)) )         
+                StorageOpt           |> Option.iter param.set_StorageOpt          
+                Tmpfs                |> Option.iter param.set_Tmpfs               
+                UTSMode              |> Option.iter param.set_UTSMode             
+                UsernsMode           |> Option.iter param.set_UsernsMode          
+                ShmSize              |> Option.iter param.set_ShmSize             
+                Sysctls              |> Option.iter param.set_Sysctls             
+                Runtime              |> Option.iter param.set_Runtime             
+                ConsoleSize          |> Option.iter param.set_ConsoleSize         
+                Isolation            |> Option.iter param.set_Isolation           
+                CPUShares            |> Option.iter param.set_CPUShares           
+                Memory               |> Option.iter param.set_Memory              
+                NanoCPUs             |> Option.iter param.set_NanoCPUs            
+                CgroupParent         |> Option.iter param.set_CgroupParent        
+                BlkioWeight          |> Option.iter param.set_BlkioWeight         
+                BlkioWeightDevice    |> Option.iter (fun v -> param.set_BlkioWeightDevice  (Collections.Generic.List(v)) )  
+                BlkioDeviceReadBps   |> Option.iter (fun v -> param.set_BlkioDeviceReadBps (Collections.Generic.List(v)) ) 
+                BlkioDeviceWriteBps  |> Option.iter (fun v -> param.set_BlkioDeviceWriteBps (Collections.Generic.List(v)) )
+                BlkioDeviceReadIOps  |> Option.iter (fun v -> param.set_BlkioDeviceReadIOps (Collections.Generic.List(v)) )
+                BlkioDeviceWriteIOps |> Option.iter (fun v -> param.set_BlkioDeviceWriteIOps (Collections.Generic.List(v)) )
+                CPUPeriod            |> Option.iter param.set_CPUPeriod           
+                CPUQuota             |> Option.iter param.set_CPUQuota            
+                CPURealtimePeriod    |> Option.iter param.set_CPURealtimePeriod   
+                CPURealtimeRuntime   |> Option.iter param.set_CPURealtimeRuntime  
+                CpusetCpus           |> Option.iter param.set_CpusetCpus          
+                CpusetMems           |> Option.iter param.set_CpusetMems          
+                Devices              |> Option.iter (fun v -> param.set_Devices (Collections.Generic.List(v)) )            
+                DiskQuota            |> Option.iter param.set_DiskQuota          
+                KernelMemory         |> Option.iter param.set_KernelMemory        
+                MemoryReservation    |> Option.iter param.set_MemoryReservation   
+                MemorySwap           |> Option.iter param.set_MemorySwap          
+                MemorySwappiness     |> Option.iter (fun v -> param.set_MemorySwappiness (Nullable(v) ) )    
+                OomKillDisable       |> Option.iter (fun v -> param.set_OomKillDisable (Nullable(v) ) )     
+                PidsLimit            |> Option.iter param.set_PidsLimit           
+                Ulimits              |> Option.iter (fun v -> param.set_Ulimits (Collections.Generic.List(v)) )            
+                CPUCount             |> Option.iter param.set_CPUCount            
+                CPUPercent           |> Option.iter param.set_CPUPercent          
+                IOMaximumIOps        |> Option.iter param.set_IOMaximumIOps       
+                IOMaximumBandwidth   |> Option.iter param.set_IOMaximumBandwidth  
+                Mounts               |> Option.iter (fun v -> param.set_Mounts (Collections.Generic.List(v)) )             
+                Init                 |> Option.iter (fun v -> param.set_Init (Nullable(v) ) )                
+                InitPath             |> Option.iter param.set_InitPath 
+
+                param
+
+            /// Creates CreateContainerParameters
+            static member InitCreateContainerParameters
+                (
+                    ?Hostname,
+                    ?HostConfig,
+                    ?Domainname,
+                    ?User,
+                    ?AttachStdin,
+                    ?AttachStdout,
+                    ?AttachStderr,
+                    ?ExposedPorts,
+                    ?Tty,
+                    ?OpenStdin,
+                    ?StdinOnce,
+                    ?Env : seq<string>,
+                    ?Cmd : seq<string>,
+                    ?Healthcheck,
+                    ?ArgsEscaped,
+                    ?Image,
+                    ?Volumes,
+                    ?WorkingDir,
+                    ?Entrypoint : seq<string>,
+                    ?NetworkDisabled,
+                    ?MacAddress,
+                    ?OnBuild : seq<string>,
+                    ?Labels,
+                    ?StopSignal,
+                    ?StopTimeout,
+                    ?Shell : seq<string>
+                ) = 
+                
+                let param = new CreateContainerParameters()
+                
+                Hostname         |> Option.iter param.set_Hostname       
+                HostConfig       |> Option.iter param.set_HostConfig
+                Domainname       |> Option.iter param.set_Domainname     
+                User             |> Option.iter param.set_User           
+                AttachStdin      |> Option.iter param.set_AttachStdin    
+                AttachStdout     |> Option.iter param.set_AttachStdout   
+                AttachStderr     |> Option.iter param.set_AttachStderr   
+                ExposedPorts     |> Option.iter param.set_ExposedPorts   
+                Tty              |> Option.iter param.set_Tty            
+                OpenStdin        |> Option.iter param.set_OpenStdin      
+                StdinOnce        |> Option.iter param.set_StdinOnce      
+                Env              |> Option.iter (fun v -> param.set_Env (Collections.Generic.List(v)) )            
+                Cmd              |> Option.iter (fun v -> param.set_Cmd (Collections.Generic.List(v)) )                
+                Healthcheck      |> Option.iter param.set_Healthcheck    
+                ArgsEscaped      |> Option.iter param.set_ArgsEscaped    
+                Image            |> Option.iter param.set_Image          
+                Volumes          |> Option.iter param.set_Volumes        
+                WorkingDir       |> Option.iter param.set_WorkingDir     
+                Entrypoint       |> Option.iter (fun v -> param.set_Entrypoint (Collections.Generic.List(v)) )      
+                NetworkDisabled  |> Option.iter param.set_NetworkDisabled
+                MacAddress       |> Option.iter param.set_MacAddress     
+                OnBuild          |> Option.iter (fun v -> param.set_OnBuild (Collections.Generic.List(v)) )         
+                Labels           |> Option.iter param.set_Labels         
+                StopSignal       |> Option.iter param.set_StopSignal     
+                StopTimeout      |> Option.iter (fun v -> param.set_StopTimeout (Nullable(v) ) )   
+                Shell            |> Option.iter (fun v -> param.set_Shell (Collections.Generic.List(v)) )  
+                
+                param
+
+                
+            /// Creates ContainerStartParameters
+            static member InitContainerStartParameters
+                (                    
+                    ?DetachKeys 
+                ) = 
+                
+                let param = new ContainerStartParameters()                
+                DetachKeys         |> Option.iter param.set_DetachKeys       
+
+                param
+
+
+            /// Creates ContainerLogsParameters
+            static member InitContainerLogsParameters
+                (                    
+                    ?Follow,
+                    ?ShowStderr,
+                    ?ShowStdout,
+                    ?Since,
+                    ?Tail,
+                    ?Timestamps
+                ) = 
+                
+                let param = new ContainerLogsParameters()
+                Follow         |> Option.iter (fun v -> param.set_Follow     (Nullable(v) ) )
+                ShowStderr     |> Option.iter (fun v -> param.set_ShowStderr (Nullable(v) ) )
+                ShowStdout     |> Option.iter (fun v -> param.set_ShowStdout (Nullable(v) ) )
+                Since          |> Option.iter param.set_Since
+                Tail           |> Option.iter param.set_Tail
+                Timestamps     |> Option.iter (fun v -> param.set_Timestamps (Nullable(v) ) )
+                                
+                param
+
+
+            /// Creates ContainerRemoveParameters
+            static member InitContainerRemoveParameters
+                (                    
+                    ?Force,
+                    ?RemoveLinks,
+                    ?RemoveVolumes
+                ) = 
+                
+                let param = new ContainerRemoveParameters ()                
+                Force         |> Option.iter (fun v -> param.set_Force         (Nullable(v) ) )
+                RemoveLinks   |> Option.iter (fun v -> param.set_RemoveLinks   (Nullable(v) ) )
+                RemoveVolumes |> Option.iter (fun v -> param.set_RemoveVolumes (Nullable(v) ) )
+                                
+                param
+
+
+            /// Creates ContainerPathStatParameters
+            static member InitContainerPathStatParameters
+                (                    
+                    ?AllowOverwriteDirWithFile,
+                    // Target path (example: /tmp/) 
+                    ?Path
+                ) = 
+                
+                let param = new ContainerPathStatParameters()                
+                AllowOverwriteDirWithFile |> Option.iter (fun v -> param.set_AllowOverwriteDirWithFile (Nullable(v) ) )
+                Path                      |> Option.iter param.set_Path
+                                
+                param
+
+
+            /// Creates GetArchiveFromContainerParameters
+            static member InitGetArchiveFromContainerParameters
+                (                    
+                    ?Path
+                ) = 
+                
+                let param = new GetArchiveFromContainerParameters()                
+                Path    |> Option.iter param.set_Path
+                                
+                param
+
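+        // Usage sketch (illustrative values, not part of the original source): composing the
+        // parameter helpers above into a container configuration with a bind mount.
+        //
+        //     let mount      = ContainerParams.InitMount(Type="bind", Source="C:/tmp", Target="/data", ReadOnly=false)
+        //     let hostConfig = ContainerParams.InitHostConfig(Mounts=[mount])
+        //     let createParam =
+        //         ContainerParams.InitCreateContainerParameters(Image="ubuntu", HostConfig=hostConfig, Cmd=["/bin/bash"])
+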
+
+        /// Creates docker container with CreateContainerParameters for config (async)
+        let createContainerWithAsync (connection:DockerClient) (param:CreateContainerParameters) =        
+            async {              
+                let! tmp = 
+                    connection.Containers.CreateContainerAsync(param)
+                    |> Async.AwaitTask            
+        
+                return tmp
+                }   
+                
+
+        /// Creates docker container of an image given by image name (async)
+        let createContainerByImageAsync (connection:DockerClient) imageName =            
+            async {         
+                let param = ContainerParams.InitCreateContainerParameters(Image=imageName)                
+                //param.Cmd <-  Collections.Generic.List(["/bin/bash";"c";"echo 'Hello'"])        
+                let! tmp = 
+                    connection.Containers.CreateContainerAsync(param)
+                    |> Async.AwaitTask            
+        
+                return tmp
+                }   
+                
+
+        /// Creates docker container of an image given by image name
+        let createContainerByImage (connection:DockerClient) imageName =
+            createContainerByImageAsync connection imageName
+            |>  Async.RunSynchronously 
+
+
+        /// Creates an exec instance in the container given by id, with ContainerExecCreateParameters for config (async)
+        let createContainerWithExecConfigAsync (connection:DockerClient) (param:ContainerExecCreateParameters) id =        
+            async {              
+                let! tmp = 
+                    connection.Containers.ExecCreateContainerAsync(id,param,CancellationToken.None)
+                    |> Async.AwaitTask            
+        
+                return tmp
+                }   
+
+
+        /// Start container with CreateContainerParameters for config (async)
+        let startContainerWithAsync (connection:DockerClient) (param:ContainerStartParameters) id =        
+            async {              
+                let! isRunning = 
+                    connection.Containers.StartContainerAsync(id,param,CancellationToken.None)
+                    |> Async.AwaitTask            
+        
+                return isRunning
+                }   
+
+
+        /// Start container by a given ID (async)
+        let startContainerAsync (connection:DockerClient) id =        
+            startContainerWithAsync connection (ContainerParams.InitContainerStartParameters()) id
+
+
+        /// Start container by a given ID
+        let startContainer (connection:DockerClient) id =        
+            startContainerWithAsync connection (ContainerParams.InitContainerStartParameters()) id
+            |> Async.RunSynchronously
+
+
+        /// Start container with ContainerExecStartParameters for config (async)
+        let startContainerWithExecConfigAsync (connection:DockerClient) (param:ContainerExecStartParameters) id =        
+            async {              
+                let! isRunning = 
+                    connection.Containers.StartWithConfigContainerExecAsync(id,param,CancellationToken.None)
+                    |> Async.AwaitTask            
+        
+                return isRunning
+                }   
+
+
+
+        /// Wait for container (async)
+        let waitContainerAsync (connection:DockerClient) id =        
+            async {              
+                let! tmp = 
+                    connection.Containers.WaitContainerAsync(id,CancellationToken.None)
+                    |> Async.AwaitTask            
+        
+                return tmp
+                }   
+
+        /// Wait for container 
+        let waitContainer (connection:DockerClient) id =        
+            waitContainerAsync connection id
+            |> Async.RunSynchronously
+
+       
+        /// Get logs from container (async)
+        let getContainerLogsAsync (connection:DockerClient) param id =        
+            async {                              
+                let! tmp = 
+                    connection.Containers.GetContainerLogsAsync(id,param,CancellationToken.None)
+                    |> Async.AwaitTask            
+        
+                return tmp
+                }   
+
+
+        /// Get logs from container and show StdOut
+        let getContainerLogs (connection:DockerClient) id =        
+            getContainerLogsAsync connection (ContainerParams.InitContainerLogsParameters(ShowStdout=true)) id
+            |> Async.RunSynchronously
+
+
+        /// Lists available containers with ContainersListParameters for filtering (async)
+        let listContainersWithAsync (connection:DockerClient) (param:ContainersListParameters) =
+            //  ContainersListParameters are only for filtering
+            async {                
+                let! tmp = 
+                    connection.Containers.ListContainersAsync(param,CancellationToken.None)              
+                    |> Async.AwaitTask                    
+                return (tmp |> Seq.map id)
+                }
+
+
+        /// Lists all available containers (async)
+        let listAllContainersAsync (connection:DockerClient) =
+            listContainersWithAsync connection (ContainerParams.InitContainerListParameters(All=true))
+
+                       
+        /// Returns true if a container whose ancestor image matches the dockerid exists (async)
+        let existsByAsync (connection:DockerClient) (dockerid:DockerId) =
+            let filter = Filters.InitContainerFilters(Ancestor=dockerid)
+            let param = ContainerParams.InitContainerListParameters(All=true,Filters=filter)
+            async {
+                let! tmp = listContainersWithAsync connection param                
+                 
+                return (Seq.length tmp > 0)
+                }
+
+
+        /// Returns true if a container whose ancestor image matches the dockerid exists
+        let existsByImage (connection:DockerClient) (dockerid:DockerId) =
+            existsByAsync connection dockerid
+            |> Async.RunSynchronously
+                       
+
+        /// Removes the container given by the dockerid, with ContainerRemoveParameters for config (async)
+        let removeContainerWithAsync (connection:DockerClient) (param) (dockerid:DockerId) =
+            async {                
+                do!
+                    connection.Containers.RemoveContainerAsync(dockerid.ToString(),param,CancellationToken.None)              
+                    |> Async.AwaitTask                                    
+                }
+
+
+        /// Removes the container given by the dockerid, with default parameters (async)
+        let removeContainerAsync (connection:DockerClient) (dockerid:DockerId) =
+            let param = ContainerParams.InitContainerRemoveParameters()
+            removeContainerWithAsync connection param dockerid
+
+
+        /// Creates an exec instance in the container that will perform the given command (async). IMPORTANT: start it with startContainerExecAsync
+        let execCreateContainerAsync (connection:DockerClient) param id =        
+            async {                              
+                let! tmp = 
+                    connection.Containers.ExecCreateContainerAsync (id,param,CancellationToken.None)
+                    |> Async.AwaitTask            
+        
+                return tmp
+                }  
+
+        /// Starts a previously created exec instance by its ID (async)
+        let startContainerExecAsync (connection:DockerClient) id =        
+            async {                              
+                do!
+                    connection.Containers.StartContainerExecAsync (id,CancellationToken.None)
+                    |> Async.AwaitTask                                    
+                }  
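+
+        // Usage sketch (illustrative; not part of the original source): running a command inside an
+        // existing container via the exec API - create the exec instance first, then start it.
+        //
+        //     let execEcho (connection:DockerClient) containerId =
+        //         async {
+        //             let execParam     = ContainerParams.InitContainerExecCreateParameters(Cmd=["echo"; "hello"])
+        //             let! execResponse = execCreateContainerAsync connection execParam containerId
+        //             do! startContainerExecAsync connection execResponse.ID
+        //         }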
+
+        /// Stops the container given by ID, with ContainerStopParameters for config (async)
+        let stopContainerAsync (connection:DockerClient) (param) id =        
+            async {                              
+                let! stopped =
+                    connection.Containers.StopContainerAsync(id,param,CancellationToken.None)
+                    |> Async.AwaitTask                                    
+                return stopped
+                }  
+
+        /// Writes a (tar archive) stream into the docker container (async)
+        let extractArchiveToContainerAsync (connection:DockerClient) (param) id stream =        
+            async {                              
+                do!
+                    connection.Containers.ExtractArchiveToContainerAsync(id,param,stream,CancellationToken.None)
+                    |> Async.AwaitTask                                    
+                }  
+        
+        
+        /// Reads a (tar archive) stream from the docker container, or only its path stat if statOnly is set (async)
+        let getArchiveFromContainerAsync (connection:DockerClient) (param) statOnly id =        
+            async {                              
+                let! response =
+                    connection.Containers.GetArchiveFromContainerAsync(id,param,statOnly,CancellationToken.None)
+                    |> Async.AwaitTask                                    
+                return response
+                }  
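+
+        // Usage sketch (illustrative; not part of the original source): a minimal
+        // create -> start -> wait -> logs round trip built from the helpers above.
+        //
+        //     let runToCompletion (connection:DockerClient) imageName =
+        //         let container = createContainerByImage connection imageName
+        //         if startContainer connection container.ID then
+        //             waitContainer connection container.ID |> ignore
+        //             Some (getContainerLogs connection container.ID)
+        //         else None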
+
+
+    //#####################################################
+    //#####################################################
+    //#####################################################
+
+    module Volumes =
+        /// Provides a set of static methods for creating Volume parameters.
+        type VolumesParams =
+    
+            /// Creates VolumesCreateParameters
+            static member InitVolumesCreateParameters
+                (                    
+                    ?Driver,
+                    ?DriverOpts,
+                    ?Labels,
+                    ?Name
+
+                ) = 
+                
+                let param = new VolumesCreateParameters()
+                Driver     |> Option.iter param.set_Driver
+                DriverOpts |> Option.iter param.set_DriverOpts
+                Labels     |> Option.iter param.set_Labels
+                Name       |> Option.iter param.set_Name    
+                
+                param
+
+        /// Create volume with VolumesCreateParameters (async)
+        let createAsync (connection:DockerClient) param =        
+            async {                              
+                let! tmp = 
+                    connection.Volumes.CreateAsync(param,CancellationToken.None)
+                    |> Async.AwaitTask            
+        
+                return tmp
+                } 
+
+
+        /// Create volume with VolumesCreateParameters
+        let create (connection:DockerClient) param =        
+            createAsync connection param
+            |> Async.RunSynchronously
+
+
+        /// Inspect volume (async)
+        let inspectAsync (connection:DockerClient) name =        
+            async {                              
+                let! tmp = 
+                    connection.Volumes.InspectAsync(name,CancellationToken.None)
+                    |> Async.AwaitTask            
+        
+                return tmp
+                } 
+
+
+        /// Inspect volume 
+        let inspect (connection:DockerClient) name =        
+            inspectAsync connection name
+            |> Async.RunSynchronously
+
+
+        /// List volume (async)
+        let listVolumesAsync (connection:DockerClient) =        
+            async {                              
+                let! tmp = 
+                    connection.Volumes.ListAsync(CancellationToken.None)
+                    |> Async.AwaitTask            
+        
+                return tmp
+                } 
+
+
+        /// List volume 
+        let listVolumes (connection:DockerClient) =        
+            listVolumesAsync connection
+            |> Async.RunSynchronously
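+
+        // Usage sketch (illustrative name; not part of the original source): creating a named
+        // volume with the helper above and inspecting it afterwards.
+        //
+        //     let volume  = create connection (VolumesParams.InitVolumesCreateParameters(Name="biofsharp-data"))
+        //     let details = inspect connection volume.Name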
+
+
+
+
+
diff --git a/src/BioFSharp.BioTools/FastP.fs b/src/BioFSharp.BioTools/FastP.fs
new file mode 100644
index 00000000..669ce8c2
--- /dev/null
+++ b/src/BioFSharp.BioTools/FastP.fs
@@ -0,0 +1,496 @@
+namespace BioFSharp.BioTools
+
+module FastP =
+    open FSharpAux
+    open BioContainer
+
+    let ImageFastp = Docker.DockerId.ImageId "fastp"
+
+
+    type IOOptions =
+        ///read1 input file name
+        |Input1 of string
+        ///read2 input file name
+        |Input2 of string
+        ///read1 output file name
+        |Output1 of string
+        ///read2 output file name
+        |Output2 of string
+        ///indicate the input is using phred64 scoring (it'll be converted to phred33, so the output will still be phred33)
+        |Phred64
+        ///compression level for gzip output (1 ~ 9). 1 is fastest, 9 is smallest, default is 4.
+        |OutputCompressionLevel of int
+        ///input from STDIN. If the STDIN is interleaved paired-end FASTQ, please also add --interleaved_in.
+        |STDIN
+        ///output passing-filters reads to STDOUT. This option will result in interleaved FASTQ output for paired-end input. Disabled by default.
+        |STDOUT
+        ///indicate that Input1 is an interleaved FASTQ which contains both read1 and read2. Disabled by default.
+        |InterleavedInput
+        ///specify how many reads/pairs to be processed. Default 0 means process all reads.
+        |ReadsToProcess of int
+        ///don't overwrite existing files. Overwriting is allowed by default.
+        |DontOverwrite
+
+        static member make = function
+            |Input1 i                   ->  sprintf "--in1 %s" i
+            |Input2 i                   ->  sprintf "--in2 %s" i
+            |Output1 i                  ->  sprintf "--out1 %s" i
+            |Output2 i                  ->  sprintf "--out2 %s" i
+            |Phred64                    ->  "--phred64"
+            |OutputCompressionLevel i   ->  sprintf "--compression %i" i
+            |STDIN                      ->  "--stdin"
+            |STDOUT                     ->  "--stdout"
+            |InterleavedInput           ->  "--interleaved_in"
+            |ReadsToProcess i           ->  sprintf "--reads_to_process %i" i
+            |DontOverwrite              ->  "--dont_overwrite"
+
+        static member makeCmdWith (m:MountInfo) = function
+            |Input1 i                   ->  [ "--in1"; (MountInfo.containerPathOf m i) ] 
+            |Input2 i                   ->  [ "--in2"; (MountInfo.containerPathOf m i) ] 
+            |Output1 i                  ->  [ "--out1"; (MountInfo.containerPathOf m i)] 
+            |Output2 i                  ->  [ "--out2"; (MountInfo.containerPathOf m i)] 
+            |Phred64                    ->  ["--phred64"]
+            |OutputCompressionLevel i   ->  [ "--compression"; string i ]
+            |STDIN                      ->  ["--stdin"]
+            |STDOUT                     ->  ["--stdout"]
+            |InterleavedInput           ->  ["--interleaved_in"]
+            |ReadsToProcess i           ->  [ "--reads_to_process"; string i]
+            |DontOverwrite              ->  ["--dont_overwrite"]
+
+
+
+    type AdapterTrimmingOptions =
+        ///adapter trimming is enabled by default. If this option is specified, adapter trimming is disabled.
+        |DisableAdapterTrimming
+        ///the adapter for read1. For SE data, if not specified, the adapter will be auto-detected. For PE data, this is used if R1/R2 are found not overlapped. 
+        |AdapterSequence1 of string
+        ///the adapter for read2 (PE data only). This is used if R1/R2 are found not overlapped. If not specified, it will be the same as AdapterSequence1.
+        |AdapterSequence2 of string
+
+        static member  make = function
+            |DisableAdapterTrimming ->  "--disable_adapter_trimming"
+            |AdapterSequence1 a1    ->  sprintf "--adapter_sequence %s" a1
+            |AdapterSequence2 a2    ->  sprintf "--adapter_sequence_r2 %s" a2
+
+        static member  makeCmdWith (m:MountInfo) = function
+            |DisableAdapterTrimming ->  ["--disable_adapter_trimming"]
+            |AdapterSequence1 a1    ->  ["--adapter_sequence"; a1]
+            |AdapterSequence2 a2    ->  ["--adapter_sequence_r2"; a2]
+
+
+
+    type GlobalTrimmingOptions =
+        ///trimming how many bases in front for read1, default is 0.
+        |TrimFront1 of int 
+        ///trimming how many bases in front for read2. If it's not specified, it will follow read1's settings.
+        |TrimFront2 of int
+        ///trimming how many bases in tail for read1, default is 0.
+        |TrimTail1 of int
+        ///trimming how many bases in tail for read2. If it's not specified, it will follow read1's settings.
+        |TrimTail2 of int
+
+        static member make = function
+            |TrimFront1 tf1 ->  sprintf "--trim_front1 %i" tf1
+            |TrimFront2 tf2 ->  sprintf "--trim_front2 %i" tf2
+            |TrimTail1 tt1  ->  sprintf "--trim_tail1 %i" tt1
+            |TrimTail2 tt2  ->  sprintf "--trim_tail2 %i" tt2
+
+        static member makeCmdWith (m:MountInfo) = function
+            |TrimFront1 tf1 ->  ["--trim_front1"; string tf1]
+            |TrimFront2 tf2 ->  ["--trim_front2"; string tf2]
+            |TrimTail1 tt1  ->  ["--trim_tail1"; string tt1]
+            |TrimTail2 tt2  ->  ["--trim_tail2"; string tt2]
+
+
+
+    type PolyXTrimmingOptions =
+        ///force polyG tail trimming, by default trimming is automatically enabled for Illumina NextSeq/NovaSeq data.
+        |PolyGTrimming
+        ///the minimum length to detect polyG in the read tail. 10 by default.
+        |PolyGMinLength of int
+        ///disable polyG tail trimming, by default trimming is automatically enabled for Illumina NextSeq/NovaSeq data.
+        |DisablePolyGTrimming
+        ///enable polyX trimming in 3' ends.
+        |PolyXTrimming
+        ///the minimum length to detect polyX in the read tail. 10 by default.
+        |PolyXMinLength of int
+
+        static member make = function
+            |PolyGTrimming          -> "--trim_poly_g"
+            |PolyGMinLength pgl     -> sprintf "--poly_g_min_len %i" pgl
+            |DisablePolyGTrimming   -> "--disable_trim_poly_g"
+            |PolyXTrimming          -> "--trim_poly_x"
+            |PolyXMinLength pxl     -> sprintf "--poly_x_min_len %i" pxl
+
+        static member makeCmdWith (m:MountInfo) = function
+            |PolyGTrimming          -> ["--trim_poly_g"]
+            |PolyGMinLength pgl     -> [ "--poly_g_min_len"; string pgl]
+            |DisablePolyGTrimming   -> ["--disable_trim_poly_g"]
+            |PolyXTrimming          -> ["--trim_poly_x"]
+            |PolyXMinLength pxl     -> [ "--poly_x_min_len"; string pxl]
+
+
+
+    type QualityCuttingOptions =
+        ///enable per read cutting by quality in front (5'), default is disabled (WARNING: this will interfere deduplication for both PE/SE data).
+        |FivePrimeQualityCut
+        ///enable per read cutting by quality in tail (3'), default is disabled (WARNING: this will interfere deduplication for SE data).
+        |ThreePrimeQualityCut
+        ///the size of the sliding window for sliding window trimming, default is 4.
+        |CutWindowSize of int
+        ///the bases in the sliding window with mean quality below cutting_quality will be cut, default is Q20.
+        |CutMeanQuality of int
+
+        static member make = function
+            |FivePrimeQualityCut    ->  "--cut_by_quality5"
+            |ThreePrimeQualityCut   ->  "--cut_by_quality3"
+            |CutWindowSize cws      ->  sprintf "--cut_window_size %i" cws
+            |CutMeanQuality cmq     ->  sprintf "--cut_mean_quality %i" cmq
+
+        static member makeCmdWith (m:MountInfo) = function
+            |FivePrimeQualityCut    ->  ["--cut_by_quality5"]
+            |ThreePrimeQualityCut   ->  ["--cut_by_quality3"]
+            |CutWindowSize cws      ->  ["--cut_window_size"; string cws]
+            |CutMeanQuality cmq     ->  ["--cut_mean_quality"; string cmq]
+
+
+    type QualityFilteringOptions =
+        ///quality filtering is enabled by default. If this option is specified, quality filtering is disabled.
+        |DisableQualityFiltering
+        ///the quality value that a base is qualified. Default 15 means phred quality >=Q15 is qualified.
+        |BaseQualityThreshold of int
+        ///how many percents of bases are allowed to be unqualified (0~100). Default 40 means 40%.
+        |UnqualifiedThreshold of int
+        ///if one read's number of N base is >n_base_limit, then this read/pair is discarded. Default is 5.
+        |NBaseLimit of int
+
+        static member make = function   
+            |DisableQualityFiltering    -> "--disable_quality_filtering"
+            |BaseQualityThreshold bqt   -> sprintf "--qualified_quality_phred %i" bqt
+            |UnqualifiedThreshold ut    -> sprintf "--unqualified_percent_limit %i" ut
+            |NBaseLimit nbl             -> sprintf "--n_base_limit %i" nbl
+
+        static member makeCmdWith (m:MountInfo) = function   
+            |DisableQualityFiltering    -> ["--disable_quality_filtering"]
+            |BaseQualityThreshold bqt   -> [ "--qualified_quality_phred"; string bqt]
+            |UnqualifiedThreshold ut    -> [ "--unqualified_percent_limit"; string ut]
+            |NBaseLimit nbl             -> [ "--n_base_limit"; string nbl]
+
+
+    type LengthFilteringOptions =
+        ///length filtering is enabled by default. If this option is specified, length filtering is disabled.
+        |DisableLengthFiltering
+        ///reads shorter than length_required will be discarded, default is 15.
+        |RequiredLength of int
+        ///reads longer than length_limit will be discarded, default 0 means no limitation.
+        |LengthLimit of int
+    
+        static member make = function
+            |DisableLengthFiltering -> "--disable_length_filtering"
+            |RequiredLength rl      -> sprintf "--length_required %i" rl
+            |LengthLimit lm         -> sprintf "--length_limit %i" lm
+
+        static member makeCmdWith (m:MountInfo) = function
+            |DisableLengthFiltering -> ["--disable_length_filtering"]
+            |RequiredLength rl      -> [ "--length_required"; string rl]
+            |LengthLimit lm         -> [ "--length_limit"; string lm]
+
+
+    type LowComplexityFilteringOptions =
+        ///enable low complexity filter. The complexity is defined as the percentage of base that is different from its next base (base[i] != base[i+1]).
+        |EnableLowComplexityFiltering
+        ///the threshold for low complexity filter (0~100). Default is 30, which means 30% complexity is required.
+        |ComplexityThreshold of int
+
+        static member make = function
+            |EnableLowComplexityFiltering   -> "--low_complexity_filter"
+            |ComplexityThreshold ct         -> sprintf "--complexity_threshold %i" ct
+
+        static member makeCmdWith (m:MountInfo) = function
+            |EnableLowComplexityFiltering   -> ["--low_complexity_filter"]
+            |ComplexityThreshold ct         -> ["--complexity_threshold"; string ct]
+
+
+    type IndexFilteringOptions =
+        ///specify a file contains a list of barcodes of index1 to be filtered out, one barcode per line.
+        |FilterByIndex1 of string
+        ///specify a file contains a list of barcodes of index2 to be filtered out, one barcode per line.
+        |FilterByIndex2 of string
+        ///the allowed difference of index barcode for index filtering, default 0 means completely identical.
+        |IndexFilterThreshold of int
+
+        static member make = function
+            |FilterByIndex1 fbi1        -> sprintf "--filter_by_index1 %s" fbi1
+            |FilterByIndex2 fbi2        -> sprintf "--filter_by_index2 %s" fbi2
+            |IndexFilterThreshold ift   -> sprintf "--filter_by_index_threshold %i" ift
+
+        static member makeCmdWith (m:MountInfo) = function
+            |FilterByIndex1 fbi1        -> ["--filter_by_index1"; fbi1]
+            |FilterByIndex2 fbi2        -> ["--filter_by_index2"; fbi2]
+            |IndexFilterThreshold ift   -> ["--filter_by_index_threshold"; string ift]
+
+
+    type BaseCorrectionOptions =
+        ///enable base correction in overlapped regions (only for PE data), default is disabled.
+        |EnableBaseCorrection
+        ///the minimum length of the overlapped region for overlap analysis based adapter trimming and correction. 30 by default.
+        |RequiredLengthOverlap of int
+        ///the maximum difference of the overlapped region for overlap analysis based adapter trimming and correction. 5 by default.
+        |MaxOverlapDifference of int
+
+        static member make = function
+            |EnableBaseCorrection       -> "--correction" 
+            |RequiredLengthOverlap rlo  -> sprintf "--overlap_len_require %i" rlo
+            |MaxOverlapDifference modiff   -> sprintf "--overlap_diff_limit %i" modiff
+
+        static member makeCmdWith (m:MountInfo) = function
+            |EnableBaseCorrection       -> ["--correction"]
+            |RequiredLengthOverlap rlo  -> [ "--overlap_len_require"; string rlo]
+            |MaxOverlapDifference modiff ->[  "--overlap_diff_limit"; string modiff]
+
+
+    type UMILocation =
+        |Index1
+        |Index2
+        |Read1
+        |Read2
+        |PerIndex
+        |PerRead
+        |NoLocation
+
+        static member make = function
+            |Index1     -> "index1"
+            |Index2     -> "index2"
+            |Read1      -> "read1"
+            |Read2      -> "read2"
+            |PerIndex   -> "per_index"
+            |PerRead    -> "per_read"
+            |NoLocation -> ""
+
+
+        static member makeCmdWith (m:MountInfo) = function
+            |Index1     -> ["index1"]
+            |Index2     -> ["index2"]
+            |Read1      -> ["read1"]
+            |Read2      -> ["read2"]
+            |PerIndex   -> ["per_index"]
+            |PerRead    -> ["per_read"]
+            |NoLocation -> [""]
+
+    //========================================= Helper ==========================================
+    let stringOfUMILocation (loc:UMILocation) =
+        match loc with
+        |Index1     -> "index1"
+        |Index2     -> "index2"
+        |Read1      -> "read1"
+        |Read2      -> "read2"
+        |PerIndex   -> "per_index"
+        |PerRead    -> "per_read"
+        |NoLocation -> ""
+    //=============================================================================================
+
+    type UMIProcessingOptions =
+        ///enable unique molecular identifer (UMI) preprocessing.
+        |EnableUMI
+        ///specify the location of UMI, can be index1/index2/read1/read2/per_index/per_read, default is none.
+        |Location of UMILocation
+        ///if the UMI is in read1/read2, its length should be provided.
+        |UMILength of int
+        ///if specified, an underline will be used to connect prefix and UMI (i.e. prefix=UMI, UMI=AATTCG, final=UMI_AATTCG). No prefix by default.
+        |UMIPrefix of string
+        ///if the UMI is in read1/read2, fastp can skip several bases following UMI, default is 0.
+        |UMISkip of int
+
+        static member make = function
+            |EnableUMI          -> sprintf "--umi"
+            |Location loc       -> sprintf "--umi_loc %s" (stringOfUMILocation loc) //(stringOfUMILocation loc)
+            |UMILength len      -> sprintf "--umi_len %i" len
+            |UMIPrefix prefix   -> sprintf "--umi_prefix %s" prefix
+            |UMISkip s          -> sprintf "--umi_skip %i" s
+
+        static member makeCmdWith (m:MountInfo) = function
+            |EnableUMI          -> ["--umi"]
+            |Location loc       -> ["--umi_loc"; (stringOfUMILocation loc)]   //(stringOfUMILocation loc)
+            |UMILength len      -> ["--umi_len"; string len]
+            |UMIPrefix prefix   -> ["--umi_prefix"; prefix]
+            |UMISkip s          -> ["--umi_skip"; string s]
+
+
+    type OverrepresentationAnalysisOptions =
+        ///enable overrepresented sequence analysis.
+        |EnableORA
+        ///One in ORASampling reads will be computed for overrepresentation analysis (1~10000), smaller is slower, default is 20.
+        |ORASampling of int
+
+        static member make = function
+            |EnableORA          -> "--overrepresentation_analysis"
+            |ORASampling smpl   -> sprintf "--overrepresentation_sampling %i" smpl
+
+        static member makeCmdWith (m:MountInfo) = function
+            |EnableORA          -> ["--overrepresentation_analysis"]
+            |ORASampling smpl   -> [ "--overrepresentation_sampling"; string smpl]
+
+
+    type ReportingOptions =
+        ///the json format report file name. default is "fastp.json".
+        |JsonReport of string
+        ///the html format report file name. default is "fastp.html".
+        |HtmlReport of string
+        ///the report title; should be quoted with ' or ", default is "fastp report".
+        |ReportTitle of string    
+
+        static member make = function
+            |JsonReport jsonrep ->  sprintf "--json %s" jsonrep 
+            |HtmlReport htmlrep ->  sprintf "--html %s" htmlrep 
+            |ReportTitle title  ->  sprintf "--report_title %s" title 
+
+        static member makeCmdWith (m:MountInfo) = function
+            |JsonReport jsonrep ->  ["--json"; (MountInfo.containerPathOf m jsonrep)]
+            |HtmlReport htmlrep ->  ["--html"; (MountInfo.containerPathOf m htmlrep)]
+            |ReportTitle title  ->  ["--report_title"; title]
+
+
+    type OutputSplittingOptions =
+        ///split output by limiting total split file number with this option (2~999), a sequential number prefix will be added to output name ( 0001.out.fq, 0002.out.fq...), disabled by default.
+        |Split of int
+        ///split output by limiting lines of each file with this option(>=1000), a sequential number prefix will be added to output name ( 0001.out.fq, 0002.out.fq...), disabled by default.
+        |SplitByLines of int64
+        ///the digits for the sequential number padding (1~10), default is 4, so the filename will be padded as 0001.xxx, 0 to disable padding.
+        |SplitPrefixDigits of int
+
+        static member make = function
+            |Split s                ->  sprintf "--split %i" s
+            |SplitByLines sbl       ->  sprintf "--split_by_lines %i" sbl
+            |SplitPrefixDigits spd  ->  sprintf "--split_prefix_digits %i" spd
+
+        static member makeCmdWith (m:MountInfo) = function
+            |Split s                -> ["--split"; string s]
+            |SplitByLines sbl       -> ["--split_by_lines"; string sbl]
+            |SplitPrefixDigits spd  -> ["--split_prefix_digits"; string spd]
+
+
+
+    type FastpParams =
+        ///worker thread number, default is 2
+        |NumThreads                 of int
+        |IO                         of IOOptions list
+        |AdapterTrimming            of AdapterTrimmingOptions list
+        |GlobalTrimming             of GlobalTrimmingOptions list
+        |PolyXTrimming              of PolyXTrimmingOptions list
+        |QualityCutting             of QualityCuttingOptions list
+        |QualityFiltering           of QualityFilteringOptions list
+        |LengthFiltering            of LengthFilteringOptions list
+        |LowComplexityFiltering     of LowComplexityFilteringOptions list
+        |IndexFiltering             of IndexFilteringOptions list
+        |BaseCorrection             of BaseCorrectionOptions list
+        |UMIProcessing              of UMIProcessingOptions list
+        |OverrepresentationAnalysis of OverrepresentationAnalysisOptions list
+        |Reporting                  of ReportingOptions list
+        |OutputSplitting            of OutputSplittingOptions list
+
+        static member make = function
+            |NumThreads t                 -> [sprintf "--thread %i" t]
+            |IO l                         -> l |> List.map (fun p -> IOOptions.make p)
+            |AdapterTrimming l            -> l |> List.map (fun p -> AdapterTrimmingOptions.make p)
+            |GlobalTrimming l             -> l |> List.map (fun p -> GlobalTrimmingOptions.make p)
+            |PolyXTrimming l              -> l |> List.map (fun p -> PolyXTrimmingOptions.make p)
+            |QualityCutting l             -> l |> List.map (fun p -> QualityCuttingOptions.make p)
+            |QualityFiltering l           -> l |> List.map (fun p -> QualityFilteringOptions.make p)
+            |LengthFiltering l            -> l |> List.map (fun p -> LengthFilteringOptions.make p)
+            |LowComplexityFiltering l     -> l |> List.map (fun p -> LowComplexityFilteringOptions.make p)
+            |IndexFiltering l             -> l |> List.map (fun p -> IndexFilteringOptions.make p)
+            |BaseCorrection l             -> l |> List.map (fun p -> BaseCorrectionOptions.make p)
+            |UMIProcessing l              -> l |> List.map (fun p -> UMIProcessingOptions.make p)
+            |OverrepresentationAnalysis l -> l |> List.map (fun p -> OverrepresentationAnalysisOptions.make p)
+            |Reporting l                  -> l |> List.map (fun p -> ReportingOptions.make p)
+            |OutputSplitting l            -> l |> List.map (fun p -> OutputSplittingOptions.make p)
+
+
+        static member makeCmd (m:MountInfo) = function
+            |NumThreads t                 -> ["--thread"; string t]
+            |IO l                         -> l |> List.map (fun p -> IOOptions.makeCmdWith m p) |> List.concat
+            |AdapterTrimming l            -> l |> List.map (fun p -> AdapterTrimmingOptions.makeCmdWith m p) |> List.concat
+            |GlobalTrimming l             -> l |> List.map (fun p -> GlobalTrimmingOptions.makeCmdWith m p) |> List.concat
+            |PolyXTrimming l              -> l |> List.map (fun p -> PolyXTrimmingOptions.makeCmdWith m p) |> List.concat
+            |QualityCutting l             -> l |> List.map (fun p -> QualityCuttingOptions.makeCmdWith m p) |> List.concat
+            |QualityFiltering l           -> l |> List.map (fun p -> QualityFilteringOptions.makeCmdWith m p) |> List.concat
+            |LengthFiltering l            -> l |> List.map (fun p -> LengthFilteringOptions.makeCmdWith m p) |> List.concat
+            |LowComplexityFiltering l     -> l |> List.map (fun p -> LowComplexityFilteringOptions.makeCmdWith m p) |> List.concat
+            |IndexFiltering l             -> l |> List.map (fun p -> IndexFilteringOptions.makeCmdWith m p) |> List.concat
+            |BaseCorrection l             -> l |> List.map (fun p -> BaseCorrectionOptions.makeCmdWith m p) |> List.concat
+            |UMIProcessing l              -> l |> List.map (fun p -> UMIProcessingOptions.makeCmdWith m p) |> List.concat
+            |OverrepresentationAnalysis l -> l |> List.map (fun p -> OverrepresentationAnalysisOptions.makeCmdWith m p) |> List.concat
+            |Reporting l                  -> l |> List.map (fun p -> ReportingOptions.makeCmdWith m p) |> List.concat
+            |OutputSplitting l            -> l |> List.map (fun p -> OutputSplittingOptions.makeCmdWith m p) |> List.concat
+
+
+    let runFastpAsync (bcContext:BioContainer.BcContext) (opt:FastpParams list) =
+        let cmds = (opt |> List.map (FastpParams.makeCmd bcContext.Mount))
+        let tp = "fastp"::(cmds |> List.concat)
+
+        async {
+            let! fastP = BioContainer.execAsync bcContext tp
+            return fastP 
+            }
+        
+
+    let runFastp (bcContext:BioContainer.BcContext) (opt:FastpParams list) = 
+        runFastpAsync bcContext opt 
+        |> Async.RunSynchronously
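+
+    // Usage sketch: `bcContext` is assumed to be an already initialized
+    // BioContainer.BcContext for a fastp image whose mount covers the referenced files;
+    // the option list below is illustrative only.
+    //
+    //   runFastp bcContext [ NumThreads 4 ]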
+
diff --git a/src/BioFSharp.BioTools/HMMER.fs b/src/BioFSharp.BioTools/HMMER.fs
new file mode 100644
index 00000000..3b6136b2
--- /dev/null
+++ b/src/BioFSharp.BioTools/HMMER.fs
@@ -0,0 +1,1160 @@
+namespace BioFSharp.BioTools
+
+// adapted from the hmmer usage guide at http://eddylab.org/software/hmmer/Userguide.pdf
+// HMMER: biosequence analysis using profile hidden Markov models (http://hmmer.org/)
+// HMMER source code is available on github under The 3-Clause BSD License: https://github.com/EddyRivasLab/hmmer/blob/master/LICENSE
+
+///HMMER - Biological sequence analysis using profile hidden Markov models (http://hmmer.org/)
+///Functions providing an API for the HMMER 3 biocontainer
+module HMMER =
+
+    open BioContainer
+
+    //Common parameter types for multiple CLI tools
+    type AlphabetType =
+        ///input alignment is protein sequence data
+        | AminoAcids
+        ///input alignment is DNA sequence data
+        | DNA
+        ///input alignment is RNA sequence data
+        | RNA
+
+        static member make = function
+            | AminoAcids->  ["--amino"]
+            | DNA       ->  ["--dna"]
+            | RNA       ->  ["--rna"]
+
+    ///Options controlling reporting thresholds:
+    //  -E      : report sequences <= this E-value threshold in output  [10.0]  (x>0)
+    //  -T      : report sequences >= this score threshold in output
+    //  --domE  : report domains <= this E-value threshold in output  [10.0]  (x>0)
+    //  --domT  : report domains >= this score cutoff in output
+
+    type ReportingThresholdOptions =
+        ///report sequences <= this E-value threshold in output
+        | MaxSequenceEvalue of float
+        ///report sequences >= this score threshold in output
+        | MinSequenceScore  of float
+        ///report domains <= this E-value threshold in output
+        | MaxDomainEvalue   of float
+        ///report domains >= this score cutoff in output
+        | MinDomainScore    of float
+
+        static member make = 
+            function
+            | MaxSequenceEvalue t   -> ["-E"; string t]
+            | MinSequenceScore  t   -> ["-T"; string t]
+            | MaxDomainEvalue   t   -> ["--domE"; string t]
+            | MinDomainScore    t   -> ["--domT"; string t]
+
+    ///Options controlling inclusion (significance) thresholds:
+    //  --incE     : consider sequences <= this E-value threshold as significant
+    //  --incT     : consider sequences >= this score threshold as significant
+    //  --incdomE  : consider domains <= this E-value threshold as significant
+    //  --incdomT  : consider domains >= this score threshold as significant
+
+    type InclusionThresholdOptions =
+        ///consider sequences <= this E-value threshold as significant
+        | MaxSequenceEvalue of float
+        ///consider sequences >= this score threshold as significant 
+        | MinSequenceScore  of float
+        ///consider domains <= this E-value threshold as significant
+        | MaxDomainEvalue   of float
+        ///consider domains >= this score threshold as significant
+        | MinDomainScore    of float
+
+        static member make = 
+            function
+            | MaxSequenceEvalue t   -> ["-incE"; string t]
+            | MinSequenceScore  t   -> ["-incT"; string t]
+            | MaxDomainEvalue   t   -> ["--incdomE"; string t]
+            | MinDomainScore    t   -> ["--incdomT"; string t]
+        
+    ///Options controlling model-specific thresholding:
+    //  --cut_ga : use profile's GA gathering cutoffs to set all thresholding
+    //  --cut_nc : use profile's NC noise cutoffs to set all thresholding
+    //  --cut_tc : use profile's TC trusted cutoffs to set all thresholding
+
+    type ModelSpecificThresholdOptions =
+        ///use profile's GA gathering cutoffs to set all thresholding
+        | GatheringCutoff
+        ///use profile's NC noise cutoffs to set all thresholding
+        | NoiseCutoff
+        ///use profile's TC trusted cutoffs to set all thresholding
+        | TrustedCutoff
+
+        static member make =
+            function
+            | GatheringCutoff   -> ["--cut_ga"]
+            | NoiseCutoff       -> ["--cut_nc"]
+            | TrustedCutoff     -> ["--cut_tc"]
+
+    ///Options controlling acceleration heuristics:
+    //  --max    : Turn all heuristic filters off (less speed, more power)
+    //  --F1  : Stage 1 (MSV) threshold: promote hits w/ P <= F1  [0.02]
+    //  --F2  : Stage 2 (Vit) threshold: promote hits w/ P <= F2  [1e-3]
+    //  --F3  : Stage 3 (Fwd) threshold: promote hits w/ P <= F3  [1e-5]
+    //  --nobias : turn off composition bias filter
+
+    type AccelerationHeuristicsOptions =
+        ///Turn all heuristic filters off (less speed, more power)
+        | NoFilters
+        ///Stage 1 (MSV) threshold: promote hits w/ P <= F1
+        | MSVThreshold  of float
+        ///Stage 2 (Vit) threshold: promote hits w/ P <= F2
+        | VitThreshold  of float
+        ///Stage 3 (Fwd) threshold: promote hits w/ P <= F3
+        | FwdThreshold  of float 
+        ///turn off composition bias filter
+        | NoBiasFilter
+
+        static member make =
+            function
+            | NoFilters     -> ["max"]
+            | MSVThreshold t-> ["F1"; string t]
+            | VitThreshold t-> ["F2"; string t]
+            | FwdThreshold t-> ["F3"; string t]
+            | NoBiasFilter  -> ["nobias"]
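+
+    // Each option list renders to CLI tokens via its `make`/`makeCmdWith` member,
+    // e.g. (illustrative):
+    //
+    //   AccelerationHeuristicsOptions.make (MSVThreshold 0.02)  // ["--F1"; "0.02"]
+    //   ReportingThresholdOptions.make (MaxSequenceEvalue 10.)  // ["-E"; "10"]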
+
+    ///hmmbuild - construct profiles from multiple sequence alignments
+    module HMMbuild =
+        //Usage: hmmbuild [-options]  
+
+        ///Options for selecting alphabet rather than guessing it:
+        //  --amino : input alignment is protein sequence data
+        //  --dna   : input alignment is DNA sequence data
+        //  --rna   : input alignment is RNA sequence data
+
+
+        ///Alternative model construction strategies:
+        //  --fast           : assign cols w/ >= symfrac residues as consensus  [default]
+        //  --hand           : manual construction (requires reference annotation)
+        //  --symfrac     : sets sym fraction controlling --fast construction  [0.5]
+        //  --fragthresh  : if L <= x*alen, tag sequence as a fragment  [0.5]
+
+        type ModelConstructionOptions =
+            | Fast
+            | Manual
+            | SymFraction       of float
+            | FragmentThreshold of float
+
+            static member make = function
+                | Fast                  -> ["--fast"]
+                | Manual                -> ["--hand"]
+                | SymFraction sf        -> ["--symfrac"; string sf]
+                | FragmentThreshold ft  -> ["--fragthresh"; string ft]
+
+        ///Alternative relative sequence weighting strategies:
+        //  --wpb     : Henikoff position-based weights  [default]
+        //  --wgsc    : Gerstein/Sonnhammer/Chothia tree weights
+        //  --wblosum : Henikoff simple filter weights
+        //  --wnone   : don't do any relative weighting; set all to 1
+        //  --wgiven  : use weights as given in MSA file
+        //  --wid  : for --wblosum: set identity cutoff  [0.62]  (0<=x<=1)
+
+        type RelativeSequenceWeightingOptions =
+            ///Henikoff position-based weights
+            | PositionBased
+            ///Gerstein/Sonnhammer/Chothia tree weights
+            | GSC
+            ///Henikoff simple filter weights
+            | Blosum
+            ///for Blosum: set identity cutoff (0<=x<=1)
+            | BlosumIdentityCutoff of float
+            ///don't do any relative weighting; set all to 1
+            | NoRelativeWeighting
+            ///use weights as given in MSA file
+            | Given 
+            
+            static member make = function
+                | PositionBased         -> ["--wpb"]
+                | GSC                   -> ["--wgsc"]
+                | Blosum                -> ["--wblosum"]
+                | BlosumIdentityCutoff c-> ["--wid"; string c]
+                | NoRelativeWeighting   -> ["--wnone"]
+                | Given                 -> ["--wgiven"]
+
+        ///Alternative effective sequence weighting strategies:
+        //  --eent       : adjust eff seq # to achieve relative entropy target  [default]
+        //  --eclust     : eff seq # is # of single linkage clusters
+        //  --enone      : no effective seq # weighting: just use nseq
+        //  --eset    : set eff seq # for all models to 
+        //  --ere     : for --eent: set minimum rel entropy/position to 
+        //  --esigma  : for --eent: set sigma param to   [45.0]
+        //  --eid     : for --eclust: set fractional identity cutoff to   [0.62]
+
+        type EffectiveSequenceWeightingOptions =
+            ///adjust eff seq # to achieve relative entropy target
+            | EntropyTargeted
+            ///for EntropyTargeted: set minimum rel entropy/position to 
+            | MinimalEntropy        of float
+            ///for EntropyTargeted: set sigma param to   [45.0]
+            | EntropySigma          of float
+            ///eff seq # is # of single linkage clusters
+            | SingleLinkageClusters
+            ///for SingleLinkageClusters: set fractional identity cutoff to   [0.62]
+            | ClusterIdentityCutoff of float
+            ///no effective seq # weighting: just use nseq
+            | NoEffectiveWeighting
+            ///set eff seq # for all models to 
+            | Set                   of float
+
+            static member make = function 
+                | EntropyTargeted           -> ["--eent"]
+                | SingleLinkageClusters     -> ["--eclust"]
+                | NoEffectiveWeighting      -> ["--enone"]
+                | Set s                     -> ["--eset";   string s]
+                | MinimalEntropy me         -> ["--ere";    string me]
+                | EntropySigma es           -> ["--esigma"; string es]
+                | ClusterIdentityCutoff c   -> ["--eid";    string c]
+
+
+        ///Alternative prior strategies:
+        //  --pnone       : don't use any prior; parameters are frequencies
+        //  --plaplace    : use a Laplace +1 prior
+        //  --popen    : force gap open prob. (w/ --singlemx, aa default 0.02, nt 0.031)
+        //  --pextend  : force gap extend prob. (w/ --singlemx, aa default 0.4, nt 0.75)
+
+        type PriorOptions =
+            ///don't use any prior; parameters are frequencies
+            | NoPrior
+            ///use a Laplace +1 prior
+            | Laplace
+            ///force gap open prob.
+            | ForcePGapOpen     of float
+            ///force gap extend prob
+            | ForcePGapExtend   of float
+
+            static member make = function 
+                |NoPrior            -> ["--pnone"]
+                |Laplace            -> ["--plaplace"]
+                |ForcePGapOpen f    -> ["--popen"; string f]
+                |ForcePGapExtend f  -> ["--pextend"; string f]
+
+        type SubstitutionScoreMatrix =
+            |PAM30      
+            |PAM70 
+            |PAM120 
+            |PAM240 
+            |BLOSUM45 
+            |BLOSUM50 
+            |BLOSUM62 
+            |BLOSUM80 
+            |BLOSUM90
+            |DNA1
+
+            static member make = function
+                |PAM30   -> "PAM30"
+                |PAM70   -> "PAM70"
+                |PAM120  -> "PAM120"
+                |PAM240  -> "PAM240"
+                |BLOSUM45-> "BLOSUM45"
+                |BLOSUM50-> "BLOSUM50"
+                |BLOSUM62-> "BLOSUM62"
+                |BLOSUM80-> "BLOSUM80"
+                |BLOSUM90-> "BLOSUM90"
+                |DNA1    -> "DNA1"
+
+        ///Handling single sequence inputs:
+        //  --singlemx   : use substitution score matrix for single-sequence inputs
+        //  --mx      : substitution score matrix (built-in matrices, with --singlemx)
+        //  --mxfile  : read substitution score matrix from file  (with --singlemx)
+        type SingleSequenceHandlingOptions =
+            ///use substitution score matrix for single-sequence inputs
+            | UseSubstitutionMatrix
+            ///substitution score matrix (built-in matrices, use with UseSubstitutionMatrix)
+            | SMatrix       of SubstitutionScoreMatrix
+            ///read substitution score matrix from file (use with UseSubstitutionMatrix)
+            | SMatrixFile   of string
+
+            static member make = function
+                | UseSubstitutionMatrix -> ["--singlemx"]
+                | SMatrix s             -> ["--mx"; (SubstitutionScoreMatrix.make s)]
+                | SMatrixFile path      -> ["--mxfile"; path]
+
+            static member makeWith (m:MountInfo) =
+                let cPath p = (MountInfo.containerPathOf m p)
+                function
+                | UseSubstitutionMatrix -> ["--singlemx"]
+                | SMatrix s             -> ["--mx"; (SubstitutionScoreMatrix.make s)]
+                | SMatrixFile path      -> ["--mxfile"; cPath path]
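+
+            // Note: `make` emits host paths verbatim, while `makeWith` rewrites file
+            // arguments to their location inside the container via
+            // MountInfo.containerPathOf, so the assembled command is valid in the
+            // biocontainer.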
+
+        //Control of E-value calibration:
+        //  --EmL  : length of sequences for MSV Gumbel mu fit  [200]  (n>0)
+        //  --EmN  : number of sequences for MSV Gumbel mu fit  [200]  (n>0)
+        //  --EvL  : length of sequences for Viterbi Gumbel mu fit  [200]  (n>0)
+        //  --EvN  : number of sequences for Viterbi Gumbel mu fit  [200]  (n>0)
+        //  --EfL  : length of sequences for Forward exp tail tau fit  [100]  (n>0)
+        //  --EfN  : number of sequences for Forward exp tail tau fit  [200]  (n>0)
+        //  --Eft  : tail mass for Forward exponential tail tau fit  [0.04]  (0<x<1)
+
+        type EValueControlOptions =
+            ///length of sequences for MSV Gumbel mu fit
+            | MSVLength         of int
+            ///number of sequences for MSV Gumbel mu fit
+            | MSVNumber         of int
+            ///length of sequences for Viterbi Gumbel mu fit
+            | ViterbiLength     of int
+            ///number of sequences for Viterbi Gumbel mu fit
+            | ViterbiNumber     of int
+            ///length of sequences for Forward exp tail tau fit
+            | ForwardLength     of int
+            ///number of sequences for Forward exp tail tau fit
+            | ForwardNumber     of int
+            ///tail mass for Forward exponential tail tau fit
+            | ForwardTailMass   of float
+
+            static member make = function
+                | MSVLength         l   -> ["--EmL";string l]
+                | MSVNumber         n   -> ["--EmN";string n]
+                | ViterbiLength     l   -> ["--EvL";string l]
+                | ViterbiNumber     n   -> ["--EvN";string n]
+                | ForwardLength     l   -> ["--EfL";string l]
+                | ForwardNumber     n   -> ["--EfN";string n]
+                | ForwardTailMass   m   -> ["--Eft";string m]
+
+        ///Other options:
+        //  --cpu           : number of parallel CPU workers for multithreads
+        //  --stall            : arrest after start: for attaching debugger to process
+        //  --informat      : assert input alifile is in format  (no autodetect)
+        //  --seed          : set RNG seed to  (if 0: one-time arbitrary seed)  [42]
+        //  --w_beta        : tail mass at which window length is determined
+        //  --w_length      : window length
+        //  --maxinsertlen  : pretend all inserts are length <= 
+
+        type MiscellaneousOptions =
+            ///number of parallel CPU workers for multithreads
+            | Threads               of int
+            ///arrest after start: for attaching debugger to process
+            | Stall
+            ///assert input alifile is in format  (no autodetect)
+            | AssertInFormat        of string
+            ///set RNG seed to  (if 0: one-time arbitrary seed)
+            | Seed                  of int
+            ///tail mass at which window length is determined
+            | TailMass              of float
+            ///window length
+            | WindowLength          of int
+            ///pretend all inserts are length <= 
+            | MaxInsertionLength    of int
+
+            static member make = function
+                | Threads t             -> ["--cpu"; string t]
+                | Stall                 -> ["--stall"]
+                | AssertInFormat f      -> ["--informat"; f]
+                | Seed s                -> ["--seed"; string s]
+                | TailMass m            -> ["--w_beta"; string m]
+                | WindowLength wl       -> ["--w_length"; string wl]
+                | MaxInsertionLength l  -> ["--maxinsertlen"; string l]
+
+        //Basic options:
+        //  -h     : show brief help on version and usage
+        //  -n  : name the HMM 
+        //  -o  : direct summary output to file , not stdout
+        //  -O  : resave annotated, possibly modified MSA to file 
+
+        type HMMbuildParams =
+            ///Multiple sequence alignment input file
+            | InputMSAFile                   of string
+            ///HMM file to save the generated HMMs to
+            | OutputHMMFile                  of string
+            ///name the HMM
+            | Name                          of string
+            ///direct summary output to file , not stdout
+            | SummaryToFile                 of string
+            ///resave annotated, possibly modified MSA to file
+            | ResaveFile                    of string
+            ///Specify the alphabet used in the alignment file
+            | Alphabet                      of AlphabetType list
+            ///Alternative model construction strategies:
+            | ModelConstruction             of ModelConstructionOptions list
+            ///Alternative relative sequence weighting strategies
+            | RelativeSequenceWeighting     of RelativeSequenceWeightingOptions list
+            ///Alternative effective sequence weighting strategies
+            | EffectiveSequenceWeighting    of EffectiveSequenceWeightingOptions list
+            ///Alternative prior strategies
+            | Prior                         of PriorOptions list
+            ///Handling single sequence inputs
+            | SingleSequenceHandling        of SingleSequenceHandlingOptions list
+            ///Control of E-value calibration
+            | EValueControl                 of EValueControlOptions list
+            ///Other options
+            | Miscellaneous                 of MiscellaneousOptions list
+
+            static member makeCmd =
+                function 
+                    | InputMSAFile path                     -> [path]
+                    | OutputHMMFile path                    -> [path]
+                    | Name n                                -> ["-n";n]
+                    | SummaryToFile path                    -> ["-o";path]
+                    | ResaveFile path                       -> ["-O";path]
+                    | Alphabet aList                        -> aList    |> List.map AlphabetType.make |> List.concat
+                    | ModelConstruction mcList              -> mcList   |> List.map ModelConstructionOptions.make            |> List.concat
+                    | RelativeSequenceWeighting rswList     -> rswList  |> List.map RelativeSequenceWeightingOptions.make    |> List.concat
+                    | EffectiveSequenceWeighting eswList    -> eswList  |> List.map EffectiveSequenceWeightingOptions.make   |> List.concat
+                    | Prior pList                           -> pList    |> List.map PriorOptions.make                        |> List.concat
+                    | SingleSequenceHandling sshList        -> sshList  |> List.map SingleSequenceHandlingOptions.make       |> List.concat
+                    | EValueControl evcList                 -> evcList  |> List.map EValueControlOptions.make                |> List.concat
+                    | Miscellaneous mList                   -> mList    |> List.map MiscellaneousOptions.make                |> List.concat
+
+            static member makeCmdWith (m:MountInfo) =
+                let cPath p = (MountInfo.containerPathOf m p)
+                function 
+                    | InputMSAFile path                     -> [cPath path]
+                    | OutputHMMFile path                    -> [cPath path]
+                    | Name n                                -> ["-n";n]
+                    | SummaryToFile path                    -> ["-o";cPath path]
+                    | ResaveFile path                       -> ["-O";cPath path]
+                    | Alphabet aList                        -> aList    |> List.map AlphabetType.make |> List.concat
+                    | ModelConstruction mcList              -> mcList   |> List.map ModelConstructionOptions.make                |> List.concat
+                    | RelativeSequenceWeighting rswList     -> rswList  |> List.map RelativeSequenceWeightingOptions.make        |> List.concat
+                    | EffectiveSequenceWeighting eswList    -> eswList  |> List.map EffectiveSequenceWeightingOptions.make       |> List.concat
+                    | Prior pList                           -> pList    |> List.map PriorOptions.make                            |> List.concat
+                    | SingleSequenceHandling sshList        -> sshList  |> List.map (SingleSequenceHandlingOptions.makeWith m)   |> List.concat
+                    | EValueControl evcList                 -> evcList  |> List.map EValueControlOptions.make                    |> List.concat
+                    | Miscellaneous mList                   -> mList    |> List.map MiscellaneousOptions.make                    |> List.concat
+
+        let runHMMbuildAsync (bcContext:BioContainer.BcContext) (opt:HMMbuildParams list) = 
+            //Usage: hmmbuild [-options]   -> filter for in/out and move them to the end
+            let msa     = 
+                opt 
+                |> List.filter (fun p -> match p with |InputMSAFile _ -> true |_ -> false)
+                |> fun x -> if List.isEmpty x then
+                                failwith "no input sequence given"
+                            else 
+                                HMMbuildParams.makeCmdWith bcContext.Mount x.[0]
+
+            let hmm     = 
+                opt 
+                |> List.filter (fun p -> match p with |OutputHMMFile _ -> true |_ -> false)
+                |> fun x -> if List.isEmpty x then
+                                failwith "no output hmm file given"
+                            else 
+                                HMMbuildParams.makeCmdWith bcContext.Mount x.[0]
+
+            let options = opt |> List.filter (fun p -> match p with |InputMSAFile _ |OutputHMMFile _ -> false |_ -> true)
+            let cmds = (options |> List.map (HMMbuildParams.makeCmdWith bcContext.Mount))
+            let tp = ("hmmbuild"::(cmds |> List.concat))@hmm@msa
+
+            printfn "Starting process hmmbuildn\r\nparameters:"
+            printfn "%s" msa.[0]
+            printfn "%s" hmm.[0]
+            cmds |> List.iter (fun op -> printfn "\t%s" (String.concat " " op))
+
+            async {
+                    let! res = BioContainer.execAsync bcContext tp           
+                    return res
+ 
+            }
+        
+        ///For each multiple sequence alignment in a msafile build a profile HMM and save it to a
+        ///new hmmfile.
+        let runHMMbuild (bcContext:BioContainer.BcContext) (opt:HMMbuildParams list) = 
+            runHMMbuildAsync bcContext opt
+            |> Async.RunSynchronously
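+
+        // Usage sketch: `bcContext` is assumed to be an initialized BioContainer.BcContext
+        // whose mount covers the files below; paths and names are placeholders only.
+        //
+        //   runHMMbuild bcContext
+        //       [ InputMSAFile  "C:/tmp/globins4.sto"
+        //         OutputHMMFile "C:/tmp/globins4.hmm"
+        //         Alphabet [AminoAcids] ]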
+
+    ///hmmalign - align sequences to a profile
+    module HMMalign =
+        //Usage: hmmalign [-options]  
+
+        //Basic options:
+        //  -h     : show brief help on version and usage
+        //  -o  : output alignment to file , not stdout
+
+        //Less common options:
+        //  --mapali     : include alignment in file  (same ali that HMM came from)
+        //  --trim          : trim terminal tails of nonaligned residues from alignment
+        //  --amino         : assert ,  both protein: no autodetection
+        //  --dna           : assert ,  both DNA: no autodetection
+        //  --rna           : assert ,  both RNA: no autodetection
+        //  --informat   : assert  is in format : no autodetection
+        //  --outformat  : output alignment in format   [Stockholm]
+
+        type HMMalignParams =
+            ///HMM input file
+            | InputHMMFile          of string
+            ///Input sequence file to align.
+            ///Sequence input formats include: FASTA, EMBL, GenBank, UniProt
+            | InputSequenceFile     of string
+            ///output alignment to file not stdout
+            ///Alignment output formats include: Stockholm, Pfam, A2M, PSIBLAST
+            | SummaryToFile         of string
+            ///assert alphabet to both input hmm and seqfile
+            | Alphabet              of AlphabetType list
+            ///assert  is in format : no autodetection
+            | SequenceFileFormat    of string
+            ///output alignment in format 
+            | OutFormat             of string
+            ///trim terminal tails of nonaligned residues from alignment
+            | Trim
+            ///include alignment in file  (same ali that HMM came from)
+            | MapAlignmentToFile    of string
+
+            static member makeCmd = function
+                | InputHMMFile path         -> [path]
+                | InputSequenceFile path    -> [path]
+                | SummaryToFile path        -> ["-o";path]
+                | Alphabet aList            -> aList |> List.map AlphabetType.make |> List.concat
+                | SequenceFileFormat f      -> ["--informat" ;f]
+                | OutFormat f               -> ["--outformat" ;f]
+                | Trim                      -> ["--trim"]
+                | MapAlignmentToFile path   -> ["--mapali"; path]
+
+            static member makeCmdWith (m:MountInfo) =
+                let cPath p = (MountInfo.containerPathOf m p)
+                function
+                | InputHMMFile path         -> [cPath path]
+                | InputSequenceFile path    -> [cPath path]
+                | SummaryToFile path        -> ["-o";cPath path]
+                | Alphabet aList            -> aList |> List.map AlphabetType.make |> List.concat
+                | SequenceFileFormat f      -> ["--informat" ;f]
+                | OutFormat f               -> ["--outformat" ;f]
+                | Trim                      -> ["--trim"]
+                | MapAlignmentToFile path   -> ["--mapali"; cPath path]
+
+
+
+        let runHMMalignAsync (bcContext:BioContainer.BcContext) (opt:HMMalignParams list) = 
+            //Usage: hmmalign [options] hmmfile seqfile
+            let hmm     = 
+                opt 
+                |> List.filter (fun p -> match p with |InputHMMFile _ -> true |_ -> false)
+                |> fun x -> if List.isEmpty x then
+                                failwith "no input hmm given"
+                            else 
+                                HMMalignParams.makeCmdWith bcContext.Mount x.[0]
+
+            let seqFile     = 
+                opt 
+                |> List.filter (fun p -> match p with |InputSequenceFile _ -> true |_ -> false)
+                |> fun x -> if List.isEmpty x then
+                                failwith "no input sequence file given"
+                            else 
+                                HMMalignParams.makeCmdWith bcContext.Mount x.[0]
+
+            let options = opt |> List.filter (fun p -> match p with |InputHMMFile _ |InputSequenceFile _ -> false |_ -> true)
+            let cmds = (options |> List.map (HMMalignParams.makeCmdWith bcContext.Mount))
+            let tp = ("hmmalign"::(cmds |> List.concat))@hmm@seqFile
+
+            printfn "Starting process hmmalign\r\nparameters:"
+            printfn "%s" hmm.[0]
+            printfn "%s" seqFile.[0]
+            cmds |> List.iter (fun op -> printfn "\t%s" (String.concat " " op))
+
+            async {
+                    let! res = BioContainer.execAsync bcContext tp           
+                    return res
+ 
+            }
+        ///Perform a multiple sequence alignment of all the sequences in seqfile by aligning
+        ///them individually to the profile HMM in hmmfile. The new alignment is output to
+        ///stdout (when not specified otherwise).
+        let runHMMalign (bcContext:BioContainer.BcContext) (opt:HMMalignParams list) =
+            runHMMalignAsync bcContext opt
+            |> Async.RunSynchronously
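+
+        // Usage sketch (placeholder paths under the container mount):
+        //
+        //   runHMMalign bcContext
+        //       [ InputHMMFile      "C:/tmp/globins4.hmm"
+        //         InputSequenceFile "C:/tmp/globins45.fa"
+        //         Trim ]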
+
+    ///hmmsearch - search profile(s) against a sequence database
+    module HMMsearch =
+        
+        ///Options directing output:
+        //  -o            : direct output to file , not stdout
+        //  -A            : save multiple alignment of all hits to file 
+        //  --tblout      : save parseable table of per-sequence hits to file 
+        //  --domtblout   : save parseable table of per-domain hits to file 
+        //  --pfamtblout  : save table of hits and domains to file, in Pfam format 
+        //  --acc            : prefer accessions over names in output
+        //  --noali          : don't output alignments, so output is smaller
+        //  --notextw        : unlimit ASCII text output line width
+        //  --textw       : set max width of ASCII text output lines  [120]  (n>=120)
+
+        type OutputDirectionsOptions =
+            ///direct output to file , not stdout
+            | OutputToFile              of string
+            ///save multiple alignment of all hits to file 
+            | AllHitsToFile             of string
+            ///save parseable table of per-sequence hits to file 
+            | HitsToPerSequenceTable    of string
+            ///save parseable table of per-domain hits to file 
+            | HitsToPerDomainTable      of string
+            ///save table of hits and domains to file, in Pfam format 
+            | HitsToPfam                of string
+            ///prefer accessions over names in output
+            | PreferAccessionsOverNames
+            ///don't output alignments, so output is smaller
+            | NoAlignments
+            ///unlimit ASCII text output line width
+            | UnlimitedTextLineWidth
+            ///set max width of ASCII text output lines
+            | MaxTextLineWidth          of int
+
+            static member make = 
+                function
+                | OutputToFile path             -> ["-o"; path]
+                | AllHitsToFile path            -> ["-A"; path]
+                | HitsToPerSequenceTable path   -> ["--tblout"; path]
+                | HitsToPerDomainTable path     -> ["--domtblout"; path]
+                | HitsToPfam path               -> ["--pfamtblout"; path]
+                | PreferAccessionsOverNames     -> ["--acc"]
+                | NoAlignments                  -> ["--noali"]
+                | UnlimitedTextLineWidth        -> ["--notextw"]
+                | MaxTextLineWidth lw           -> ["--textw"; string lw]
+
+            static member makeWith (m:MountInfo) =
+                let cPath p = (MountInfo.containerPathOf m p)
+                function
+                | OutputToFile path             -> ["-o"; cPath path]
+                | AllHitsToFile path            -> ["-A"; cPath path]
+                | HitsToPerSequenceTable path   -> ["--tblout"; cPath path]
+                | HitsToPerDomainTable path     -> ["--domtblout"; cPath path]
+                | HitsToPfam path               -> ["--pfamtblout"; cPath path]
+                | PreferAccessionsOverNames     -> ["--acc"]
+                | NoAlignments                  -> ["--noali"]
+                | UnlimitedTextLineWidth        -> ["--notextw"]
+                | MaxTextLineWidth lw           -> ["--textw"; string lw]
+
+        
+
+        ///Other expert options:
+        //  --nonull2     : turn off biased composition score corrections
+        //  -Z         : set # of comparisons done, for E-value calculation
+        //  --domZ     : set # of significant seqs, for domain E-value calculation
+        //  --seed     : set RNG seed to  (if 0: one-time arbitrary seed)  [42]
+        //  --tformat  : assert target  is in format : no autodetection
+        //  --cpu      : number of parallel CPU workers to use for multithreads
+
+        type MiscellaneousOptions =
+            ///turn off biased composition score corrections
+            | TurnOffBiasedScoreCorrections
+            ///set # of comparisons done, for E-value calculation
+            | EValueComparisons     of int
+            ///set # of significant seqs, for domain E-value calculation
+            | NumberOfSigSeqs       of int
+            ///set RNG seed to  (if 0: one-time arbitrary seed)
+            | RNGSeed               of int
+            ///assert target  is in format : no autodetection
+            | SequenceFileFormat    of string
+            ///number of parallel CPU workers to use for multithreads
+            | Threads               of int   
+            
+            static member make =
+                function
+                | TurnOffBiasedScoreCorrections -> ["--nonull2"]
+                | EValueComparisons n           -> ["-Z"; string n]
+                | NumberOfSigSeqs n             -> ["--domZ"; string n]
+                | RNGSeed s                     -> ["--seed"; string s]
+                | SequenceFileFormat f          -> ["--tformat"; f]
+                | Threads t                     -> ["--cpu"; string t]
+
+        type HMMsearchParams =
+            /// Input HMM file containing query profiles to search for
+            | InputHMMFile              of string
+            /// Input sequence database to search query profiles in
+            | SequenceDB                of string
+            ///Options directing output
+            | OutputDirections          of OutputDirectionsOptions list
+            ///Options controlling reporting thresholds
+            | ReportingThreshold        of ReportingThresholdOptions list
+            ///Options controlling inclusion (significance) thresholds
+            | InclusionThreshold        of InclusionThresholdOptions list
+            ///Options controlling model-specific thresholding
+            | ModelSpecificThreshold    of ModelSpecificThresholdOptions list
+            ///Options controlling acceleration heuristics
+            | AccelerationHeuristics    of AccelerationHeuristicsOptions list
+            ///Other expert options
+            | Miscellaneous             of MiscellaneousOptions list
+
+            static member makeCMD = 
+                function
+                | InputHMMFile path                 -> [path]
+                | SequenceDB path                   -> [path]
+                | OutputDirections odList           -> odList |> List.map OutputDirectionsOptions.make          |> List.concat
+                | ReportingThreshold rtList         -> rtList |> List.map ReportingThresholdOptions.make        |> List.concat
+                | InclusionThreshold itList         -> itList |> List.map InclusionThresholdOptions.make        |> List.concat
+                | ModelSpecificThreshold mstList    -> mstList|> List.map ModelSpecificThresholdOptions.make    |> List.concat
+                | AccelerationHeuristics ahList     -> ahList |> List.map AccelerationHeuristicsOptions.make    |> List.concat
+                | Miscellaneous mList               -> mList  |> List.map MiscellaneousOptions.make             |> List.concat
+
+            static member makeCmdWith (m:MountInfo) =
+                let cPath p = (MountInfo.containerPathOf m p)
+                function
+                | InputHMMFile path                 -> [cPath path]
+                | SequenceDB path                   -> [cPath path]
+                | OutputDirections odList           -> odList |> List.map (OutputDirectionsOptions.makeWith m)  |> List.concat
+                | ReportingThreshold rtList         -> rtList |> List.map ReportingThresholdOptions.make        |> List.concat
+                | InclusionThreshold itList         -> itList |> List.map InclusionThresholdOptions.make        |> List.concat
+                | ModelSpecificThreshold mstList    -> mstList|> List.map ModelSpecificThresholdOptions.make    |> List.concat
+                | AccelerationHeuristics ahList     -> ahList |> List.map AccelerationHeuristicsOptions.make    |> List.concat
+                | Miscellaneous mList               -> mList  |> List.map MiscellaneousOptions.make             |> List.concat
+            //Usage: hmmsearch [options]  
+
+        let runHMMsearchAsync (bcContext:BioContainer.BcContext) (opt:HMMsearchParams list) = 
+            //hmmsearch [options] hmmfile seqdb
+            let hmm     = 
+                opt 
+                |> List.filter (fun p -> match p with |InputHMMFile _ -> true |_ -> false)
+                |> fun x -> if List.isEmpty x then
+                                failwith "no input hmm given"
+                            else 
+                                HMMsearchParams.makeCmdWith bcContext.Mount x.[0]
+
+            let seqDB     = 
+                opt 
+                |> List.filter (fun p -> match p with |SequenceDB _ -> true |_ -> false)
+                |> fun x -> if List.isEmpty x then
+                                failwith "no input sequence file given"
+                            else 
+                                HMMsearchParams.makeCmdWith bcContext.Mount x.[0]
+
+            let options = opt |> List.filter (fun p -> match p with |InputHMMFile _ |SequenceDB _ -> false |_ -> true)
+            let cmds = (options |> List.map (HMMsearchParams.makeCmdWith bcContext.Mount))
+            let tp = ("hmmsearch"::(cmds |> List.concat))@hmm@seqDB
+
+            printfn "Starting process hmmsearch\r\nparameters:"
+            printfn "%s" hmm.[0]
+            printfn "%s" seqDB.[0]
+            cmds |> List.iter (fun op -> printfn "\t%s" (String.concat " " op))
+
+            async {
+                    let! res = BioContainer.execAsync bcContext tp           
+                    return res
+ 
+            }
+
+        ///hmmsearch is used to search one or more profiles against a sequence database. For each
+        ///profile in hmmfile, use that query profile to search the target database of sequences in
+        ///seqdb, and output ranked lists of the sequences with the most significant matches to
+        ///the profile
+        let runHMMsearch (bcContext:BioContainer.BcContext) (opt:HMMsearchParams list) =
+            runHMMsearchAsync bcContext opt
+            |> Async.RunSynchronously
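+
+        // Usage sketch (placeholder paths): searches the profile against a sequence
+        // database and writes a parseable per-sequence hit table under the mount.
+        //
+        //   runHMMsearch bcContext
+        //       [ InputHMMFile "C:/tmp/globins4.hmm"
+        //         SequenceDB   "C:/tmp/uniprot_sprot.fasta"
+        //         OutputDirections [ HitsToPerSequenceTable "C:/tmp/globins4.tbl" ] ]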
+
+    ///hmmscan - search sequence(s) against a profile database
+    module HMMscan =
+        
+    //Usage: hmmscan [-options]  
+
+        ///Options controlling output:
+        //  -o            : direct output to file , not stdout
+        //  --tblout      : save parseable table of per-sequence hits to file 
+        //  --domtblout   : save parseable table of per-domain hits to file 
+        //  --pfamtblout  : save table of hits and domains to file, in Pfam format 
+        //  --acc            : prefer accessions over names in output
+        //  --noali          : don't output alignments, so output is smaller
+        //  --notextw        : unlimit ASCII text output line width
+        //  --textw       : set max width of ASCII text output lines  [120]  (n>=120)
+
+        //TO-DO: maybe unify overlap with output direction options of other hmmer functions
+        type OutputDirectionsOptions =
+            ///direct output to file , not stdout
+            | OutputToFile              of string
+            ///save parseable table of per-sequence hits to file 
+            | HitsToPerSequenceTable    of string
+            ///save parseable table of per-domain hits to file 
+            | HitsToPerDomainTable      of string
+            ///save table of hits and domains to file, in Pfam format 
+            | HitsToPfam                of string
+            ///prefer accessions over names in output
+            | PreferAccessionsOverNames
+            ///don't output alignments, so output is smaller
+            | NoAlignments
+            ///unlimit ASCII text output line width
+            | UnlimitedTextLineWidth
+            ///set max width of ASCII text output lines
+            | MaxTextLineWidth          of int
+
+            static member make = 
+                function
+                | OutputToFile path             -> ["-o"; path]
+                | HitsToPerSequenceTable path   -> ["--tblout"; path]
+                | HitsToPerDomainTable path     -> ["--domtblout"; path]
+                | HitsToPfam path               -> ["--pfamtblout"; path]
+                | PreferAccessionsOverNames     -> ["--acc"]
+                | NoAlignments                  -> ["--noali"]
+                | UnlimitedTextLineWidth        -> ["--notextw"]
+                | MaxTextLineWidth lw           -> ["--textw"; string lw]
+
+            static member makeWith (m:MountInfo) =
+                let cPath p = (MountInfo.containerPathOf m p)
+                function
+                | OutputToFile path             -> ["-o"; cPath path]
+                | HitsToPerSequenceTable path   -> ["--tblout"; cPath path]
+                | HitsToPerDomainTable path     -> ["--domtblout"; cPath path]
+                | HitsToPfam path               -> ["--pfamtblout"; cPath path]
+                | PreferAccessionsOverNames     -> ["--acc"]
+                | NoAlignments                  -> ["--noali"]
+                | UnlimitedTextLineWidth        -> ["--notextw"]
+                | MaxTextLineWidth lw           -> ["--textw"; string lw]
+
+        ///Other expert options:
+        //    --nonull2     : turn off biased composition score corrections
+        //    -Z         : set # of comparisons done, for E-value calculation
+        //    --domZ     : set # of significant seqs, for domain E-value calculation
+        //    --seed     : set RNG seed to  (if 0: one-time arbitrary seed)  [42]
+        //    --qformat  : assert input  is in format : no autodetection
+        //    --daemon      : run program as a daemon
+        //    --cpu      : number of parallel CPU workers to use for multithreads
+    
+        type MiscellaneousOptions =
+            ///turn off biased composition score corrections
+            | TurnOffBiasedScoreCorrections
+            ///set # of comparisons done, for E-value calculation
+            | EValueComparisons     of int
+            ///set # of significant seqs, for domain E-value calculation
+            | NumberOfSigSeqs       of int
+            ///set RNG seed to  (if 0: one-time arbitrary seed)
+            | RNGSeed               of int
+            ///assert target  is in format : no autodetection
+            | SequenceFileFormat    of string
+            ///number of parallel CPU workers to use for multithreads
+            | Threads               of int   
+            ///run program as a daemon
+            | RunAsDaemon
+            
+            static member make =
+                function
+                | TurnOffBiasedScoreCorrections -> ["--nonull2"]
+                | EValueComparisons n           -> ["-Z"; string n]
+                | NumberOfSigSeqs n             -> ["--domZ"; string n]
+                | RNGSeed s                     -> ["--seed"; string s]
+                | SequenceFileFormat f          -> ["--qformat"; f]
+                | Threads t                     -> ["--cpu"; string t]
+                | RunAsDaemon                   -> ["--daemon"]
+
+
+        type HMMscanParams =
+            | InputHMMDB                 of string
+            | InputSequenceFile          of string
+            | OutputDirections           of OutputDirectionsOptions list
+            ///Options controlling reporting thresholds
+            | ReportingThreshold        of ReportingThresholdOptions list
+            ///Options controlling inclusion (significance) thresholds
+            | InclusionThreshold        of InclusionThresholdOptions list
+            ///Options controlling model-specific thresholding
+            | ModelSpecificThreshold    of ModelSpecificThresholdOptions list
+            ///Options controlling acceleration heuristics
+            | AccelerationHeuristics    of AccelerationHeuristicsOptions list
+            ///Other expert options
+            | Miscellaneous             of MiscellaneousOptions list
+
+            static member makeCmd = function
+                | InputHMMDB path               -> [path]
+                | InputSequenceFile path        -> [path]
+                | OutputDirections pList        -> pList |> List.map OutputDirectionsOptions.make         |> List.concat
+                | ReportingThreshold pList      -> pList |> List.map ReportingThresholdOptions.make       |> List.concat
+                | InclusionThreshold pList      -> pList |> List.map InclusionThresholdOptions.make       |> List.concat
+                | ModelSpecificThreshold pList  -> pList |> List.map ModelSpecificThresholdOptions.make   |> List.concat
+                | AccelerationHeuristics pList  -> pList |> List.map AccelerationHeuristicsOptions.make   |> List.concat
+                | Miscellaneous   pList         -> pList |> List.map MiscellaneousOptions.make            |> List.concat
+
+            static member makeCmdWith (m:MountInfo) =
+                let cPath p = (MountInfo.containerPathOf m p)
+                function
+                | InputHMMDB path               -> [cPath path]
+                | InputSequenceFile path        -> [cPath path]
+                | OutputDirections pList        -> pList |> List.map (OutputDirectionsOptions.makeWith m) |> List.concat
+                | ReportingThreshold pList      -> pList |> List.map ReportingThresholdOptions.make       |> List.concat
+                | InclusionThreshold pList      -> pList |> List.map InclusionThresholdOptions.make       |> List.concat
+                | ModelSpecificThreshold pList  -> pList |> List.map ModelSpecificThresholdOptions.make   |> List.concat
+                | AccelerationHeuristics pList  -> pList |> List.map AccelerationHeuristicsOptions.make   |> List.concat
+                | Miscellaneous   pList         -> pList |> List.map MiscellaneousOptions.make            |> List.concat
+
+        let runHMMscanAsync (bcContext:BioContainer.BcContext) (opt:HMMscanParams list) = 
+            //Usage: hmmscan [-options]  
+            let hmmDB     = 
+                opt 
+                |> List.filter (fun p -> match p with |InputHMMDB _ -> true |_ -> false)
+                |> fun x -> if List.isEmpty x then
+                                failwith "no input hmm db given"
+                            else 
+                                HMMscanParams.makeCmdWith bcContext.Mount x.[0]
+
+            let inSeq     = 
+                opt 
+                |> List.filter (fun p -> match p with |InputSequenceFile _ -> true |_ -> false)
+                |> fun x -> if List.isEmpty x then
+                                failwith "no input sequence file given"
+                            else 
+                                HMMscanParams.makeCmdWith bcContext.Mount x.[0]
+
+            let options = opt |> List.filter (fun p -> match p with |InputHMMDB _ |InputSequenceFile _ -> false |_ -> true)
+            let cmds = (options |> List.map (HMMscanParams.makeCmdWith bcContext.Mount))
+            let tp = ("hmmscan"::(cmds |> List.concat))@hmmDB@inSeq
+
+            printfn "Starting process hmmscan\r\nparameters:"
+            printfn "%s" hmmDB.[0]
+            printfn "%s" inSeq.[0]
+            cmds |> List.iter (fun op -> printfn "\t%s" (String.concat " " op))
+
+            async {
+                    let! res = BioContainer.execAsync bcContext tp           
+                    return res
+ 
+            }
+
+        ///hmmscan is used to search protein sequences against collections of protein profiles.
+        ///For each sequence in seqfile, use that query sequence to search the target database
+        ///of profiles in hmmdb, and output ranked lists of the profiles with the most significant
+        ///matches to the sequence
+        let runHMMscan (bcContext:BioContainer.BcContext) (opt:HMMscanParams list) =
+            runHMMscanAsync bcContext opt
+            |> Async.RunSynchronously
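+
+        // Usage sketch (placeholder paths): the profile database must have been
+        // prepared with hmmpress beforehand.
+        //
+        //   runHMMscan bcContext
+        //       [ InputHMMDB        "C:/tmp/Pfam-A.hmm"
+        //         InputSequenceFile "C:/tmp/query.fasta"
+        //         OutputDirections  [ HitsToPerDomainTable "C:/tmp/query.domtbl" ] ]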
+
+
+    ///hmmpress - prepare a profile database for hmmscan
+    module HMMpress =
+        
+        type HMMpressParams = 
+            ///HMM profile to construct the binary compressed datafiles from
+            | HMMInputFile of string
+            ///overwrites any previous hmmpress’ed datafiles
+            | ForceOverwrite
+
+            static member makeCmd =
+                function
+                | HMMInputFile path     -> [path]
+                | ForceOverwirite       -> ["-f"]
+
+            static member makeCmdWith (m:MountInfo) =
+                let cPath p = (MountInfo.containerPathOf m p)
+                function
+                | HMMInputFile path     -> [cPath path]
+                | ForceOverwirite       -> ["-f"]
+
+        let runHMMpressAsync (bcContext:BioContainer.BcContext) (opt:HMMpressParams list) = 
+            //Usage: hmmpress [options] hmmfile
+            let hmm     = 
+                opt 
+                |> List.filter (fun p -> match p with |HMMInputFile _ -> true |_ -> false)
+                |> fun x -> if List.isEmpty x then
+                                failwith "no input hmm db given"
+                            else 
+                                HMMpressParams.makeCmdWith bcContext.Mount x.[0]
+
+            let options = opt |> List.filter (fun p -> match p with |HMMInputFile _ -> false |_ -> true)
+            let cmds = (options |> List.map (HMMpressParams.makeCmdWith bcContext.Mount))
+            let tp = ("hmmpress"::(cmds |> List.concat))@hmm
+
+            printfn "Starting process hmmpress\r\nparameters:"
+            printfn "%s" hmm.[0]
+            cmds |> List.iter (fun op -> printfn "\t%s" (String.concat " " op))
+
+            async {
+                    let! res = BioContainer.execAsync bcContext tp           
+                    return res
+ 
+            }
+
+        ///Constructs binary compressed datafiles for hmmscan, starting from a profile database
+        ///hmmfile in standard HMMER3 format. The hmmpress step is required for hmmscan to work.
+        let runHMMpress (bcContext:BioContainer.BcContext) (opt:HMMpressParams list) =
+            runHMMpressAsync bcContext opt
+            |> Async.RunSynchronously
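+
+        // Usage sketch (placeholder path): builds the binary auxiliary files hmmscan
+        // expects, overwriting any previously pressed files.
+        //
+        //   runHMMpress bcContext [ HMMInputFile "C:/tmp/Pfam-A.hmm"; ForceOverwrite ]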
+
+
+    module HMMconvert =
+
+    //Usage: hmmconvert [-options] 
+
+    //Options:
+    //  -h           : show brief help on version and usage
+    //  -a           : ascii:  output models in HMMER3 ASCII format  [default]
+    //  -b           : binary: output models in HMMER3 binary format
+    //  -2           : HMMER2: output backward compatible HMMER2 ASCII format (ls mode)
+    //  --outfmt  : choose output legacy 3.x file formats by name, such as '3/a'
+
+        type OutputFormatOptions = 
+            | ASCII
+            | Binary
+            | HMMER2
+
+            static member make = function
+                | ASCII     -> ["-a"]
+                | Binary    -> ["-b"]
+                | HMMER2    -> ["-2"]
+
+        type HMMconvertParams =
+            | HMMInputFile of string
+            | OutputFormat of OutputFormatOptions list
+            | LegacyFormat of string
+
+            static member make = function
+                | HMMInputFile path     -> [path]
+                | OutputFormat pList    -> pList |> List.map OutputFormatOptions.make |> List.concat
+                | LegacyFormat f        -> ["--outfmt"; f]
+
+            static member makeCmdWith (m:MountInfo) =
+                let cPath p = (MountInfo.containerPathOf m p)
+                function
+                | HMMInputFile path     -> [cPath path]
+                | OutputFormat pList    -> pList |> List.map OutputFormatOptions.make |> List.concat
+                | LegacyFormat f        -> ["--outfmt"; f]
+
+        let runHMMconvertAsync (bcContext:BioContainer.BcContext) (opt:HMMconvertParams list) = 
+        //Usage: hmmconvert [options] hmmfile
+            let hmm     = 
+                opt 
+                |> List.filter (fun p -> match p with |HMMInputFile _ -> true |_ -> false)
+                |> fun x -> if List.isEmpty x then
+                                failwith "no input hmm given"
+                            else 
+                                HMMconvertParams.makeCmdWith bcContext.Mount x.[0]
+
+            let options = opt |> List.filter (fun p -> match p with |HMMInputFile _ -> false |_ -> true)
+            let cmds = (options |> List.map (HMMconvertParams.makeCmdWith bcContext.Mount))
+            let tp = ("hmmconvert"::(cmds |> List.concat))@hmm
+
+            printfn "Starting process hmmconvert\r\nparameters:"
+            printfn "%s" hmm.[0]
+            cmds |> List.iter (fun op -> printfn "\t%s" (String.concat " " op))
+
+            async {
+                    let! res = BioContainer.execAsync bcContext tp           
+                    return res
+ 
+            }
+
+        ///The hmmconvert utility converts an input profile file to different HMMER formats
+        let runHMMconvert (bcContext:BioContainer.BcContext) (opt:HMMconvertParams list) =
+            runHMMconvertAsync bcContext opt
+            |> Async.RunSynchronously
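+
+        // Usage sketch (placeholder path): converts a profile to HMMER3 binary format.
+        //
+        //   runHMMconvert bcContext [ HMMInputFile "C:/tmp/globins4.hmm"; OutputFormat [Binary] ]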
+
+    ///hmmemit - sample sequences from a profile
+    module HMMemit =
+
+
+        ///Options controlling what to emit:
+        //  -a : emit alignment
+        //  -c : emit simple majority-rule consensus sequence
+        //  -C : emit fancier consensus sequence (req's --minl, --minu)
+        //  -p : sample sequences from profile, not core model
+        
+        type EmitControllingOptions =
+            ///emit alignment
+            | EmitAlignment
+            ///emit simple majority-rule consensus sequence
+            | EmitConsensus
+            ///emit fancier consensus sequence (req's --minl, --minu)
+            | EmitConsensusFancy
+            ///sample sequences from profile, not core model
+            | SampleFromProfile
+
+            static member make = function
+                | EmitAlignment         -> ["-a"]
+                | EmitConsensus         -> ["-c"]
+                | EmitConsensusFancy    -> ["-C"]
+                | SampleFromProfile     -> ["-p"]
+
+        ///Options controlling emission from profiles with SampleFromProfile:
+        //  -L <n>      : set expected length from profile to <n>  [400]
+        //  --local     : configure profile in multihit local mode  [default]
+        //  --unilocal  : configure profile in unilocal mode
+        //  --glocal    : configure profile in multihit glocal mode
+        //  --uniglocal : configure profile in unihit glocal mode
+
+        type ProfileSamplingOptions =
+            ///set expected length from profile
+            | ExpectedLength of int
+            ///configure profile in multihit local mode
+            | MultihitLocal
+            ///configure profile in unilocal mode
+            | Unilocal
+            ///configure profile in multihit glocal mode
+            | MultihitGlocal
+            ///configure profile in unihit glocal mode
+            | UnihitGlocal
+
+            static member make = function
+                | ExpectedLength l  -> ["-L"; string l]
+                | MultihitLocal     -> ["--local"]
+                | Unilocal          -> ["--unilocal"]
+                | MultihitGlocal    -> ["--glocal"]
+                | UnihitGlocal      -> ["--uniglocal"]
+
+
+        ///Options controlling fancy consensus emission with -C:
+        //  --minl <x>  : show consensus as 'any' (X/N) unless >= this fraction  [0.0]
+        //  --minu <x>  : show consensus as upper case if >= this fraction  [0.0]
+
+        type FancyConsensusOptions =
+            ///show consensus as 'any' (X/N) unless >= this fraction
+            |ShowAsAny      of float
+            ///show consensus as upper case if >= this fraction
+            |ShowAsUpper    of float
+
+            static member make = function
+                | ShowAsAny f   -> ["--minl"; string f]
+                | ShowAsUpper f -> ["--minu"; string f]
+
+
+        //Common options are:
+        //  -h          : show brief help on version and usage
+        //  -o <f>      : send sequence output to file <f>, not stdout
+        //  -N <n>      : number of seqs to sample  [1]  (n>0)
+        //Other options:
+        //  --seed <n>  : set RNG seed to <n>  [0]  (n>=0)
+        type HMMemitParams =
+            ///input profile HMM file to sample sequences from
+            | HMMInputFile      of string
+            ///send sequence output to file, not stdout
+            | OutputToFile      of string
+            ///number of seqs to sample
+            | SampleAmount      of int
+            ///set RNG seed
+            | RNGSeed           of int
+            ///Options controlling what to emit
+            | EmitControlling   of EmitControllingOptions list
+            ///Options controlling emission from profiles with SampleFromProfile. Only use when using SampleFromProfile as EmitControllingOption
+            | ProfileSampling   of ProfileSamplingOptions list
+            ///Options controlling fancy consensus emission with EmitConsensusFancy. Only use when using EmitConsensusFancy as EmitControllingOption
+            | FancyConsensus    of FancyConsensusOptions list
+
+            static member makeCmd = function
+                | HMMInputFile    path  -> [path]
+                | OutputToFile    path  -> ["-o"; path]
+                | SampleAmount    s     -> ["-N"; string s]
+                | RNGSeed         s     -> ["--seed"; string s]
+                | EmitControlling pList -> pList |> List.map EmitControllingOptions.make    |> List.concat
+                | ProfileSampling pList -> pList |> List.map ProfileSamplingOptions.make    |> List.concat
+                | FancyConsensus  pList -> pList |> List.map FancyConsensusOptions.make     |> List.concat
+
+            static member makeCmdWith (m:MountInfo) =
+                let cPath p = (MountInfo.containerPathOf m p)
+                function
+                | HMMInputFile    path  -> [cPath path]
+                | OutputToFile    path  -> ["-o"; cPath path]
+                | SampleAmount    s     -> ["-N"; string s]
+                | RNGSeed         s     -> ["--seed"; string s]
+                | EmitControlling pList -> pList |> List.map EmitControllingOptions.make    |> List.concat
+                | ProfileSampling pList -> pList |> List.map ProfileSamplingOptions.make    |> List.concat
+                | FancyConsensus  pList -> pList |> List.map FancyConsensusOptions.make     |> List.concat
+
+        let runHMMemitAsync (bcContext:BioContainer.BcContext) (opt:HMMemitParams list) = 
+        //Usage: hmmemit [-options] <hmmfile>
+            let hmm     = 
+                opt 
+                |> List.filter (fun p -> match p with |HMMInputFile _ -> true |_ -> false)
+                |> fun x -> if List.isEmpty x then
+                                failwith "no input hmm given"
+                            else 
+                                HMMemitParams.makeCmdWith bcContext.Mount x.[0]
+
+            let options = opt |> List.filter (fun p -> match p with |HMMInputFile _ -> false |_ -> true)
+            let cmds = (options |> List.map (HMMemitParams.makeCmdWith bcContext.Mount))
+            let tp = ("hmmemit"::(cmds |> List.concat))@hmm
+
+            printfn "Starting process hmmemit\r\nparameters:"
+            printfn "%s" hmm.[0]
+            cmds |> List.iter (fun op -> printfn "\t%s" (String.concat " " op))
+
+            async {
+                    let! res = BioContainer.execAsync bcContext tp           
+                    return res
+ 
+            }
+
+        ///The hmmemit program samples (emits) sequences from the profile HMM(s) in hmmfile,
+        ///and writes them to output. Sampling sequences may be useful for a variety of purposes, 
+        ///including creating synthetic true positives for benchmarks or tests.
+        let runHMMemit (bcContext:BioContainer.BcContext) (opt:HMMemitParams list) =
+            runHMMemitAsync bcContext opt
+            |> Async.RunSynchronously
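+
+        ///Usage sketch (illustrative only, not part of the original wrapper): samples ten sequences
+        ///from a profile HMM and writes them to a FASTA file. The container context and both paths
+        ///are hypothetical placeholders.
+        let private exampleHMMemitUsage (bcContext:BioContainer.BcContext) =
+            runHMMemit bcContext [HMMInputFile "/data/profile.hmm"; SampleAmount 10; OutputToFile "/data/samples.fasta"]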
diff --git a/src/BioFSharp.BioTools/Hera.fs b/src/BioFSharp.BioTools/Hera.fs
new file mode 100644
index 00000000..1c068ba0
--- /dev/null
+++ b/src/BioFSharp.BioTools/Hera.fs
@@ -0,0 +1,100 @@
+namespace BioFSharp.BioTools
+
+module Hera =
+    open FSharpAux
+    open BioContainer
+
+    let ImageHera = Docker.DockerId.ImageId "hera"
+
+    type HeraIndexOutputCustom =
+        /// Genome reference file in FASTA format.
+        | Fasta of string
+        /// Gene annotation file in GTF format.
+        | GTF  of string
+        /// Prefix name for output index files generated.
+        | Prefix of string
+        /// Output directory for index files generated. Default is "./".
+        | Outdir of string 
+
+        static member makeCmdWith (m:MountInfo) = function   
+            | Fasta path  -> [ "-g"; (MountInfo.containerPathOf m path)]
+            | GTF path    -> [ "-t"; (MountInfo.containerPathOf m path)]
+            | Prefix path -> [ "-p"; path]
+            | Outdir path -> [ "-o"; (MountInfo.containerPathOf m path)]
+
+        static member make = function  
+            | Fasta path  -> sprintf "-g %s" path
+            | GTF path    -> sprintf "-t %s" path
+            | Prefix path -> sprintf "-p %s" path
+            | Outdir path -> sprintf "-o %s" path
+
+
+    type HeraQuantOutputCustom =
+        /// Prefix name of index files, including directory.
+        | IndexDirectory of string
+        /// File(s) containing first reads in pair.
+        | FirstSource of string
+        /// File(s) containing second reads in pair.
+        | SecondSource of string
+        /// Prefix name for result files. Default is "_out"
+        | OutputPrefix of string
+        /// Output directory for result files. Default is "./".
+        | OutputDirectory of string
+        /// Number of threads running in parallel. Default is 1.
+        | ThreadNumber of int
+        /// Output read alignments in BAM file format.
+        | BamFileOutput
+        /// Compress level of bam file. Default is 1. 
+        | BamCompressionLevel of int
+
+
+        static member makeCmdWith (m:MountInfo) = function
+            | IndexDirectory d          -> [ "-x"; d]
+            | FirstSource path          -> [ "-1"; (MountInfo.containerPathOf m path)]
+            | SecondSource value        -> [ "-2" ; (MountInfo.containerPathOf m value)]
+            | OutputPrefix path         -> [ "-p"; path]
+            | OutputDirectory path      -> [ "-o"; (MountInfo.containerPathOf m path)]
+            | ThreadNumber value        -> [ "-t"; string value]
+            | BamFileOutput             -> [ "-w"]
+            | BamCompressionLevel value -> [ "-z"; string value]
+
+        static member make = function
+            | IndexDirectory d          -> sprintf "-x %s" d    
+            | FirstSource path          -> sprintf "-1 %s" path 
+            | SecondSource value        -> sprintf "-2 %s" value
+            | OutputPrefix path         -> sprintf "-p %s" path 
+            | OutputDirectory path      -> sprintf "-o %s" path 
+            | ThreadNumber value        -> sprintf "-t %s" (string value)
+            | BamFileOutput             -> sprintf "-w" 
+            | BamCompressionLevel value -> sprintf "-z %s" (string value)  
+
+
+    let runHeraIndexAsync (bcContext:BioContainer.BcContext) (opt:HeraIndexOutputCustom list)  =
+        let cmds = (opt |> List.map (HeraIndexOutputCustom.makeCmdWith bcContext.Mount))
+        let tp = "Nora"::"index"::(cmds |> List.concat)
+        async {
+            let! heraResult = BioContainer.execReturnAsync bcContext tp
+            return heraResult 
+            }
+        
+
+    let runHeraIndex (bcContext:BioContainer.BcContext) (opt:HeraIndexOutputCustom list) = 
+        runHeraIndexAsync bcContext opt 
+        |> Async.RunSynchronously
+
+
+
+    let runHeraQuantAsync (bcContext:BioContainer.BcContext) (opt:HeraQuantOutputCustom list)  =
+        let cmds = (opt |> List.map (HeraQuantOutputCustom.makeCmdWith bcContext.Mount))
+        let tp = "Nora"::"quant"::(cmds |> List.concat)
+        async {
+            let! noraResult = BioContainer.execAsync bcContext tp
+            return noraResult 
+            }
+
+
+    let runHeraQuant (bcContext:BioContainer.BcContext) (opt:HeraQuantOutputCustom list) = 
+        runHeraQuantAsync bcContext opt 
+        |> Async.RunSynchronously
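+
+    ///Usage sketch (illustrative only, not part of the original wrapper): builds an index and then
+    ///quantifies a paired-end sample. The container context and all paths are hypothetical placeholders.
+    let private exampleHeraUsage (bcContext:BioContainer.BcContext) =
+        runHeraIndex bcContext [Fasta "/data/genome.fa"; GTF "/data/genes.gtf"; Prefix "exampleIdx"; Outdir "/data/index"]
+        |> ignore
+        runHeraQuant bcContext [IndexDirectory "/data/index/exampleIdx"; FirstSource "/data/reads_1.fq"; SecondSource "/data/reads_2.fq"; OutputDirectory "/data/quant"; ThreadNumber 4]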
+
+
diff --git a/src/BioFSharp.BioTools/IntaRNA.fs b/src/BioFSharp.BioTools/IntaRNA.fs
new file mode 100644
index 00000000..1dfd7fa3
--- /dev/null
+++ b/src/BioFSharp.BioTools/IntaRNA.fs
@@ -0,0 +1,306 @@
+namespace BioFSharp.BioTools
+
+module IntaRNA =
+    
+    //TO-DO: add full argument support. Currently only basic arguments are wrapped.
+
+    open BioContainer
+    
+    
+        ///  -q [ --query ] arg           
+        ///
+        ///either an RNA sequence or the stream/file name from where to read the query sequences (should be the shorter sequences to increase efficiency); 
+        ///sequences have to use IUPAC nucleotide encoding
+        type QueryInputOptions =
+            ///RNA sequence string
+            |RNASequence of string
+            ///stream/file name from where to read the query sequences
+            |File of string
+
+            static member make = function
+                |RNASequence s  -> ["-q"; s]
+                |File f         -> ["-q"; f]
+
+            static member makeWith (m:MountInfo) = 
+                let cPath p = (MountInfo.containerPathOf m p)
+                function
+                |RNASequence s  -> ["-q"; s]        
+                |File f         -> ["-q"; cPath f]  
+
+        ///  --qAcc arg (=C)              
+        ///
+        /// accessibility computation :
+        ///    'N' no accessibility contributions
+        ///    'C' computation of accessibilities
+        ///    'P' unpaired probabilities in RNAplfold format
+        ///   from --qAccFile
+        ///    'E' ED values in RNAplfold Pu-like format from
+        ///   --qAccFile
+        type QueryAcessibilityComputationTypeOptions =
+            ///'N' no accessibility contributions
+            |NoContributions
+            ///'C' computation of accessibilities
+            |Compute
+            ///'P' unpaired probabilities in RNAplfold format from --qAccFile
+            |UnpairedFromFile
+            ///'E' ED values in RNAplfold Pu-like format from --qAccFile
+            |EDValuesFromFile
+
+            static member make = function
+                |NoContributions    -> ["--qAcc=N"]
+                |Compute            -> ["--qAcc=C"]
+                |UnpairedFromFile   -> ["--qAcc=P"]
+                |EDValuesFromFile   -> ["--qAcc=E"]
+
+
+        type QueryAcessibilityComputationOptions =
+
+            |QueryAcessibilityComputationType of QueryAcessibilityComputationTypeOptions
+
+            ///  --qAccW arg (=150)           accessibility computation : sliding window size
+            ///                               for query accessibility computation (arg in
+            ///                               range [0,99999]; 0 will use to the full sequence
+            ///                               length). Note, this also restricts the maximal
+            ///                               interaction length (see --qIntLenMax).
+
+            |SlidingWindowSize of int
+            ///  --qAccL arg (=100)           accessibility computation : maximal loop length
+            ///                               (base pair span) for query accessibility
+            ///                               computation (arg in range [0,99999]; 0 will use
+            ///                               to sliding window size 'qAccW')
+
+            |MaximalLoopLength of int
+
+            static member make = function
+                |QueryAcessibilityComputationType t ->  QueryAcessibilityComputationTypeOptions.make t
+                |SlidingWindowSize i                ->  [sprintf "--qAccW=%i" i]
+                |MaximalLoopLength i                ->  [sprintf "--qAccL=%i" i]
+    
+
+
+        type QueryOptions = 
+            |QueryInput of QueryInputOptions
+            |QueryAcessibilityComputation of QueryAcessibilityComputationOptions list
+
+            static member make = function
+                |QueryInput qi                      -> QueryInputOptions.make qi
+                |QueryAcessibilityComputation cList -> cList |> List.map QueryAcessibilityComputationOptions.make |> List.concat
+
+            static member makeWith (m:MountInfo) = 
+                let cPath p = (MountInfo.containerPathOf m p)
+                function        
+                |QueryInput qi                      -> (QueryInputOptions.makeWith m) qi
+                |QueryAcessibilityComputation cList -> cList |> List.map QueryAcessibilityComputationOptions.make |> List.concat
+
+
+
+        //Target:
+        //  -t [ --target ] arg          either an RNA sequence or the stream/file name
+        //                               from where to read the target sequences (should
+        //                               be the longer sequences to increase efficiency);
+        //                               use 'STDIN' to read from standard input stream;
+        //                               sequences have to use IUPAC nucleotide encoding
+
+        type TargetInputOptions =
+            |RNASequence of string
+            |File of string
+
+            static member make = function
+                |RNASequence s  -> ["-t"; s] 
+                |File f         -> ["-t"; f] 
+
+            static member makeWith (m:MountInfo) = 
+                let cPath p = (MountInfo.containerPathOf m p)
+                function
+                |RNASequence s  -> ["-t"; s]        
+                |File f         -> ["-t"; cPath f]  
+
+        //  --tAcc arg (=C)              accessibility computation :
+        //                                'N' no accessibility contributions
+        //                                'C' computation of accessibilities
+        //                                'P' unpaired probabilities in RNAplfold format
+        //                               from --tAccFile
+        //                                'E' ED values in RNAplfold Pu-like format from
+        //                               --tAccFile
+
+        type TargetAcessibilityComputationTypeOptions =
+            |NoContributions
+            |Compute
+            |UnpairedFromFile
+            |EDValuesFromFile
+
+            static member make = function
+                |NoContributions    -> ["--tAcc=N"]
+                |Compute            -> ["--tAcc=C"]
+                |UnpairedFromFile   -> ["--tAcc=P"]
+                |EDValuesFromFile   -> ["--tAcc=E"]
+
+        type TargetAcessibilityComputationOptions =
+            |TargetAcessibilityComputationType of TargetAcessibilityComputationTypeOptions
+
+            //  --tAccW arg (=150)           accessibility computation : sliding window size
+            //                               for query accessibility computation (arg in
+            //                               range [0,99999]; 0 will use the full sequence
+            //                               length) Note, this also restricts the maximal
+            //                               interaction length (see --tIntLenMax).
+            |SlidingWindowSize of int
+
+            //  --tAccL arg (=100)           accessibility computation : maximal loop size
+            //                               (base pair span) for query accessibility
+            //                               computation (arg in range [0,99999]; 0 will use
+            //                               the sliding window size 'tAccW')
+            |MaximalLoopLength of int
+
+            static member make = function
+                |TargetAcessibilityComputationType t ->  TargetAcessibilityComputationTypeOptions.make t
+                |SlidingWindowSize i                ->  [sprintf "--tAccW=%i" i]
+                |MaximalLoopLength i                ->  [sprintf "--tAccL=%i" i]
+
+
+
+        type TargetOptions = 
+            |TargetInput of TargetInputOptions
+            |TargetAcessibilityComputation of TargetAcessibilityComputationOptions list
+
+            static member make = function
+                |TargetInput ti                      -> TargetInputOptions.make ti
+                |TargetAcessibilityComputation cList -> cList |> List.map TargetAcessibilityComputationOptions.make |> List.concat
+
+            static member makeWith (m:MountInfo) = 
+                let cPath p = (MountInfo.containerPathOf m p)
+                function        
+                |TargetInput ti                      -> (TargetInputOptions.makeWith m) ti
+                |TargetAcessibilityComputation cList -> cList |> List.map TargetAcessibilityComputationOptions.make |> List.concat 
+
+
+        //Helix (only if --model=H):
+        type HelixOptions =
+            |Default
+            ///  --helixMinBP arg (=2)        minimal number of base pairs inside a helix (arg
+            ///                               in range [2,4])
+            |MinBP              of int
+            ///  --helixMaxBP arg (=10)       maximal number of base pairs inside a helix (arg
+            ///                               in range [2,20])
+            |MaxBP              of int
+            ///  --helixMaxIL arg (=0)        maximal size for each internal loop size in a
+            ///                               helix (arg in range [0,2]).
+            |MaxInternalLoop    of float
+            ///  --helixMaxED arg (=999)      maximal ED-value allowed (per sequence) during
+            ///                               helix computation (arg in range [-999,999]).
+            |MaxEDValue         of float
+            ///  --helixMaxE arg (=0)         maximal energy considered during helix
+            ///                               computation (arg in range [-999,999]).
+            |MaxEnergy          of float
+            ///  --helixWithED                if present, ED-values will be used within the
+            ///                               energy evaluation of a helix
+            |WithED             
+
+            static member make = function
+                |Default            ->[""]
+                |MinBP            i ->[sprintf "--helixMinBP=%i" i]
+                |MaxBP            i ->[sprintf "--helixMaxBP=%i" i]
+                |MaxInternalLoop  f ->[sprintf "--helixMaxIL=%f" f]
+                |MaxEDValue       f ->[sprintf "--helixMaxED=%f" f]
+                |MaxEnergy        f ->[sprintf "--helixMaxE=%f"  f]
+                |WithED             ->["--helixWithED"]
+
+        ///Interaction:
+        ///  -m [ --mode ] arg (=H)       prediction mode
+
+
+        type PredictionModeOptions =
+            ///'H' = heuristic (fast and low memory),
+            |Heuristic of HelixOptions list
+            ///'M' = exact and low memory
+            |ExactLowMemory
+            ///'E' = exact (high memory)
+            |Exact
+
+            static member make = function
+                |Heuristic hList ->  ("--mode=H" :: (hList |> List.map HelixOptions.make |> List.concat))
+                |ExactLowMemory ->  ["--mode=M"]
+                |Exact          ->  ["--mode=E"]
+
+
+
+        type SeedOptions = 
+            ///  --noSeed                     if present, no seed is enforced within the
+            ///                               predicted interactions
+            |NoSeed
+            ///  --seedTQ arg                 comma separated list of explicit seed base pair
+            ///                               encoding(s) in the format startTbpsT&startQbpsQ,
+            ///                               e.g. '3|||.|&7||.||', where startT/Q are the
+            ///                               indices of the 5' seed ends in target/query
+            ///                               sequence and 'bps' the dot-bar base pair
+            ///                               encodings. This disables all other seed
+            ///                               constraints and seed identification.
+            |SeedList of string
+            ///  --seedBP arg (=7)            number of inter-molecular base pairs within the
+            ///                               seed region (arg in range [2,20])
+            |BPAmount of int
+            ///  --seedMaxUP arg (=0)         maximal overall number (query+target) of
+            ///                               unpaired bases within the seed region (arg in
+            ///                               range [0,20])
+            |MaxUnpairedBases of int
+    
+            static member make = function
+                |NoSeed             -> ["--noSeed"]
+                |SeedList sL        -> [sprintf "--seedTQ=%s" sL    ]
+                |BPAmount i         -> [sprintf "--seedBP=%i" i     ]
+                |MaxUnpairedBases i -> [sprintf "--seedMaxUP=%i" i  ]
+
+        /// --outMode arg (=N)   
+        type OutputModeOptions = 
+            ///'N' normal output (ASCII char + energy),
+            |Normal
+            ///'D' detailed output (ASCII char + energy/position details),
+            |Detailed
+            ///'C' CSV output (see --outCsvCols),
+            |CSV
+
+            static member make = function
+                |Normal     -> ["--outMode=N"]
+                |Detailed   -> ["--outMode=D"]
+                |CSV        -> ["--outMode=C"]
+
+
+        ///Top level type for modelling basic command line arguments for IntaRNA
+        type IntaRNAParams =
+            | Query of QueryOptions list
+            | Target of TargetOptions list
+            | PredictionMode of PredictionModeOptions list
+            | Seed of SeedOptions list
+            | OutputMode of OutputModeOptions
+
+            static member makeCmd = function
+                | Query             qList -> qList |> List.map QueryOptions.make            |> List.concat 
+                | Target            tList -> tList |> List.map TargetOptions.make           |> List.concat 
+                | PredictionMode    pList -> pList |> List.map PredictionModeOptions.make   |> List.concat 
+                | Seed              sList -> sList |> List.map SeedOptions.make             |> List.concat 
+                | OutputMode        o     -> OutputModeOptions.make o
+
+            static member makeCmdWith (m:MountInfo) = 
+                let cPath p = (MountInfo.containerPathOf m p)
+                function
+                | Query             qList -> qList |> List.map (QueryOptions.makeWith m)    |> List.concat 
+                | Target            tList -> tList |> List.map (TargetOptions.makeWith m)   |> List.concat 
+                | PredictionMode    pList -> pList |> List.map PredictionModeOptions.make   |> List.concat 
+                | Seed              sList -> sList |> List.map SeedOptions.make             |> List.concat 
+                | OutputMode        o     -> OutputModeOptions.make o
+
+        ///Runs IntaRNA asynchronously with the given input parameters in a container specified by the bcContext
+        let runIntaRNAAsync (bcContext:BioContainer.BcContext) (opt:IntaRNAParams list) = 
+            let cmds = opt |> List.map (IntaRNAParams.makeCmdWith bcContext.Mount)
+            let tp = "IntaRNA"::(cmds |> List.concat)
+            printfn "starting process IntaRNA\r\nparameters:"
+            cmds |> List.iter (fun op -> printfn "\t%s" (String.concat " " op))
+
+            async {
+                let! res = BioContainer.execReturnAsync bcContext tp
+                return res
+            }
+
+        ///Runs IntaRNA with the given input parameters in a container specified by the bcContext
+        let runIntaRNA (bcContext:BioContainer.BcContext) (opt:IntaRNAParams list) = 
+            runIntaRNAAsync bcContext opt
+            |> Async.RunSynchronously
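+
+        ///Usage sketch (illustrative only, not part of the original wrapper): predicts the interaction
+        ///of two short RNA sequences in heuristic mode with CSV output. The container context is a
+        ///hypothetical placeholder; sequences are passed directly, so no mount translation is needed.
+        let private exampleIntaRNAUsage (bcContext:BioContainer.BcContext) =
+            runIntaRNA bcContext
+                [ Query  [QueryInput  (QueryInputOptions.RNASequence "CCCCACCUCUUCCCC")]
+                  Target [TargetInput (TargetInputOptions.RNASequence "GGGGAGGAGGGG")]
+                  PredictionMode [Heuristic []]
+                  OutputMode CSV ]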
\ No newline at end of file
diff --git a/src/BioFSharp.BioTools/TargetP.fs b/src/BioFSharp.BioTools/TargetP.fs
new file mode 100644
index 00000000..9b1972ba
--- /dev/null
+++ b/src/BioFSharp.BioTools/TargetP.fs
@@ -0,0 +1,103 @@
+namespace BioFSharp.BioTools
+
+open System
+open System.IO
+open System.Threading
+
+
+// targetp -N /opt/targetP/test/one.fsa
+
+
+module TargetP =
+ 
+    open FSharpAux
+    open FSharpAux.IO
+    open FSharpAux.IO.SchemaReader.Attribute
+ 
+    let ImageTagetP = Docker.DockerId.ImageId "targetp"
+
+    type TargetpCustomParams =
+        | CleavagePredictions
+        | CutOffChloroplast of float
+        | CutOffSecretory of float
+        | CutOffMitochondrial of float
+        | CutOffLocation of float
+
+        static member make = function
+            | CleavagePredictions   -> "-c"
+            | CutOffChloroplast v   -> sprintf "-p %.2f" v
+            | CutOffSecretory v     -> sprintf "-s %.2f" v
+            | CutOffMitochondrial v -> sprintf "-t %.2f" v
+            | CutOffLocation v      -> sprintf "-o %.2f" v
+
+
+    type TargetpParams =
+        | NonPlant
+        | Plant
+        | NonPlantCustom of seq<TargetpCustomParams>
+        | PlantCustom of seq<TargetpCustomParams>
+    
+        static member makeCmd = function
+            | NonPlant -> ["-N"]
+            | Plant    -> ["-P"]
+            | NonPlantCustom v -> 
+                let tmp =
+                    v |> Seq.map (fun p -> TargetpCustomParams.make p) |> Seq.toList
+                "-N"::tmp
+            | PlantCustom    v ->
+                let tmp =
+                    v |> Seq.map (fun p -> TargetpCustomParams.make p) |> Seq.toList
+                "-P"::tmp
+
+        static member make = function
+            | NonPlant -> "-N"
+            | Plant    -> "-P"
+            | NonPlantCustom v -> 
+                let tmp =
+                    v |> Seq.map (fun p -> TargetpCustomParams.make p) |> String.concat " "
+                sprintf "-N %s" tmp
+            | PlantCustom    v ->
+                let tmp =
+                    v |> Seq.map (fun p -> TargetpCustomParams.make p) |> String.concat " "
+                sprintf "-P %s" tmp
+
+
+
+    type TargetpItem = 
+        { 
+            []  Name  : string
+            []   Len   : int
+            []   Mtp   : float
+            []    SP    : float
+            [] Other : float
+            []   Loc   : string
+            []    RC    : int
+            [] TPlen : string
+        }
+
+
+    let runAsync bcContext (opt:TargetpParams) (fsaStream:Stream) = 
+        let tp = "targetp"::TargetpParams.makeCmd opt
+        let tmpFile = sprintf "/data/%A.fsa" (System.Guid.NewGuid())
+        async {
+            do!
+                BioContainer.putStreamAsync bcContext fsaStream tmpFile
+            let! targetpResult =
+                BioContainer.execReturnAsync bcContext (tp@[tmpFile])
+            //do!
+            //    BioContainer.disposeAsync bcContext
+ 
+            // CsV Reader
+            let skipLines             = 1
+            let skipLinesBeforeHeader = 6 //6
+            let schemaMode = SchemaReader.Csv.Fill
+            let csvReader = SchemaReader.Csv.CsvReader(SchemaMode=schemaMode)
+            
+            return csvReader.ReadFromString(targetpResult,'\t',true,skipLines, skipLinesBeforeHeader)
+ 
+        }
+        
+        
+    let run bcContext (opt:TargetpParams) (fsaStream:Stream) = 
+        runAsync bcContext opt fsaStream
+        |> Async.RunSynchronously
diff --git a/src/BioFSharp.BioTools/Tmhmm.fs b/src/BioFSharp.BioTools/Tmhmm.fs
new file mode 100644
index 00000000..822d88ff
--- /dev/null
+++ b/src/BioFSharp.BioTools/Tmhmm.fs
@@ -0,0 +1,98 @@
+namespace BioFSharp.BioTools
+
+open System
+open System.IO
+open System.Threading
+
+
+/// TMHMM 2.0c predicts transmembrane helices in proteins
+module Tmhmm =
+ 
+    open FSharpAux
+    open FSharpAux.IO
+    open FSharpAux.IO.SchemaReader.Attribute
+ 
+    let ImageTmhmm = Docker.DockerId.ImageId "tmhmm"
+
+
+    //type TmhmmParams =
+    //    | Short
+    //    | Verbose
+    
+    //    static member makeCmd = function
+    //        | Short   -> ["-short"]
+    //        | Verbose -> []
+
+    //    static member make = function
+    //        | Short   -> "-short"
+    //        | Verbose -> ""
+
+
+    
+    type ConverterSplitString() = 
+        inherit ConverterAttribute()
+        override this.convertToObj = 
+            (fun (str : string) -> 
+                String.split '=' str
+                |> Array.tryItem 1
+                |> Option.defaultValue "" 
+                |> box) |> SchemaReader.Converter.Single
+                
+    type ConverterSplitInt() = 
+        inherit ConverterAttribute()
+        override this.convertToObj = 
+            (fun (str : string) -> 
+                String.split '=' str
+                |> Array.tryItem 1
+                |> Option.defaultValue "0"
+                |> FSharpAux.String.tryParseIntDefault 0 
+                |> box) |> SchemaReader.Converter.Single
+
+    type ConverterSplitFloat() = 
+        inherit ConverterAttribute()
+        override this.convertToObj = 
+            (fun (str : string) -> 
+                String.split '=' str
+                |> Array.tryItem 1
+                |> Option.defaultValue "nan"
+                |> FSharpAux.String.tryParseFloatDefault nan 
+                |> box) |> SchemaReader.Converter.Single
+
+    // 5H2A_CRIGR      len=471 ExpAA=159.47    First60=0.02    PredHel=7       Topology=o77-99i112-134o149-171i192-214o234-256i325-347o357-379i
+    type TmhmmItem = 
+        { 
+            []                           Name     : string
+            [][]    Len      : int
+            [][]  ExpAA    : float
+            [][]  First60  : float
+            [][]    PredHel  : int
+            [][] Topology : string
+            
+        }
+
+
+    let runAsync bcContext (fsaStream:Stream) = 
+        let tp = "tmhmm"::["-short"]
+        let tmpFile = sprintf "/data/%A.fsa" (System.Guid.NewGuid())
+        async {
+            do!
+                BioContainer.putStreamAsync bcContext fsaStream tmpFile
+            let! result =
+                BioContainer.execReturnAsync bcContext (tp@[tmpFile])
+            //do!
+            //    BioContainer.disposeAsync bcContext
+ 
+            // CsV Reader
+            let skipLines             = 0
+            let skipLinesBeforeHeader = 0
+            let schemaMode = SchemaReader.Csv.Fill
+            let csvReader = SchemaReader.Csv.CsvReader(SchemaMode=schemaMode)
+            
+            return csvReader.ReadFromString(result,'\t',false,skipLines, skipLinesBeforeHeader)
+ 
+        }
+        
+        
+    let run bcContext (fsaStream:Stream) = 
+        runAsync bcContext fsaStream
+        |> Async.RunSynchronously
diff --git a/src/BioFSharp.BioTools/paket.references b/src/BioFSharp.BioTools/paket.references
new file mode 100644
index 00000000..a405df35
--- /dev/null
+++ b/src/BioFSharp.BioTools/paket.references
@@ -0,0 +1,5 @@
+FSharp.Core
+FSharpAux
+FSharpAux.IO
+SharpZipLib
+Docker.dotnet
\ No newline at end of file
diff --git a/src/BioFSharp.BioTools/paket.template b/src/BioFSharp.BioTools/paket.template
new file mode 100644
index 00000000..21c0c0bc
--- /dev/null
+++ b/src/BioFSharp.BioTools/paket.template
@@ -0,0 +1,30 @@
+type project
+id BioFSharp.BioTools
+title
+    BioFSharp.BioTools
+owners
+    Timo Mühlhaus
+authors 
+    Timo Mühlhaus
+projectUrl
+    https://github.com/CSBiology/BioFSharp
+iconUrl
+    https://raw.githubusercontent.com/CSBiology/BioFSharp/master/docs/files/img/logo.png
+licenseUrl
+    https://github.com/CSBiology/BioFSharp/blob/master/LICENSE.txt
+requireLicenseAcceptance
+    false
+language
+    F#
+copyright
+    Copyright 2019
+tags
+    bioinformatics F# fsharp docker biocontainer
+summary
+    Common bioinformatics tools in Docker containers accessible by F#
+description
+    Common bioinformatics tools in Docker containers (including BioContainers) accessible by F#
+include-referenced-projects 
+    true
+files
+    ../../bin/BioFSharp.BioTools ==> lib
diff --git a/src/BioFSharp.IO/AssemblyInfo.fs b/src/BioFSharp.IO/AssemblyInfo.fs
index 80e4f0e0..4b40b2cc 100644
--- a/src/BioFSharp.IO/AssemblyInfo.fs
+++ b/src/BioFSharp.IO/AssemblyInfo.fs
@@ -5,8 +5,8 @@ open System.Reflection
 []
 []
 [")>]
-[]
-[]
+[]
+[]
 []
 do ()
 
@@ -14,6 +14,6 @@ module internal AssemblyVersionInformation =
     let [] AssemblyTitle = "BioFSharp.IO"
     let [] AssemblyProduct = "BioFSharp"
     let [] AssemblyDescription = "An open source bioinformatics toolbox written in F#. "
-    let [] AssemblyVersion = "0.0.12"
-    let [] AssemblyFileVersion = "0.0.12"
+    let [] AssemblyVersion = "0.1.0"
+    let [] AssemblyFileVersion = "0.1.0"
     let [] AssemblyConfiguration = "Release"
diff --git a/src/BioFSharp.IO/BioFSharp.IO.fsproj b/src/BioFSharp.IO/BioFSharp.IO.fsproj
index fba519fb..8b1e79c3 100644
--- a/src/BioFSharp.IO/BioFSharp.IO.fsproj
+++ b/src/BioFSharp.IO/BioFSharp.IO.fsproj
@@ -15,6 +15,7 @@
     Debug;Release;Mono
   
   
+    
     
     
     
@@ -32,6 +33,7 @@
     
     
     
+    
   
   
 	
diff --git a/src/BioFSharp.IO/FSIPrinters.fs b/src/BioFSharp.IO/FSIPrinters.fs
index 35fb0a89..41112253 100644
--- a/src/BioFSharp.IO/FSIPrinters.fs
+++ b/src/BioFSharp.IO/FSIPrinters.fs
@@ -8,6 +8,7 @@ module FSIPrinters =
     open BioFSharp.BioID
     open BioFSharp.IO
     open BioFSharp.IO.Clustal
+    open BioFSharp.IO.GFF3
     open FSharpAux
     open System.Text
     
@@ -101,3 +102,7 @@ module FSIPrinters =
         loop 0 false
         sprintf "\r\n%s\r\n" (prnt.ToString())
 
+    ///print GFF3 formatted file as seen in the specifications.
+    let prettyPrintGFF3 (input : seq>>) =
+        toString id input
+        |> Seq.iter (fun x -> printfn "%s" x)
diff --git a/src/BioFSharp.IO/FastA.fs b/src/BioFSharp.IO/FastA.fs
index 448e64ff..d277d59d 100644
--- a/src/BioFSharp.IO/FastA.fs
+++ b/src/BioFSharp.IO/FastA.fs
@@ -5,6 +5,7 @@ open FSharpAux
 open FSharpAux.IO
     
 module FastA =
+    open System.IO
             
     /// Fasta item contains header and sequence
     type FastaItem<'a> = {
@@ -56,8 +57,8 @@ module FastA =
         |> fromFileEnumerator converter
 
 
-    /// Writes FastaItem to file. Converter determines type of sequence by converting type -> char
-    let write (toString:'T -> char) (filePath:string) (data:seq>>) =
+    /// Writes FastaItem to stream. Converter determines type of sequence by converting type -> char
+    let writeToStream (toString:'T -> char) (stream:Stream) (data:seq>>) =
         let toChunks (w:System.IO.StreamWriter) (length:int) (source: seq<'T>) =    
             use ie = source.GetEnumerator()
             let sourceIsEmpty = ref false
@@ -80,12 +81,21 @@ module FastA =
                                    ()
         
             loop ()
-        use sWriter = new System.IO.StreamWriter(filePath,true)
+        use sWriter = new System.IO.StreamWriter(stream)
         data
         |> Seq.iter (fun (i:FastaItem<_>) ->
                                 sWriter.WriteLine(">" + i.Header)
-                                toChunks sWriter 80 i.Sequence)   
+                                toChunks sWriter 80 i.Sequence) 
+
+    /// Writes FastaItem to file. Converter determines type of sequence by converting type -> char. If file already exists the data is overwritten.
+    let write (toString:'T -> char) (filePath:string) (data:seq>>) =
+        let file = new FileStream(filePath,FileMode.Create)
+        writeToStream toString file data
 
+    /// Writes FastaItem to file. Converter determines type of sequence by converting type -> char. If file already exists the data is appended.
+    let writeAndAppend (toString:'T -> char) (filePath:string) (data:seq>>) =
+        let file = new FileStream(filePath,FileMode.Append)
+        writeToStream toString file data
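+
+    /// Usage sketch (illustrative only, not part of the original module): writes a single char-based
+    /// FastaItem to a hypothetical output path; with a char sequence, the identity converter suffices.
+    let private exampleWriteUsage () =
+        let item = { Header = "exampleHeader"; Sequence = Seq.ofList ['A';'T';'G'] }
+        write id "/tmp/example.fasta" [item]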
 
     /// Converts FastaItem to string. Converter determines type of sequence by converting type -> char
     let toString (toString:'T -> char) (data:seq>>) =
diff --git a/src/BioFSharp.IO/GAF.fs b/src/BioFSharp.IO/GAF.fs
new file mode 100644
index 00000000..81a46e99
--- /dev/null
+++ b/src/BioFSharp.IO/GAF.fs
@@ -0,0 +1,75 @@
+namespace BioFSharp.IO
+
+open System
+open FSharpAux
+open FSharpAux.IO
+
+module GAF =
+    
+    type GAFEntry = {
+        Database            : string
+        DbObjectID          : string
+        DbObjectSymbol      : string
+        Qualifier           : string []
+        GoTerm              : string
+        DbReference         : string []
+        Evidence            : string
+        WithFrom            : string []
+        Aspect              : string
+        DbObjectName        : string
+        DbObjectSynonym     : string []
+        DbObjectType        : string
+        Taxon               : string []
+        Date                : System.DateTime
+        AssignedBy          : string
+        AnnotationExtension : string [] option
+        GeneProductFormId   : string option
+                    }
+
+    let createGAFEntry (str:string) version2 =  
+        let split = str.Split([|'\t'|])
+        { 
+        Database            = split.[0]
+        DbObjectID          = split.[1]
+        DbObjectSymbol      = split.[2]
+        Qualifier           = split.[3].Split([|'|'|])
+        GoTerm              = split.[4]
+        DbReference         = split.[5].Split([|'|'|])
+        Evidence            = split.[6]
+        WithFrom            = split.[7].Split([|'|'|])
+        Aspect              = split.[8]
+        DbObjectName        = split.[9]
+        DbObjectSynonym     = split.[10].Split([|'|'|])
+        DbObjectType        = split.[11]
+        Taxon               = split.[12].Split([|'|'|])
+        Date                = System.DateTime.ParseExact(split.[13],"yyyyMMdd",null).Date
+        AssignedBy          = split.[14]
+        AnnotationExtension = if version2 then Some (split.[15].Split([|','|])) else None
+        GeneProductFormId   = if version2 then Some  split.[16]                 else None
+        }
+    
+    type GAF = {
+        Header  : seq<string>
+        Entries : seq<GAFEntry>
+        }
+
+    let fromFile filepath :GAF=
+        let strEnumerator = (FileIO.readFile(filepath)).GetEnumerator()
+
+        let isVersion2 = 
+            strEnumerator.MoveNext() |> ignore
+            strEnumerator.Current.StartsWith("!gaf-version: 2")
+
+        let rec parseSingle (accE:GAFEntry list) (accH:string list)=
+            if strEnumerator.MoveNext() then 
+                let currentString = strEnumerator.Current
+                if currentString.StartsWith("!") then 
+                    parseSingle accE (currentString::accH) 
+                else 
+                    parseSingle ((createGAFEntry currentString isVersion2)::accE) accH
+
+            else 
+                {Header =  accH |> List.rev |> Seq.cast
+                 Entries = accE |> List.rev |> Seq.cast}   
+
+        parseSingle [] []
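+
+    /// Usage sketch (illustrative only, not part of the original module): parses a GAF file from a
+    /// hypothetical path and counts its entries.
+    let private exampleFromFileUsage () =
+        let gaf = fromFile "/tmp/annotations.gaf"
+        gaf.Entries |> Seq.length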
\ No newline at end of file
diff --git a/src/BioFSharp.IO/GFF3.fs b/src/BioFSharp.IO/GFF3.fs
index 0ac72309..788427d6 100644
--- a/src/BioFSharp.IO/GFF3.fs
+++ b/src/BioFSharp.IO/GFF3.fs
@@ -310,7 +310,7 @@ module GFF3 =
         else sprintf "%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s" g.Seqid g.Source g.Feature (toStringInt g.StartPos) (toStringInt g.EndPos) (toStringFloat g.Score) (toStringChar g.Strand) (toStringInt g.Phase) (toStringMap g.Attributes) (toStringSup g.Supplement)
 
     ///converts GFF lines to string sequence. Hint: Use id as converter if no FASTA sequence is included.
-    let toString (input : seq>>) converter path=
+    let toString converter (input : seq>>) =
         let en = input.GetEnumerator()
         let toString =
             seq {   
@@ -324,14 +324,20 @@ module GFF3 =
                 }
         toString
 
-    ///writes GFF lines to file. Hint: Use id as converter if no FASTA sequence is included.
-    let write (input : seq>>) converter path=
-        toString input converter path
+    ///writes or appends GFF lines to file. Hint: Use id as converter if no FASTA sequence is included.
+    let writeOrAppend converter path (input : seq>>) =
+        toString converter input
         |> Seq.writeOrAppend path
         printfn "Writing is finished! Path: %s" path
 
+    ///writes GFF lines to file. Hint: Use id as converter if no FASTA sequence is included.
+    let write converter path (input : seq>>) =
+        toString converter input
+        |> Seq.write path
+        printfn "Writing is finished! Path: %s" path
+
     ///if a FastA sequence is included this function searches the features corresponding sequence
-    let getSequence (gFFFile : seq>>) (cDSfeature:GFFEntry)= 
+    let getSequence (cDSfeature:GFFEntry) (gFFFile : seq>>) = 
     
 //        let firstCDS = 
 //            let filteredGFFEntries = 
@@ -386,3 +392,60 @@ module GFF3 =
         sequenceOfFirstCDS
 
     //Output: Nucleotides.Nucleotides [] (ATG...TAA)
+
+    ///Create GFF3 gene and mRNA entries from FastA headers
+    module FastAHeaderParser = 
+        
+        /// Takes a sequence of FastA items and a regex pattern and transforms them into a sequence of GFF3 RNA items with decoy gene loci.
+        let createGFF3OfFastAWithRegex pattern (fastA : seq>) = 
+            fastA
+            |> Seq.mapi (fun i item -> 
+                let id = 
+                    let regex = System.Text.RegularExpressions.Regex.Match (item.Header,pattern)
+                    if regex.Success then regex.Value
+                    else 
+                        failwithf "Couldn't find pattern \"%s\" in header \"%s\"" pattern item.Header
+                [
+                GFFEntryLine (createGFFEntry "Unknown" "." "gene" "." "." "." "." "." (sprintf "ID=%i" i) "");
+                GFFEntryLine (createGFFEntry "Unknown" "." "mRNA" "." "." "." "." "." (sprintf "ID=%s;Parent=%i" id i) "")
+                ]
+            )
+            |> Seq.concat
+
+        /// Takes a sequence of FastA items and transforms them into a sequence of GFF3 RNA and gene items. FastA headers have to be UniProt style.
+        ///
+        /// For Reference see: https://www.uniprot.org/help/fasta-headers
+        let createGFF3OfFastA (fastA : seq>)= 
+            let fastAHeaders = 
+                fastA
+                |> Seq.map (fun item -> item.Header |> BioFSharp.BioID.FastA.fromString)
+            // "protein" and "mRNA" used interchangeably
+            let mRNAHeadersWithGeneName = 
+                let mutable i = 0
+                fastAHeaders
+                |> Seq.map (fun header ->
+                    // If the gene name field (GN) is present for a protein, that name is used; multiple proteins can then be associated with the same gene. Otherwise a decoy number is assigned.
+                    match Map.tryFind "GN" header.Info with
+                    | Some name ->
+                        (header.ID, name)
+                    | None ->
+                        let id = i |> string
+                        i <- i + 1
+                        header.ID,id
+                    )
+            // The proteins are grouped by their gene name 
+            mRNAHeadersWithGeneName
+            |> Seq.groupBy snd
+            |> Seq.collect (fun (genename,proteins) ->
+                // All proteins/mRNAs of one gene
+                proteins
+                |> Seq.map (fun (protID,_) ->
+                    GFFEntryLine (createGFFEntry "Unknown" "." "mRNA" "." "." "." "." "." (sprintf "ID=%s;Parent=%s" protID genename) "")
+                    )
+                // Then gene they point to
+                |> Seq.append (
+                    GFFEntryLine (createGFFEntry "Unknown" "." "gene" "." "." "." "." "." (sprintf "ID=%s" genename) "")
+                    |> Seq.singleton
+                    )
+                )
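+
+        /// Usage sketch (illustrative only, not part of the original module): derives gene/mRNA GFF3
+        /// entries from FastA items whose headers carry a TAIR-style identifier and writes them to a
+        /// hypothetical output path; the regex pattern is an example assumption.
+        let private exampleCreateGFF3Usage fastaItems =
+            fastaItems
+            |> createGFF3OfFastAWithRegex @"AT\dG\d{5}\.\d+"
+            |> write id "/tmp/example.gff3"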
+
diff --git a/src/BioFSharp.IO/Obo.fs b/src/BioFSharp.IO/Obo.fs
index 179c886f..f080964f 100644
--- a/src/BioFSharp.IO/Obo.fs
+++ b/src/BioFSharp.IO/Obo.fs
@@ -26,7 +26,7 @@ module Obo =
             IsObsolete : string
             Replacedby : string //new
             Consider : string //new
-            AltId : string
+            AltId : string list
             DisjointFrom : string list
             Subset : string list
             IntersectionOf : string list
@@ -102,7 +102,7 @@ module Obo =
                                      xrefAnalog split.[1] isObsolete replaced_by consider altId disjointFrom subset intersectionOf xref propertyValue
         
             | "is_obsolete"     -> parseSingleOboTerm en  id name name_space definition relationship related_synonym isA synonym exactSynonym broadSynonym narrowSynonym
-                                     xrefAnalog comment split.[1] replaced_by altId consider disjointFrom subset intersectionOf xref propertyValue
+                                     xrefAnalog comment split.[1] replaced_by consider altId disjointFrom subset intersectionOf xref propertyValue
                                          
             | "replaced_by"     -> parseSingleOboTerm en  id name name_space definition relationship related_synonym isA synonym exactSynonym broadSynonym narrowSynonym
                                      xrefAnalog comment isObsolete split.[1] consider altId disjointFrom subset intersectionOf xref propertyValue
@@ -111,7 +111,7 @@ module Obo =
                                      xrefAnalog comment isObsolete replaced_by split.[1] altId disjointFrom subset intersectionOf xref propertyValue
         
             | "alt_id"          -> parseSingleOboTerm en  id name name_space definition relationship related_synonym isA synonym exactSynonym broadSynonym narrowSynonym
-                                     xrefAnalog comment isObsolete replaced_by consider split.[1] disjointFrom subset intersectionOf xref propertyValue
+                                     xrefAnalog comment isObsolete replaced_by consider (split.[1]::altId) disjointFrom subset intersectionOf xref propertyValue
         
             | "disjoint_from"   -> parseSingleOboTerm en  id name name_space definition relationship related_synonym isA synonym exactSynonym broadSynonym narrowSynonym
                                      xrefAnalog comment isObsolete replaced_by consider altId (split.[1]::disjointFrom) subset intersectionOf xref propertyValue
@@ -167,7 +167,7 @@ module Obo =
                 match en.MoveNext() with
                 | true ->             
                     match en.Current with
-                    | "[Term]"    -> yield (parseSingleOboTerm en "" "" "" "" "" [] [] [] "" "" "" "" "" "" "" "" "" [] [] [] [] "")
+                    | "[Term]"    -> yield (parseSingleOboTerm en "" "" "" "" "" [] [] [] "" "" "" "" "" "" "" "" [] [] [] [] [] "")
                                      yield! loop en 
                     | _ -> yield! loop en
                 | false -> ()
diff --git a/src/BioFSharp.IO/paket.template b/src/BioFSharp.IO/paket.template
index 418dfaf6..7fc1e154 100644
--- a/src/BioFSharp.IO/paket.template
+++ b/src/BioFSharp.IO/paket.template
@@ -1,23 +1,30 @@
 type project
+id BioFSharp.IO
 title
     BioFSharp.IO
 owners
     Timo Mühlhaus
-authors
+authors 
     Timo Mühlhaus
 projectUrl
     https://github.com/CSBiology/BioFSharp
 iconUrl
-    https://raw.githubusercontent.com/CSBiology/FSharp.FGL/master/docs/files/img/logo.png
+    https://raw.githubusercontent.com/CSBiology/BioFSharp/master/docs/files/img/logo.png
 licenseUrl
-    https://github.com/CSBiology/FSharp.FGL/blob/master/LICENSE.txt
+    https://github.com/CSBiology/BioFSharp/blob/master/LICENSE.txt
 requireLicenseAcceptance
     false
+language
+    F#
 copyright
-    Copyright 2015
+    Copyright 2019
 tags
-   F# FSharp bioinformatics
+    bioinformatics F# fsharp IO
 summary
-   F# FSharp bioinformatics
+    Readers/Writers for biological file formats
 description
-   F# FSharp bioinformatics
+    Readers/Writers for biological file formats
+include-referenced-projects 
+    true
+files
+	../../bin/BioFSharp.IO ==> lib
diff --git a/src/BioFSharp.ImgP/AssemblyInfo.fs b/src/BioFSharp.ImgP/AssemblyInfo.fs
index 1552d967..d5ac9ce5 100644
--- a/src/BioFSharp.ImgP/AssemblyInfo.fs
+++ b/src/BioFSharp.ImgP/AssemblyInfo.fs
@@ -5,8 +5,8 @@ open System.Reflection
 []
 []
 [")>]
-[]
-[]
+[]
+[]
 []
 do ()
 
@@ -14,6 +14,6 @@ module internal AssemblyVersionInformation =
     let [] AssemblyTitle = "BioFSharp.ImgP"
     let [] AssemblyProduct = "BioFSharp"
     let [] AssemblyDescription = "An open source bioinformatics toolbox written in F#. "
-    let [] AssemblyVersion = "0.0.12"
-    let [] AssemblyFileVersion = "0.0.12"
+    let [] AssemblyVersion = "0.1.0"
+    let [] AssemblyFileVersion = "0.1.0"
     let [] AssemblyConfiguration = "Release"
diff --git a/src/BioFSharp.ImgP/Centroid.fs b/src/BioFSharp.ImgP/Centroid.fs
index b6ddfe69..99217011 100644
--- a/src/BioFSharp.ImgP/Centroid.fs
+++ b/src/BioFSharp.ImgP/Centroid.fs
@@ -109,18 +109,23 @@ module Centroid =
 
 
     let inline C3DWT (marr: Marr.MarrWavelet) (frame:'a[,]) =   
-        let resolutionPixel = (Array2D.length1 frame) - 40 * 2
+        let resolutionPixelfst = (Array2D.length1 frame) - 40 * 2
+        let resolutionPixelsnd = (Array2D.length2 frame) - 40 * 2
         let offset = marr.PadAreaRadius
         let paddingoffset = 40
-        let (CWTArray2D0: float[,]) = Array2D.zeroCreate (Array2D.length1 frame) (Array2D.length2 frame)
-        for x = paddingoffset to (paddingoffset + (resolutionPixel-1)) do
-            for y = paddingoffset to (paddingoffset + (resolutionPixel-1)) do
+        let (CWTArray2D0: float[,]) = Array2D.zeroCreate (Array2D.length2 frame) (Array2D.length1 frame)
+        for x = paddingoffset to (paddingoffset + (resolutionPixelsnd-1)) do
+            for y = paddingoffset to (paddingoffset + (resolutionPixelfst-1)) do
                 CWTArray2D0.[x,y] <-
-                    let mutable acc = 0.                                       
-                    for a = 0 to 2*offset do
-                        for b = 0 to 2*offset do               
-                            acc <- acc + ((marr.Values).[a,b] * (frame.[(y+(a-offset)),(x+(b-offset))] |> float))
-                    acc
+                    let rec loop acc' a b =
+                        if a <= 2 * offset then
+                            if b <= 2 * offset then
+                                let acc = acc' + ((marr.Values).[a,b] * (frame.[(y+(a-offset)),(x+(b-offset))] |> float))
+                                loop acc a (b + 1)
+                            else
+                                loop acc' (a + 1) 0
+                        else acc'
+                    loop 0. 0 0
         let deletePaddingArea =
             let arrayWithoutPaddingoffset = Array2D.zeroCreate ((Array2D.length1 CWTArray2D0)-(2*paddingoffset)) ((Array2D.length2 CWTArray2D0)-(2*paddingoffset))
             for i=paddingoffset to (Array2D.length1 CWTArray2D0)-(paddingoffset+1) do
diff --git a/src/BioFSharp.ImgP/Trace.fs b/src/BioFSharp.ImgP/Trace.fs
index 61a13ee2..4ce54523 100644
--- a/src/BioFSharp.ImgP/Trace.fs
+++ b/src/BioFSharp.ImgP/Trace.fs
@@ -25,10 +25,18 @@ module Ricker =
             Array.append 
                 (Array.map (fun x -> correctionVal * ( rickerMHwithScale scaleRise x)) [|-(padArea)..(- 1.)|]) 
                 (Array.map (fun x ->                   rickerMHwithScale scaleDecay x) [|0.0..(padArea)|])
-                
+
+        let amplitudeAdjValues =
+            let sum = values |> Array.fold (fun acc x -> acc + Math.Abs(x)) 0.
+            let max = values |> Array.max
+            let corrFacTwo = 
+                0.15 / max
+            values |> Array.map (fun x -> corrFacTwo * x)      
+
         {
         ScaleDecay  = scaleDecay
         ScaleRise   = scaleRise
+        //Values      = amplitudeAdjValues
         Values      = values
         PadArea     = padAreaInt
         }
@@ -117,14 +125,14 @@ module Trace =
 
     let paddTrace (cwtTrace:float[])= 
         let rnd = System.Random() 
-        let padding = 3000  //number of zeros the data is padded with
+        let padding = 20000  //number of zeros the data is padded with
         let listRnd = Array.map (fun x -> cwtTrace.[rnd.Next(0,cwtTrace.Length-1)]|> float ) [|0..padding-1|]
         let paddedCWT3Dtrace = Array.append (Array.append listRnd cwtTrace) listRnd
         paddedCWT3Dtrace
 
     ///computes the continious wavelet transform of the padded trace with wavelet type Ricker.
     let cWT1D (paddedTrace: float []) (myRicker: Ricker.myRicker)=
-        let padding =  3000
+        let padding =  20000
         let myRickerOffsetRise = (6. * myRicker.ScaleRise) |> ceil |> int 
         let myRickerOffset = myRicker.PadArea
         let arr = Array.zeroCreate (paddedTrace.Length - (2 * padding))
@@ -212,13 +220,15 @@ module Trace =
             let factorFstTerm = 3.4071 * factorSndTerm
             ((factorFstTerm * (x + rise)) / (Math.Sqrt(2. * Math.PI)))*Math.Exp(- 0.5 * (factorSndTerm * (x + rise)))
 
-        let blitarray = Array.zeroCreate (numberOfFrames+221)
+        //let blitarray = Array.zeroCreate (numberOfFrames+221)
+        let blitarray = Array.zeroCreate (numberOfFrames+501)
         let fitOfStandardCurve j =
                 if maximaArr.[j] >= threshold then 
                     //for i =j-7 to j+220 do
                         //blitarray.[i] <- (fittingFunction2 0.6 ((i - j) |> float)) * maximaArr.[j] 
-                    for i =j-(int rise) to j+220 do 
-                        blitarray.[i] <- (fittingFunction rise ((i - j) |> float)) * maximaArr.[j] 
+                    //for i =j-(int rise) to j+220 do 
+                    for i =j-(int rise) to j+500 do 
+                        blitarray.[i] <- (fittingFunction2 rise ((i - j) |> float)) * maximaArr.[j] 
                     blitarray.[0 .. numberOfFrames - 1]
                 else
                     blitarray.[j   ] <- 0.
diff --git a/src/BioFSharp.ML/AssemblyInfo.fs b/src/BioFSharp.ML/AssemblyInfo.fs
new file mode 100644
index 00000000..cd98b6ee
--- /dev/null
+++ b/src/BioFSharp.ML/AssemblyInfo.fs
@@ -0,0 +1,19 @@
+// Auto-Generated by FAKE; do not edit
+namespace System
+open System.Reflection
+
+[]
+[]
+[")>]
+[]
+[]
+[]
+do ()
+
+module internal AssemblyVersionInformation =
+    let [<Literal>] AssemblyTitle = "BioFSharp.ML"
+    let [<Literal>] AssemblyProduct = "BioFSharp"
+    let [<Literal>] AssemblyDescription = "An open source bioinformatics toolbox written in F#. "
+    let [<Literal>] AssemblyVersion = "0.1.0"
+    let [<Literal>] AssemblyFileVersion = "0.1.0"
+    let [<Literal>] AssemblyConfiguration = "Release"
diff --git a/src/BioFSharp.ML/BioFSharp.ML.fsproj b/src/BioFSharp.ML/BioFSharp.ML.fsproj
new file mode 100644
index 00000000..ee8b0ae9
--- /dev/null
+++ b/src/BioFSharp.ML/BioFSharp.ML.fsproj
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFramework>netstandard2.0</TargetFramework>
+    <RootNamespace>BioFSharp.ML</RootNamespace>
+    <AssemblyName>BioFSharp.ML</AssemblyName>
+    <Name>BioFSharp.ML</Name>
+    <OutputType>Library</OutputType>
+    <GenerateDocumentationFile>true</GenerateDocumentationFile>
+    <!-- Optional: Declare that the Repository URL can be published to NuSpec -->
+    <PublishRepositoryUrl>true</PublishRepositoryUrl>
+    <!-- Optional: Embed source files that are not tracked by the source control manager to the PDB -->
+    <EmbedUntrackedSources>true</EmbedUntrackedSources>
+    <!-- Optional: Include PDB in the built .nupkg -->
+    <AllowedOutputExtensionsInPackageBuildOutputFolder>$(AllowedOutputExtensionsInPackageBuildOutputFolder);.pdb</AllowedOutputExtensionsInPackageBuildOutputFolder>
+    <Configurations>Debug;Release;Mono</Configurations>
+  </PropertyGroup>
+  
+    
+    
+    
+      PreserveNewest
+    
+    
+    
+    
+    
+    
+    
+    
+  
+  
+    
+    
+  
+  <Import Project="..\..\.paket\Paket.Restore.targets" />
+</Project>
\ No newline at end of file
diff --git a/src/BioFSharp.ML/BioFSharp.ML.fsx b/src/BioFSharp.ML/BioFSharp.ML.fsx
new file mode 100644
index 00000000..96f18bdc
--- /dev/null
+++ b/src/BioFSharp.ML/BioFSharp.ML.fsx
@@ -0,0 +1,172 @@
+#load "CNTKLoadscript.fsx"
+open CNTKLoadscript
+
+CNTKLoadscript.resolveCNTKDependencies ()
+
+#r @"..\..\packages\FSharpAux\lib\netstandard2.0\FSharpAux.dll"
+#r @"..\..\packages\FSharpAux.IO\lib\netstandard2.0\FSharpAux.IO.dll"
+#r @"..\..\packages\FSharp.Stats\lib\netstandard2.0\FSharp.Stats.dll"
+#I @"..\..\bin\BioFSharp.ML\netstandard2.0"
+#r @"BioFSharp.dll"
+#r "BioFSharp.IO"
+#r "BioFSharp.ML"
+//#load "DPPOP.fs"
+
+open BioFSharp.ML.DPPOP
+open BioFSharp.IO
+open BioFSharp
+open BioFSharp.IO.FastA
+open FSharpAux
+open FSharpAux.IO
+
+let tairIds = 
+    [
+    "AT5G05730.1"
+    "AT5G05590.2"
+    "AT1G29410.1"
+    "AT4G27070.1"
+    "AT4G13260.1"
+    "AT2G27150.1"
+    "AT1G04580.1"
+    "AT3G44310.1"
+    "AT3G44300.1"
+    "AT5G22300.1"
+    "AT1G18590.1"
+    "AT1G16410.1"
+    "AT1G16400.1"
+    "AT3G26830.1"
+    "AT2G30750.1"
+    "AT1G26380.1"
+    "AT4G31970.1"
+    "AT3G53260.1"
+    "AT5G04230.1"
+    "AT3G21230.1"
+    "AT4G37390.1"
+    "AT4G27260.1"
+    "AT5G54510.1"
+    "AT5G13320.1"
+    "AT5G13360.1"
+    "AT1G28130.1"
+    "AT3G02875.1"
+    "AT2G46370.1" 
+    "AT1G51760.1"
+    "AT5G56650.1"
+    "AT5G56660.1"
+    "AT1G51780.1"
+    "AT5G05600.1"
+    "AT2G44810.1"
+    "AT1G72520.1"
+    "AT1G67560.1"
+    "AT3G25780.1"
+    "AT2G06050.2"
+    "AT2G35690.1"
+    "AT1G23080.1" 
+    "AT3G26810.1"
+    "AT1G80490.1"
+    "AT5G27030.1"
+    "AT4G28910.2"
+    "AT1G19180.1"
+    "AT4G02570.1"
+    "AT5G20570.1"
+    "AT1G33410.2"
+    "AT1G80680.1"
+    "AT4G35580.1"
+    "AT1G58100.1" 
+    "AT1G74710.1" 
+    "AT2G43840.1"
+    "AT4G26200.1"
+    "AT2G05100.1"
+    "AT2G05070.1"
+    "AT4G02630.1"
+    "AT3G54050.1"
+    "AT3G20440.1"
+    "AT1G29910.1"
+    "AT3G52720.1"
+    "AT5G18200.1"
+    "AT1G49380.1"
+    "AT4G39120.1"
+    "AT1G71180.1"
+    "AT5G64380.1"
+    "AT1G30120.1"
+    "AT4G28706.1"
+    "AT3G15640.2"
+    "AT5G24300.1"
+    "AT4G09520.1"
+    "AT2G18700.1"
+    "AT5G11920.1"
+    "AT5G40650.1"
+    "AT3G14940.1"
+    "AT4G05020.2"
+    "AT4G15530.3"
+    "AT5G36120.1"
+    "AT1G44170.1"
+    "AT1G70730.1"
+    "AT3G55650.1"
+    "AT1G53310.1"
+    "AT3G02360.2"
+    "AT1G54100.1"
+    "AT1G50460.1"
+    "AT4G26390.1"
+    "AT5G65690.1"
+    "AT5G11110.1"
+    "AT3G43190.1"
+    "AT3G22370.1"
+    "AT1G78580.1"
+    "AT1G11720.1"
+    "AT1G08940.1"
+
+    ]
+
+let tairProteome = FastA.fromFile BioArray.ofAminoAcidString @"C:\Users\Kevin\Downloads\TAIR10_pep_20101214_updated.fasta"
+["a"] |> Seq.append ["b"]
+let pois =
+    tairIds
+    |> Seq.map (fun p -> tairProteome |> Seq.find (fun x -> x.Header.Contains(p)))
+
+let res = Prediction.scoreDppopPlant (tairProteome) (pois) |> List.ofSeq
+res
+|> Seq.concat
+|> Seq.groupBy (fun x -> x.ProteinId)
+|> Seq.map (fun (pId,res) -> res |> Seq.map (fun x -> x.ProteinId,x.Sequence,x.PredictionScore,x.Distinct))
+|> Seq.concat
+|> Seq.toCSV "\t" false
+|> Seq.append ["ProteinId\tPeptideSequence\tRelativeObservabilityScore\tDistinctPeptide"]
+|> Seq.write @"C:\Users\Kevin\Desktop\Tair10_prediction_results_final.txt"
+
+List.map2 
+    (fun a b -> if (not (a=b)) then printfn "%s is not %s" a b else printfn "same"               )
+    (Seq.fromFile @"C:\Users\Kevin\Desktop\Tair10_prediction_results_distinct.txt" |> List.ofSeq)
+    (Seq.fromFile @"C:\Users\Kevin\Desktop\Tair10_prediction_results_final.txt" |> List.ofSeq)
+
+let getDistinctTrypticPeptidesFromFasta (fa:seq<FastaItem<BioArray<AminoAcids.AminoAcid>>>) = 
+    //fileDir + "Chlamy_Cp.fastA"
+    fa
+    |> Seq.map (fun fi -> fi.Sequence |> Array.filter (not << AminoAcids.isTerminator))
+    |> Seq.collect Classification.digestTryptic
+    |> Seq.map BioArray.toString
+    |> Set.ofSeq
+
+let getDistinctTrypticPeptidesFromFasta2 (fa:seq<FastaItem<BioArray<AminoAcids.AminoAcid>>>) = 
+            //fileDir + "Chlamy_Cp.fastA"
+            fa
+            |> Seq.map (fun fi -> fi.Sequence |> Array.filter (not << AminoAcids.isTerminator))
+            |> Seq.collect Classification.digestTryptic
+            |> Seq.map BioArray.toString
+            |> Seq.countBy id
+            |> Seq.filter (fun (key,count) -> count=1)
+            |> Seq.map fst
+            |> Set.ofSeq
+
+let a = getDistinctTrypticPeptidesFromFasta tairProteome
+let b = Classification.getDistinctTrypticPeptidesFromFasta tairProteome
+
+Set.filter (fun x -> not (Set.contains x b)) a
+
+
+let chlamyProteome = 
+    FastA.fromFile BioArray.ofAminoAcidString @"C:\Users\Kevin\Desktop\Chlamy_JGI5_5.fasta"
+    |> List.ofSeq
+
+let testProt = List.find (fun (x:FastaItem<'a>) -> x.Header = @"Cre02.g120100.t1.2 Ribulose-1,5-bisphosphate carboxylase/oxygenase small subunit 1, chloroplast precursor ALS=RBCS1 DBV=JGI5.5 GN=RBCS1 OS=Chlamydomonas reinhardtii SV=2 TOU=Cre") chlamyProteome
+
+Prediction.scoreDppopPlant chlamyProteome [testProt]
\ No newline at end of file
diff --git a/src/BioFSharp.ML/CNTK.fs b/src/BioFSharp.ML/CNTK.fs
new file mode 100644
index 00000000..fcc4f409
--- /dev/null
+++ b/src/BioFSharp.ML/CNTK.fs
@@ -0,0 +1,7 @@
+namespace BioFSharp.ML
+
+module CNTKExtensions =
+
+    open CNTK
+    let hello name =
+        printfn "Hello %s" name
diff --git a/src/BioFSharp.ML/CNTKLoadScript.fsx b/src/BioFSharp.ML/CNTKLoadScript.fsx
new file mode 100644
index 00000000..f5ee4f5a
--- /dev/null
+++ b/src/BioFSharp.ML/CNTKLoadScript.fsx
@@ -0,0 +1,34 @@
+open System
+open System.IO
+
+  
+let dependencies = 
+    [
+        @"../../packages/CNTK.CPUOnly/lib/netstandard2.0"
+        @"../../packages/CNTK.CPUOnly/support/x64/Release"
+        @"../../packages/CNTK.CPUOnly/support/x64/Release"
+        @"../../packages/CNTK.Deps.MKL/support/x64/Dependency"
+        @"../../packages/CNTK.Deps.OpenCV.Zip/support/x64/Dependency"
+    ]
+
+let resolveCNTKDependencies () =
+    Environment.SetEnvironmentVariable("Path",
+        Environment.GetEnvironmentVariable("Path") + ";" + __SOURCE_DIRECTORY__ )
+    dependencies 
+    |> Seq.iter (fun dep -> 
+        let path = Path.Combine(__SOURCE_DIRECTORY__,dep)
+        Environment.SetEnvironmentVariable("Path",
+            Environment.GetEnvironmentVariable("Path") + ";" + path)
+        )    
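+
+// Note (explanatory, not part of the build logic): appending these folders to the PATH variable is
+// what allows the managed CNTK assembly to locate its native binaries at load time. A consuming
+// script (see BioFSharp.ML.fsx) loads this file and calls resolveCNTKDependencies () once before
+// using anything from CNTK.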
+
+
+#I @"../../packages/CNTK.CPUOnly/lib/netstandard2.0"
+#I @"../../packages/CNTK.CPUOnly/support/x64/Release"
+#I @"../../packages/CNTK.CPUOnly/support/x64/Release"
+#I @"../../packages/CNTK.Deps.MKL/support/x64/Dependency"
+#I @"../../packages/CNTK.Deps.OpenCV.Zip/support/x64/Dependency"
+#I @"../../bin"
+
+ 
+#r @"../../packages/CNTK.CPUOnly/lib/netstandard2.0/Cntk.Core.Managed-2.6.dll"
+#r "netstandard"
\ No newline at end of file
diff --git a/src/BioFSharp.ML/DPPOP.fs b/src/BioFSharp.ML/DPPOP.fs
new file mode 100644
index 00000000..4d0191a3
--- /dev/null
+++ b/src/BioFSharp.ML/DPPOP.fs
@@ -0,0 +1,812 @@
+namespace BioFSharp.ML
+
+///DPPOP - DeeP Peptide Observability Predictor.
+///
+///d::pPop uses a deep neural network to predict proteotypic peptides for proteins of interest.
+///
+///See the publication: https://www.frontiersin.org/articles/10.3389/fpls.2018.01559/full
+module DPPOP =
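+    // Informal overview of the pipeline implemented below: proteins are digested in silico into
+    // tryptic peptides (Classification.digestTryptic), each peptide is converted into a numerical
+    // feature vector (Classification.getPeptideFeatures), the features are z-normalized
+    // (Classification.zNormalizePlantFeatureVector / zNormalizeNonPlantFeatureVector) and finally
+    // scored with a trained CNTK model (Prediction.scoreBy).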
+
+    // use old functionality due to recent changes in original library, needs fixing later
+    module internal IsoelectricPoint2 = 
+        open BioFSharp
+        open AminoAcidSymbols
+        open AminoProperties
+
+        ///Finds the value in an interval for which a given function returns a value close to 0 
+        let private tryFindRoot func accuracy lowerBound upperBound maxIter = 
+            let acc = abs accuracy
+            let rec loop a b i = 
+                let c = (a + b)/2.
+                let fc = func c
+                if (abs fc) < acc then 
+                    Some c
+                else
+                    if i = maxIter then None
+                    else 
+                        if sign fc = sign (func a) then loop c b (i+1)
+                        else loop a c (i+1)
+
+            let checkConditions a b = 
+                let fa = func a
+                let fb = func b
+                if (abs fa) < acc then Some a
+                elif (abs fb) < acc then Some b
+                else 
+                    if fa < 0. then 
+                        if fb > 0. then
+                            loop a b 0
+                        else None
+                    else 
+                        if fb < 0. then 
+                            loop a b 0
+                        else None
+            if lowerBound < upperBound then checkConditions lowerBound upperBound
+            else checkConditions upperBound lowerBound
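+
+        // Illustrative example (not from the original source): bisecting f(x) = x*x - 2. on the
+        // interval [0., 2.] with accuracy 1e-6 and at most 50 iterations returns Some x with x
+        // close to sqrt 2.:
+        //   tryFindRoot (fun x -> x*x - 2.) 1e-6 0. 2. 50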
+
+        ///Maps an AminoAcidSymbol to the default pK value of its sidechain. Returns 0.0 if the sidechain is neither acidic nor basic
+        let getpKr  = initGetAminoProperty AminoProperty.PKr
+
+        ///Finds the pH for which the global charge of the aaSeq is closer to 0 than the given accuracy.
+        let tryFind (pKrFunc: AminoAcidSymbol -> float) accuracy (aaSeq : AminoAcidSymbol seq) = 
+            let en = aaSeq.GetEnumerator()
+            let compVec = Array.zeroCreate 26
+            let rec loop current =
+                match en.MoveNext() with
+                | false ->
+                    current
+                | true ->
+                    let index = (int (BioItem.symbol en.Current)) - 65
+                    compVec.[index] <- compVec.[index] + 1
+                    loop en.Current
+            match en.MoveNext() with 
+            |false -> None
+            |true -> 
+                compVec.[(int (BioItem.symbol en.Current)) - 65] <- 1            
+                let nTerm,cTerm = en.Current, loop en.Current
+            
+                let f pH = 
+                    let posChargeState = 
+                        let CR = 10. ** (7.4 - pH)
+                        CR/(CR+1.)
+                        +
+                        Seq.fold 
+                            (fun chargeState aa -> 
+                                let CR = 10. ** ((pKrFunc aa) - pH)
+                                let partialCharge = CR/(CR+1.)
+                                let count = float compVec.[(int (BioItem.symbol aa)) - 65]                  
+                                chargeState + partialCharge * count
+                                )
+                            0.
+                            AminoSymbolSetPosCharged
+                    let negChargeState =
+                        let CR = 10. ** (pH - 3.9)
+                        CR/(CR+1.)
+                        +
+                        Seq.fold 
+                            (fun chargeState aa -> 
+                                let CR = 10. ** (pH - (pKrFunc aa))
+                                let partialCharge = CR/(CR+1.)
+                                let count = float compVec.[(int (BioItem.symbol aa)) - 65]                  
+                                chargeState + partialCharge * count
+                                )
+                            0.
+                            AminoSymbolSetNegCharged
+                    posChargeState - negChargeState
+                tryFindRoot f accuracy 0. 14. 50
+                |> Option.map (fun pH -> pH, f pH)
+    // use old functionality due to recent changes in original library, needs fixing later
+    module internal Digestion2 =
+        open BioFSharp
+        open AminoAcids
+        open FSharpAux
+
+        /// p4 p3 p2 p1 || p1' p2'
+        type Protease = {
+            Name : string
+            Expression : AminoAcid option -> AminoAcid option
+                      -> AminoAcid option -> AminoAcid option
+                      -> AminoAcid option-> AminoAcid option -> bool    
+            }
+
+        ///Creates a Protease from given name and motifFunction f
+        let createProtease name f =
+             {Name = name; Expression = f}
+
+        /// Digested peptide
+        // TODO: type of ProteinID to 'a; rename "missCleavageStart" to "cleavageStart"; Same for ..End..
+        type DigestedPeptide = {
+            ///Identifier of protein
+            ProteinID: int
+            ///
+            MissCleavages: int
+            MissCleavageStart:int
+            MissCleavageEnd: int
+            ///Sequence of peptide
+            PepSequence: AminoAcid list
+            }
+
+        ///Creates digested peptide from given information
+        let createDigestedPeptide proteinID missCleavages missCleavageStart missCleavageEnd pepSequence = {
+             ProteinID=proteinID
+             MissCleavages=missCleavages
+             MissCleavageStart=missCleavageStart
+             MissCleavageEnd=missCleavageEnd
+             PepSequence=pepSequence
+             }
+   
+        ///Returns true, if AminoAcid array resembles cutting site of given protease, else returns false
+        // TODO: rename
+        let isCutingSite (protease:Protease) (arr:AminoAcid option[]) =
+            match arr with
+            | [|p4; p3; p2; p1; p1'; p2';|] -> protease.Expression p4 p3 p2 p1 p1' p2'
+            | _ -> false
+
+        [<CompilationRepresentation(CompilationRepresentationFlags.ModuleSuffix)>]
+        ///Contains functions for digesting AminoAcid sequences
+        module BioSeq =
+        
+            /// Returns current value,array tuple (current, [|prefix; current; suffix)
+            let motivy prefixLength suffixLength (source: seq<'T>) =    
+                if prefixLength < 0 then invalidArg "prefixLength" "Input must be non negative"
+                if suffixLength < 0 then invalidArg "suffixLength" "Input must be non negative"
+                let windowSize = prefixLength + suffixLength + 1
+                //if windowSize <= 0 then invalidArg "windowSize" "Input must be non zero"
+    
+                seq {   let arr = Array.create windowSize None
+                        let r = ref (suffixLength ) 
+                        let i = ref (prefixLength) 
+                        use e = source.GetEnumerator()
+                        while e.MoveNext() do
+                            arr.[!i] <- Some e.Current   // ! get while := set
+                            i := (!i + 1) % windowSize
+                            if !r = 0 then
+                                let tmp = Array.init windowSize (fun j -> arr.[(!i+j) % windowSize])
+                                yield (tmp.[prefixLength].Value,tmp)
+                            else
+                            r := (!r - 1) 
+                        // continue shifting for suffixLength  
+                        let arr = Array.init windowSize (fun j -> arr.[(!i+j) % windowSize])
+                        for i = 1 to suffixLength do
+                            let tmp = Array.create windowSize None
+                            Array.blit arr i tmp 0 (arr.Length-i)
+                            yield (tmp.[prefixLength].Value,tmp)
+                            }
+
+            ///Cuts the AminoAcid sequence at each position where it fits the cutting pattern of the protease. Returns a sequence of the resulting AminoAcid sequences
+            let digest (protease:Protease) (aas:seq<AminoAcid>) =
+
+                let groupAfter f (input:seq<_>) =     
+                    let rec group (en:System.Collections.Generic.IEnumerator<_>) cont acc c  =            
+                            if not(f en.Current) && en.MoveNext() then
+                                group en (fun l -> cont <| c::l) acc (fst en.Current) // modified!
+                            else
+                                (fun l -> cont <| c::l) []
+                    seq{
+                        use en = input.GetEnumerator()
+                        while en.MoveNext() do
+                            yield group en id [] (fst en.Current) }// modified! 
+
+                aas
+                |> motivy 3 2
+                |> groupAfter (fun (c,arr) -> isCutingSite protease arr)       
+
+
+        [<CompilationRepresentation(CompilationRepresentationFlags.ModuleSuffix)>]
+        ///Contains functions for digesting AminoAcid arrays
+        module BioArray =
+
+            /// Returns current value,array tuple (current, [|prefix; current; suffix|])
+            let motivy prefixLength suffixLength (source: 'T []) =    
+                if prefixLength < 0 then invalidArg "prefixLength" "Input must be non negative"
+                if suffixLength < 0 then invalidArg "suffixLength" "Input must be non negative"
+                let windowSize = prefixLength + suffixLength + 1
+
+                Array.init (source.Length) 
+                    (fun i ->
+                        let motive =
+                            Array.init windowSize 
+                                (fun ii -> 
+                                    if i+ii < prefixLength || (i+ii-prefixLength) > (source.Length-1) then
+                                        None 
+                                    else
+                                        Some source.[i+ii-prefixLength])
+                        source.[i],motive
+                    )
+
+            /// Takes Proteinsequence as input and returns Array of resulting DigestedPeptides
+            let digest (protease: Protease) (proteinID: int) (aas: AminoAcid []) =
+                let aasLength = aas.Length
+                if aasLength = 0 then [||]
+                else
+                let rec groupAfter f acc lowercounter counter (aasWithOption: (AminoAcid*'a []) []) =
+                    if counter = aasLength-1 then (createDigestedPeptide proteinID 0 (lowercounter+1) (counter+1) (aas.[lowercounter.. counter]|> Array.toList))::acc |> List.rev 
+                    else 
+                        match (f aasWithOption.[counter]) with
+                        | true  -> groupAfter f ((createDigestedPeptide proteinID 0 (lowercounter+1) (counter+1) (aas.[lowercounter.. counter]|> Array.toList))::acc) (counter+1) (counter+1) aasWithOption 
+                        | false -> groupAfter f acc lowercounter (counter+1) aasWithOption
+                aas 
+                |> motivy 3 2 
+                |> (groupAfter (fun (c,arr) -> isCutingSite protease arr) [] 0 0) 
+                |> List.toArray
+
+
+
+            /// Takes an Array of DigestedPeptides and returns an Array of DigestedPeptides including those resulting from one or more missed cleavage events
+            let concernMissCleavages (minMissCleavages:int) (maxMisscleavages:int) (digestedPeptidesA:(DigestedPeptide) []) =
+                if digestedPeptidesA = [||] then [||]
+                else
+                let lengthOfPeptideL = digestedPeptidesA.Length
+                let minToMaxMissCleavagesL = [minMissCleavages.. maxMisscleavages]
+                let rec connectDigestedPeptides acc (digestedPeptidesA: DigestedPeptide []) (fstPepIdx:int)  (lastPepIdx:int) currentMissCleavages =
+                    if lengthOfPeptideL < lastPepIdx then acc
+                    else
+                    match lastPepIdx with
+                    |x when lastPepIdx = lengthOfPeptideL -> acc
+                    |_ ->   
+                        let currentPeptideSeq = 
+                            (digestedPeptidesA.[fstPepIdx.. lastPepIdx]) 
+                            |> Array.map (fun digpep -> digpep.PepSequence) 
+                            |> List.concat
+                        let currentPeptide = 
+                            createDigestedPeptide digestedPeptidesA.[0].ProteinID (currentMissCleavages) digestedPeptidesA.[fstPepIdx].MissCleavageStart 
+                                digestedPeptidesA.[lastPepIdx].MissCleavageEnd currentPeptideSeq
+                    
+                        connectDigestedPeptides (currentPeptide::acc) digestedPeptidesA (fstPepIdx+1) (lastPepIdx+1) currentMissCleavages
+        
+                minToMaxMissCleavagesL
+                |> List.map (fun x ->  (connectDigestedPeptides [] (digestedPeptidesA) 0 x x)) 
+                |> List.concat
+                |> Array.ofList
+
+        ///Contains frequently needed proteases
+        //TODO: switch to better list system
+        module Table = 
+            ///Possible inputs: "Trypsin", "Lys-C"
+            let getProteaseBy name = 
+                match name with
+                | "Trypsin" ->
+                    createProtease "Trypsin" (let _p1 = [AminoAcid.Lys;AminoAcid.Arg] |> Set.ofList 
+                                              fun p4 p3 p2 p1 p1' p2' -> 
+                                              match p1,p1' with
+                                              | Some a1,Some a1' -> _p1.Contains(a1) && not (a1' = AminoAcid.Pro)
+                                              | _   -> false                     
+                                             )       
+                 
+                   
+                | "Lys-C"  ->
+                    createProtease "Lys-C" (let _p1 = [AminoAcid.Lys] |> Set.ofList
+                                            fun p4 p3 p2 p1 p1' p2' -> 
+                                            match p1 with
+                                            | Some a1 -> _p1.Contains(a1)
+                                            | _ -> false
+                                           )    
+
+
+    
+    
+        // Implementation of CleavageScore [ref: Prediction of Missed Cleavage Sites in Tryptic Peptides Aids Protein Identification in Proteomics
+        //                                       Jennifer A. Siepen, Emma-Jayne Keevil, David Knight, and Simon J. Hubbard 
+        //                                       PMC 2009 April 3 ]
+        // The smaller the CleavageScore, the more likely the cleavage is
+        // A CleavageScore over a threshold of 0.5 makes the cleavage unlikely
+        //
+        // Digestion site nomenclature  :     p4  p3  p2  p1  CS p'1 p'2 p'3 p'4 
+        // Digestion site Order in Array:     p'1 p'2 p'3 p'4 CS p4  p3  p2  p1 
+    
+        /// Get cleaved probability
+        module CleavageScore =
+        
+            open AminoAcidSymbols
+
+            /// Get cleaved probability
+            let getCleavedP (a:AminoAcidSymbol) index =
+                match a with
+                | Field AminoAcidSymbol.Ala -> Array.get (   [| 0.0186237213392114  ;  0.00420022515410901 ; 3.53002332739628e-05 ; 0.00133092924208546 ; -0.0049122447091631 ; -0.00626781185306248 ; -0.00113516316915836 ; 0.00392961759045261 ; 0.0 ; |] ) index          
+                | Field AminoAcidSymbol.Cys -> Array.get (   [| 0.0173071318266984  ;  0.0182362544270455  ; 0.0058884097224481 ; 0.00192544607949062 ; 0.01095597703868 ; 0.0148254346379286 ; 0.0219373168460715 ; 0.0112312454730355 ; 0.0 ; |] ) index      
+                | Field AminoAcidSymbol.Asp -> Array.get (   [| -0.007133815548037  ; -0.0118184470690743  ; 0.00654705296026112 ; 0.0067050281150376 ; 0.0141624132472289 ; 0.0099568688324829 ; 0.000929377266564833 ; -0.0162973823115694 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Glu -> Array.get (   [| -0.0124248587558356 ; -0.0232307606334326  ; 0.00181031130741747 ; 0.00716014784841427 ; 0.00818034396259026 ; -0.00222127914039957 ; 0.0028442937155881 ; 0.00491926832574982 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Phe -> Array.get (   [| 0.0119920426834694  ;  0.0139395144782525  ; 0.00468781345674047 ; 0.00297659657226406 ; 0.0101203450370329 ; 0.0144613722136596 ; 0.0280184052564126 ; 0.0181982754958097 ; 0.0 ; |] ) index 
+                | Field AminoAcidSymbol.Gly -> Array.get (   [| -8.20312810020782e-05 ; -0.00290995754241101 ; 0.000209352903511755 ; 0.00371362222983421 ; -0.000318027050528464 ; 0.000868058270173546 ; 0.000330198733098511 ; -0.00346452584212206 ; 0.0 ; |] ) index     
+                | Field AminoAcidSymbol.His -> Array.get (   [| 0.0117663765407453 ; 0.0184516214958325 ; 0.0133219958800885 ; 0.00914181836068942 ; 0.00578955397457641 ; 0.00455527468070269 ; 0.020885755843452 ; 0.00594142776223056 ; 0.0 ; |] ) index  
+                | Field AminoAcidSymbol.Ile -> Array.get (   [| 0.0199421197114211 ; 0.0138750181207925 ; 0.00102285937033262 ; 0.00455976059407499 ; 0.00384378298672882 ; 0.00131189125848209 ; -0.000352115411225422 ; 0.00565904209703786 ; 0.0 ; |] ) index    
+                | Field AminoAcidSymbol.Lys -> Array.get (   [| 0.00402270759345742 ; -0.0148551046432945 ; -0.0267866580116544 ; -0.043014650796067 ; -0.0426349905702423 ; -0.044153635141278 ; -0.0498530534413316 ; -0.0497578863798243 ; -0.0106125875004242 ; |] ) index
+                | Field AminoAcidSymbol.Leu -> Array.get (   [| 0.0147945517930332 ; 0.00748424375471079 ; 0.00219706314393281 ; 0.00457080341133138 ; 0.00188686459291307 ; 0.00508630706016019 ; 0.0015271894185259 ; 0.000930249394099852 ; 0.0 ; |] ) index      
+                | Field AminoAcidSymbol.Met -> Array.get (   [| 0.0212654895269526 ; -0.0087793419126029 ; -0.013142656172641 ; -0.00273285530984907 ; 0.00754132801219614 ; -0.00524147923293532 ; 0.0057657018440194 ; -0.0101222284297051 ; 0.0 ; |] ) index    
+                | Field AminoAcidSymbol.Asn -> Array.get (   [| 0.0153786428896758 ; -0.00122862317011291 ; 0.00644852823507257 ; 0.0143452805729061 ; 0.00863951037114175 ; 0.0111926226367841 ; 0.010514806562435 ; 0.000459581898316416 ; 0.0 ; |] ) index   
+                | Field AminoAcidSymbol.Pyl -> 0.    
+                | Field AminoAcidSymbol.Pro -> Array.get (   [| -1.408135429711 ; -0.0151176661385177 ; 0.000582621641907323 ; 0.00559968095391454 ; 0.0103237181563291 ; 0.0150543502357306 ; 0.00487552628651554 ; 0.0258005600513561 ; 0.0 ; |] ) index    
+                | Field AminoAcidSymbol.Gln -> Array.get (   [| 0.0218723992037776 ; 0.0101618904516775 ; 0.0109962160318145 ; 0.00779255049091823 ; -0.00238950679147767 ; 0.00636975003212183 ; 0.012161600167981 ; 0.0104019447570531 ; 0.0 ; |] ) index    
+                | Field AminoAcidSymbol.Arg -> Array.get (   [| -0.0206030273840874 ; -0.0245971841699276 ; -0.0427821442802085 ; -0.0566332092070675 ; -0.0559191548111558 ; -0.0455394380519306 ; -0.0541455813655727 ; -0.0538609149609292 ; 0.0112126601842253 ; |] ) index     
+                | Field AminoAcidSymbol.Ser -> Array.get (   [| -0.00414728931498034 ; -0.00607359115820411 ; 0.00688957312924048 ; -0.00101967408837821 ; 0.00155119425371577 ; -0.00188774397621617 ; -0.00179609780733301 ; 0.00120217171057805 ; 0.0 ; |] ) index        
+                | Field AminoAcidSymbol.Thr -> Array.get (   [| 0.0115728837855243 ; 0.00871709724548706 ; 0.00208777500908572 ; 3.77150826628033e-06 ; 0.00437580160216219 ; 0.00526322191736816 ; -0.0022521384724719 ; 0.00746782714495857 ; 0.0 ; |] ) index     
+                | Field AminoAcidSymbol.Sel -> 0.
+                | Field AminoAcidSymbol.Val -> Array.get (   [| 0.00681194613657833 ; 0.0173429094275379 ; 0.00479136512294075 ; 0.00825865300614361 ; 0.00493316169438667 ; 0.00417320066605687 ; 0.00917321806055152 ; 0.00952970722162894 ; 0.0 ; |] ) index        
+                | Field AminoAcidSymbol.Trp -> Array.get (   [| 0.0306856368818309 ; 0.00282917821310596 ; 0.00730387808155344 ; 0.0120257729838156 ; 0.00693320815473958 ; 0.0181272910523906 ; 0.0254494100003613 ; 0.0354451553685568 ; 0.0 ; |] ) index    
+                | Field AminoAcidSymbol.Tyr -> Array.get (   [| 0.0194284810017644 ; 0.0127667737830556 ; 0.00498714111480968 ; 0.00476543997301542 ; -0.00523499887692041 ; 0.0152488432689032 ; 0.0194801608035318 ; 0.0168451463172139 ; 0.0 ; |] ) index   
+             
+                | Field AminoAcidSymbol.Xaa -> Array.get (   [| -0.0395949915379296 ; 0.00616249902274598 ; -0.0395949915379296 ; -0.0395949915379296 ; -0.011566267937686 ; -0.090747513985311 ; 0.0183969554397552 ; 0.0853437450703708 ; 0.0 ; |] ) index        
+                | Field AminoAcidSymbol.Xle -> 0. // nan // TODO average IL L
+                | Field AminoAcidSymbol.Glx -> Array.get (   [| 0.0853437450703708 ; nan ; nan ; nan ; 0.0853437450703708 ; 0.0853437450703708 ; nan ; nan ; nan ; |] ) index
+                | Field AminoAcidSymbol.Asx -> Array.get (   [| nan ; 0.0853437450703708 ; nan ; 0.0853437450703708 ; 0.0853437450703708 ; 0.0853437450703708 ; nan ; nan ; nan ; |] ) index
+                                       
+                | Field AminoAcidSymbol.Gap ->  Array.get (   [| 0.0853437450703708 ; -0.0332362471694817 ; -0.00965307329340537 ; 0.0276120364797184 ; 0.0 ; 0.0 ; 0.0 ; 0.0 ; 0.0 ; |] ) index //+
+                | Field AminoAcidSymbol.Ter -> Array.get (   [| 0.0 ; 0.0 ; 0.0 ; 0.0 ; 0.0809346261653139 ; 0.0688146873864017 ; -0.00598639055684963 ; -0.190862666868579 ; 0.0 ; |] ) index
+                // | '+'      -> Array.get (   [| 0.0853437450703708 ; -0.0332362471694817 ; -0.00965307329340537 ; 0.0276120364797184 ; 0.0 ; 0.0 ; 0.0 ; 0.0 ; 0.0 ; |] ) index                                     
+                | _ -> 0.
+
+
+            /// Get missed probability
+            let getMissedP (a:AminoAcidSymbol) index =
+                match a with
+                | Field AminoAcidSymbol.Ala -> Array.get (   [| -0.0978752075888718 ; -0.0198847901320386 ; -0.000162599309729288 ; -0.00618230263814537 ; 0.0219309154288239 ; 0.0277509980548175 ; 0.00518962066798522 ; -0.0185699461081312 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Cys -> Array.get (   [| -0.0900299925122872 ; -0.0955491607057649 ; -0.0281974726429136 ; -0.00897879962781697 ; -0.0543599833494246 ; -0.075678654240404 ; -0.118382665495958 ; -0.055836336165553 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Asp -> Array.get (   [| 0.0314193812682074 ; 0.0506231703962375 ; -0.0314932243949499 ; -0.0322881887450678 ; -0.0719369441334252 ; -0.0490509678757512 ; -0.00430576272835574 ; 0.0680412256217175 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Glu -> Array.get (   [| 0.053033404763815 ; 0.0933726218622424 ; -0.00843551965998847 ; -0.0345882434083597 ; -0.039797629165997 ; 0.0100851603709803 ; -0.0133442628224803 ; -0.0234020070165263 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Phe -> Array.get (   [| -0.0599479594033155 ; -0.0706875113660484 ; -0.0222660916680952 ; -0.0139772319849951 ; -0.0499143795822267 ; -0.0736193995313521 ; -0.159199188226274 ; -0.0953219453246492 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Gly -> Array.get (   [| 0.000377565111733786 ; 0.0131546928606713 ; -0.000965402581785676 ; -0.0175238898661841 ; 0.00146155865238389 ; -0.00402007022809444 ; -0.00152385762818177 ; 0.0156073024880689 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.His -> Array.get (   [| -0.0587235591508078 ; -0.0968402609563878 ; -0.0672480561621474 ; -0.0447764411487796 ; -0.0277053940254705 ; -0.021617235705912 ; -0.111752086596945 ; -0.0284616589594472 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Ile -> Array.get (   [| -0.105900364403556 ; -0.0703267734771773 ; -0.00474174668456256 ; -0.0216391778358078 ; -0.0181538887728747 ; -0.00609311139032893 ; 0.00161786388127948 ; -0.0270567632506103 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Lys -> Array.get (   [| -0.0190217159063933 ; 0.0625277225670337 ; 0.105665409640322 ; 0.156626255550412 ; 0.155520296424895 ; 0.159921600947166 ; 0.175921830105783 ; 0.175661135601301 ; 0.045780112741825 ; |] ) index 
+                | Field AminoAcidSymbol.Leu -> Array.get (   [| -0.0755035242375552 ; -0.0362350755152361 ; -0.0102637359244067 ; -0.0216931977051885 ; -0.00879665556808435 ; -0.0242240132320469 ; -0.00710306174657736 ; -0.00430982772116158 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Met -> Array.get (   [| -0.114132974344623 ; 0.0382858518051329 ; 0.0558648649150308 ; 0.0123678582184007 ; -0.0365259139536316 ; 0.0233533371391882 ; -0.0275867644275067 ; 0.0437912585812453 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Asn -> Array.get (   [| -0.0788299287119621 ; 0.00561354121363955 ; -0.0309983090244586 ; -0.0729651571272533 ; -0.0421668411875381 ; -0.0556288300737603 ; -0.05200619945784 ; -0.00212273527092521 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Pyl -> 0.
+                | Field AminoAcidSymbol.Pro -> Array.get (   [| 0.734415886060518 ; 0.0635380307218723 ; -0.00269318400929317 ; -0.0267621290007437 ; -0.0509913667336383 ; -0.0769793817145727 ; -0.0231870591559591 ; -0.143797826856209 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Gln -> Array.get (   [| -0.117969916479998 ; -0.0501341304713985 ; -0.0545754266914563 ; -0.0378086424322195 ; 0.0108374287670045 ; -0.0306030716008546 ; -0.0608706268016039 ; -0.05140647211982 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Arg -> Array.get (   [| 0.0839923310796518 ; 0.098149568952218 ; 0.15594940772927 ; 0.193963194795178 ; 0.192111114311861 ; 0.163886132848834 ; 0.187463071487379 ; 0.186710603099994 ; -0.0557364696622383 ; |] ) index
+                | Field AminoAcidSymbol.Ser -> Array.get (   [| 0.0186037055032237 ; 0.026922998856415 ; -0.0332190722669007 ; 0.00466508103872825 ; -0.00721584368297066 ; 0.00858895217038756 ; 0.00817672965188891 ; -0.00557951790496735 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Thr -> Array.get (   [| -0.0576769689205623 ; -0.0425687027718217 ; -0.00974617373848127 ; -1.73687197929421e-05 ; -0.0207404654555115 ; -0.0250966660897753 ; 0.0102232743918174 ; -0.0361514776936105 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Sel -> 0.
+                | Field AminoAcidSymbol.Val -> Array.get (   [| -0.0328272141698239 ; -0.0902410077930348 ; -0.0227738520402987 ; -0.0402005991445456 ; -0.0234703056642569 ; -0.0197532660165449 ; -0.0449401919765487 ; -0.0468044755798308 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Trp -> Array.get (   [| -0.178586262592309 ; -0.0132720175157153 ; -0.0353176592986856 ; -0.0601313220751986 ; -0.0334395263233619 ; -0.0948976405530249 ; -0.141415693210588 ; -0.215820560584245 ; 0.0 ; |] ) index 
+                | Field AminoAcidSymbol.Tyr -> Array.get (   [| -0.1027532603019 ; -0.0641827923770768 ; -0.0237357867488708 ; -0.0226466622914581 ; 0.0233253947565591 ; -0.0780881261688707 ; -0.103068708688225 ; -0.0873159356295266 ; 0.0 ; |] ) index
+             
+                | Field AminoAcidSymbol.Xaa -> Array.get (   [| 0.146525928646086 ; -0.0295653304095948 ; 0.146525928646086 ; 0.146525928646086 ; 0.0496159156380304 ; 0.271464665254386 ; -0.0965121200402077 ; 0.0 ; 0.0 ; |] ) index
+                | Field AminoAcidSymbol.Xle -> 0. // nan // TODO average IL L                                                                                             
+                | Field AminoAcidSymbol.Glx -> Array.get (   [| 0.0 ; nan ; 0.0 ; nan ; 0.0 ; 0.0 ; 0.0 ; 0.0 ;  nan; |] ) index
+                | Field AminoAcidSymbol.Asx -> Array.get (   [| nan ; 0.0 ; nan ; 0.0 ; 0.0 ; 0.0 ; nan ; nan ; nan ; |] ) index
+                                       
+                | Field AminoAcidSymbol.Gap -> 0.
+                | Field AminoAcidSymbol.Ter -> Array.get (   [| 0.0 ; 0.0 ; 0.0 ; 0.0 ; -1.2470492746235 ; -0.679188613161495 ; 0.0265506115693381 ; 0.421226985587719 ; 0.0 ; |] ) index       
+                // Nterminal    | '+'      -> Array.get (   [| 0.0 ; 0.126871240649617 ; 0.0418777246061289 ; -0.156330115182119 ; 0.0 ; 0.0 ; 0.0 ; 0.0 ; 0.0 ; |] ) index                                     
+                | _ -> 0.
+
+        
+
+            let calculateCleavageScore (source:BioArray.BioArray<AminoAcidSymbol>) =
+                let n = 4       
+                let lastIndex = source.Length - 1
+        
+                let get a i = 
+                    (getMissedP a i) - (getCleavedP a i)
+        
+                Array.init source.Length
+                    (fun i -> 
+                        match i with
+                        | pos when pos < n -> 
+                            Array.foldSub (fun (i,acc) v -> (i+1,acc + get v i) ) (0,0.0) source 0 (pos+n) 
+                            |> snd
+                        | pos when pos+n > lastIndex  ->  
+                            Array.foldSub (fun (i,acc) v -> (i+1,acc + get v i) ) (0,0.0) source (pos-n) lastIndex 
+                            |> snd
+                        | _ -> 
+                            Array.foldSub (fun (i,acc) v -> (i+1,acc + get v i) ) (0,0.0) source (i-n) (i+n) 
+                            |> snd
+                    )
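+
+            // Usage sketch (illustrative; the peptide literal below is an arbitrary example). The score
+            // at position i sums (missedP - cleavedP) over a window of up to four residues on each side,
+            // so larger values indicate a more likely missed cleavage at that site:
+            //   "TESTPEPTIDEKAR"
+            //   |> BioArray.ofAminoAcidSymbolString
+            //   |> calculateCleavageScore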
+
+    open System.Collections.Generic
+    open System.Reflection
+    open FSharpAux
+    open FSharpAux.IO
+    open BioFSharp
+    open BioFSharp.IO
+    open BioFSharp.BioArray
+    open BioFSharp.Digestion
+    open BioFSharp.AminoAcidSymbols
+    open CNTK
+
+    /// Record type containing feature vector, proteinId, and sequence of a peptide
+    type PredictionInput = {
+        ///Feature vector
+        Data      : float[]
+        ///ID of the protein this peptide maps to
+        ProtId    : string
+        ///Sequence of the peptide
+        Sequence  : string
+    }
+
+    /// returns a PredictionInput type given the values for the record fields
+    let createPredictionInput data protID sequence  = {
+        Data      = data
+        ProtId    = protID
+        Sequence  = sequence
+        }
+
+    /// Record type containing prediction score, proteinId, and sequence of a peptide
+    type PredictionOutput = {
+        ///ID of the protein this peptide maps to
+        ProteinId: string
+        ///Sequence of the peptide
+        Sequence: string
+        ///observability score returned by dppop prediction
+        PredictionScore: float
+        ///Indicates whether the peptide maps uniquely to the protein of interest
+        Distinct: bool
+    }
+    /// returns a PredictionOutput type given the values for the record fields
+    let createPredictionOutput pId sequence score distinct = {ProteinId = pId; Sequence = sequence; PredictionScore = score; Distinct=distinct}
+
+    ///Contains functions to extract the features used in dppop to classify peptides for observability prediction
+    module Classification =
+        open FSharpAux
+
+        let private peptideFeatures =
+            [|
+                AminoProperties.initGetAminoProperty AminoProperties.AminoProperty.ActivationGibbsEnergy9;
+                AminoProperties.initGetAminoProperty AminoProperties.AminoProperty.MEMofSingleSpanning;
+                AminoProperties.initGetAminoProperty AminoProperties.AminoProperty.PrincipalComponentII;
+                AminoProperties.initGetAminoProperty AminoProperties.AminoProperty.HydrophobicityIndex2;        
+                AminoProperties.initGetAminoProperty AminoProperties.AminoProperty.ChouFasmanCoil;
+                AminoProperties.initGetAminoProperty AminoProperties.AminoProperty.AverageNumberSurroundingResidues;
+                AminoProperties.initGetAminoProperty AminoProperties.AminoProperty.CompositionIntracellular;
+                AminoProperties.initGetAminoProperty AminoProperties.AminoProperty.WeightsHelixMinus3;
+                AminoProperties.initGetAminoProperty AminoProperties.AminoProperty.HelixFormationParameters;        
+                AminoProperties.initGetAminoProperty AminoProperties.AminoProperty.FreeEnergyHelicalRegion;
+                AminoProperties.initGetAminoProperty AminoProperties.AminoProperty.ELi;        
+                AminoProperties.initGetAminoProperty AminoProperties.AminoProperty.CompositionExtracellular;
+                AminoProperties.initGetAminoProperty AminoProperties.AminoProperty.HydrophobicityIndex;
+            |]
+
+
+        ///Tryptic digestion of an amino acid sequence with the ability to control the maximal amount of misscleavages and filtering of a minimal peptide length
+        let digestTrypticWith (maxMissCleavages:int) (minPeptideLength:int) (aminoAcidSeq:BioArray<AminoAcids.AminoAcid>) =
+            Digestion2.BioArray.digest (Digestion2.Table.getProteaseBy "Trypsin") 0 aminoAcidSeq
+            |> Digestion2.BioArray.concernMissCleavages 0 maxMissCleavages
+            |> Seq.map (fun p -> p.PepSequence |> List.toArray)
+            |> Seq.filter (fun p -> p.Length > minPeptideLength)
+
+        ///Tryptic Digestion2 of an amino acid sequence with the settings used for the dppop web API
+        let digestTryptic (aminoAcidSeq:BioArray<AminoAcids.AminoAcid>) =
+            aminoAcidSeq
+            |> digestTrypticWith 0 6 
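+
+        // Usage sketch (illustrative; the sequence literal is an arbitrary example). digestTryptic
+        // yields fully cleaved tryptic peptides (no missed cleavages) longer than six residues:
+        //   "MASSMLVARDPKAQWAKDDLR"
+        //   |> BioArray.ofAminoAcidString
+        //   |> digestTryptic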
+
+        ///Returns a distinct set of peptides that map uniquely to a single protein from the given fasta input
+        let getDistinctTrypticPeptidesFromFasta (fa:seq<FastA.FastaItem<BioArray<AminoAcids.AminoAcid>>>) = 
+            //fileDir + "Chlamy_Cp.fastA"
+            fa
+            |> Seq.map (fun fi -> fi.Sequence |> Array.filter (not << AminoAcids.isTerminator))
+            |> Seq.map digestTryptic
+            |> Seq.collect (fun dig -> dig |> Seq.map BioArray.toString |> Set.ofSeq |> Set.toSeq)
+            |> Seq.countBy id
+            |> Seq.filter (fun (key,count) -> count=1)
+            |> Seq.map fst
+            |> Set.ofSeq
+
+        ///Returns a distinct set of peptides that map uniquely to a single protein from the given fasta file input
+        let getDistinctTrypticPeptidesFromFastaFile (filePath: string) = 
+            //fileDir + "Chlamy_Cp.fastA"
+            filePath
+            |> FastA.fromFile BioArray.ofAminoAcidString
+            |> getDistinctTrypticPeptidesFromFasta
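+
+        // Usage sketch (illustrative; the file path is a placeholder):
+        //   let distinctPeptides = getDistinctTrypticPeptidesFromFastaFile "proteome.fasta"
+        // yields the set of tryptic peptides that occur in exactly one protein of the input proteome.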
+
+        ///returns a sequence of pairs mapping a (proteinID*sequence) tuple to the three digestion efficiency scores in the form of a (float*float*float) tuple
+        let private getDigestionEfficiency (protId) (sequence:BioArray.BioArray<AminoAcids.AminoAcid>) =
+                let cs = sequence |> Array.map AminoAcidSymbols.aminoAcidSymbol |> Digestion2.CleavageScore.calculateCleavageScore
+        
+                let getStart index = if index < 2 then 0. else cs.[index-1]
+                let getEnd index = if index >= cs.Length  then 0. else cs.[index]
+
+                let calc (p:Digestion2.DigestedPeptide) =
+                    if p.MissCleavages < 1 then
+                        (protId,p.PepSequence |> Seq.map AminoAcidSymbols.aminoAcidSymbol |> Seq.toArray ),(getStart p.MissCleavageStart,0.,getEnd p.MissCleavageEnd)
+                    else
+                        let inter' = p.MissCleavages - 1 |> float
+                        let s = getStart p.MissCleavageStart                
+                        let e = getEnd p.MissCleavageEnd
+                        // let inter' = inter - s - e
+                        (protId,p.PepSequence |> Seq.map AminoAcidSymbols.aminoAcidSymbol |> Seq.toArray),(s,inter',e)
+
+                Digestion2.BioArray.digest (Digestion2.Table.getProteaseBy "Trypsin") 0 sequence
+                |> Digestion2.BioArray.concernMissCleavages 0 3
+                |> Seq.map calc
+
+        //adaption to new digestion module, not working correctly. 
+        //let getDigestionEfficiency (protId) (sequence) =
+        //    let cleavageScore = sequence |> Array.map AminoAcidSymbols.aminoAcidSymbol |> Digestion.CleavageScore.calculateCleavageScore
+        
+        //    //TODO: digestion hast changed from 1 based index to 0 based index, identify the numbers to change
+        //    //ERROR HERE
+        //    let getStart index = if index < 2 then 0. else cleavageScore.[index]//pretty sure this one
+        //    let getEnd index = if index >= cleavageScore.Length-1  then 0. else cleavageScore.[index]
+
+
+        //    let calc (p:DigestedPeptide) =
+        //        if p.MissCleavages < 1 then
+        //            (protId,p.PepSequence |> Seq.map AminoAcidSymbols.aminoAcidSymbol |> Seq.toArray ),(getStart p.CleavageStart,0.,getEnd p.CleavageEnd)
+        //        else
+        //            let inter' = p.MissCleavages - 1 |> float // maybe this one
+        //            let s = getStart p.CleavageStart                
+        //            let e = getEnd p.CleavageEnd
+        //            // let inter' = inter - s - e
+        //            (protId,p.PepSequence |> Seq.map AminoAcidSymbols.aminoAcidSymbol |> Seq.toArray),(s,inter',e)
+
+        //    Digestion.BioArray.digest (Digestion.Table.getProteaseBy "Trypsin") 0 sequence
+        //    |> Digestion.BioArray.concernMissCleavages 0 3
+        //    |> Seq.map calc
+
+        ///returns a map from a (proteinID*sequence) tuple to the three digestion efficiency scores in the form of a (float*float*float) tuple, built from the input fasta item collection
+        let createDigestionEfficiencyMapFromFasta (fa:seq<FastA.FastaItem<BioArray<AminoAcids.AminoAcid>>>) = 
+            fa
+            |> Seq.map (fun fi -> {fi with Sequence=fi.Sequence |> Array.filter (not << AminoAcids.isTerminator)})
+            |> Seq.collect (fun fi -> getDigestionEfficiency fi.Header fi.Sequence)
+            |> Map.ofSeq
+
+        ///get the physicochemical properties of a peptide: length, MolecularWeight, NetCharge, PositiveCharge, NegativeCharge, pI, and the relative frequencies of polar, hydrophobic, and negatively charged amino acids
+        let getPhysicochemicalProperties (peptide:BioArray<AminoAcidSymbol>) =
+            let pI peptide = 
+                //default function for pKr of charged aminoacids
+                let pKrFunction = IsoelectricPoint2.getpKr
+                match IsoelectricPoint2.tryFind pKrFunction 0.5 peptide with
+                | Some (ph,pk) -> pk
+                | None -> 0.
+            let len = float peptide.Length
+            let positiveCharge = peptide |> Seq.countIf AminoAcidSymbols.isPosCharged |> float
+            let negativeCharge = peptide |> Seq.countIf AminoAcidSymbols.isNegCharged |> float
+            [|
+                //length
+                len;
+                //MolecularWeight
+                BioArray.toAverageMass peptide
+                //  NetCharge
+                negativeCharge + positiveCharge
+                // PositiveCharge, 
+                positiveCharge
+                // NegativeCharge        
+                negativeCharge
+                // piI
+                //ERROR HERE
+                pI peptide      
+                //RelFreqPolar
+                peptide |> Seq.countIf AminoAcidSymbols.isPolar |> fun x -> float x / len  
+                //RelFreqHydrophobic
+                peptide |> Seq.countIf AminoAcidSymbols.isHydrophobic |> fun x -> float x / len
+                //RelFreqNegative 
+                negativeCharge / len
+            |]
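+
+        // Usage sketch (illustrative; the peptide literal is an arbitrary example). The result contains
+        // the nine features in the order given by the inline comments above:
+        //   "SAMPLEPEPTIDER"
+        //   |> BioArray.ofAminoAcidSymbolString
+        //   |> getPhysicochemicalProperties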
+
+
+        ///Returns a PredictionInput record type for the input peptide containing the calculated feature vector given a digestion efficiency map and the proteinId the peptide maps to
+        let getPeptideFeatures (digestionEfficiencyMap:Map<(string*BioArray<AminoAcidSymbol>),(float*float*float)>) (protId:string) (peptide) =
+            let peptide' = peptide |> Array.map (fun x -> (x :> IBioItem).Symbol) |> String.fromCharArray
+            let getIndex (a:AminoAcidSymbol) = (int a) - 65
+            // Relative amino acid frequency peptide features
+            let relFreq = 
+                let tmp = BioArray.toRelCompositionVector peptide
+                [|
+                    tmp.[getIndex AminoAcidSymbol.Ala];tmp.[getIndex AminoAcidSymbol.Cys];tmp.[getIndex AminoAcidSymbol.Asp];tmp.[getIndex AminoAcidSymbol.Glu];
+                    tmp.[getIndex AminoAcidSymbol.Phe];tmp.[getIndex AminoAcidSymbol.Gly];tmp.[getIndex AminoAcidSymbol.His];tmp.[getIndex AminoAcidSymbol.Ile];
+                    tmp.[getIndex AminoAcidSymbol.Lys];tmp.[getIndex AminoAcidSymbol.Leu];tmp.[getIndex AminoAcidSymbol.Met];tmp.[getIndex AminoAcidSymbol.Asn];
+                    tmp.[getIndex AminoAcidSymbol.Pro];tmp.[getIndex AminoAcidSymbol.Gln];tmp.[getIndex AminoAcidSymbol.Arg];tmp.[getIndex AminoAcidSymbol.Ser];
+                    tmp.[getIndex AminoAcidSymbol.Thr];tmp.[getIndex AminoAcidSymbol.Val];tmp.[getIndex AminoAcidSymbol.Trp];tmp.[getIndex AminoAcidSymbol.Tyr];        
+                |]
+
+            let physicochemical = getPhysicochemicalProperties peptide
+            let pf = 
+                peptideFeatures
+                |> Array.map (fun f -> peptide |> Array.averageBy f)
+
+
+            let digest = 
+                if digestionEfficiencyMap.ContainsKey (protId,peptide) then
+                    let a,b,c = digestionEfficiencyMap.[(protId,peptide)]
+                    Some [|a;b;c|]
+                else
+                    //printfn "%s - %A" protId peptide
+                    // [|0.;0.;0.|]
+                    None
+            match digest with
+            | Some v -> let inp = createPredictionInput (Array.concat [|relFreq;physicochemical;pf;v|]) protId (peptide')
+                        Some inp 
+            | None -> None
+        
+        ///normalizes feature vector given a normalization vector containing tuples of (stDev,mean) of all features across a dataset
+        let zNormalizePeptideFeaturesBy (norm: (float*float) []) (features:PredictionInput)  =
+            if norm.Length = features.Data.Length then
+                {features with 
+                    Data = 
+                        (Array.map2 
+                            (fun d (stDev,mean) -> if nan.Equals((d-mean)/stDev) then 0. else (d-mean)/stDev) ) 
+                            features.Data 
+                            norm
+                }
+            else
+                failwithf "feature vector and normalization vector have different length"
+
+        let private chlamyNorm = 
+            [|
+                (0.1294759775, 0.1477564866); (0.03232439131, 0.0120497691);
+                (0.06583974433, 0.05254771891); (0.08037249763, 0.06582908406);
+                (0.04923115097, 0.02856099006); (0.09625444791, 0.08862113627);
+                (0.0409499754, 0.01903257066); (0.0524744837, 0.03142502873);
+                (0.04812202378, 0.03354896847); (0.08688410957, 0.09624922596);
+                (0.04372886103, 0.02224862021); (0.04796601885, 0.02697248923);
+                (0.07505402555, 0.05901261068); (0.06734344394, 0.04329616126);
+                (0.05108587462, 0.05081075304); (0.07635354655, 0.06585390926);
+                (0.06308348418, 0.04958665822); (0.07538097997, 0.07221419823);
+                (0.03104479598, 0.01143679638); (0.04449184194, 0.02294694945);
+                (18.08936628, 17.66412604); (1722.917992, 1829.439902);
+                (3.039747471, 4.021484475); (0.9115152624, 1.442723413);
+                (2.740599321, 2.578761062); (0.2259591883, -0.07734019599);
+                (0.1348471884, 0.251175164); (0.1460896078, 0.5460883068);
+                (0.114005905, 0.153373458); (0.9449223245, 17.50726637);
+                (1.869898332, 6.934009825); (0.08655133387, 0.06496131673);
+                (0.2762850846, 0.2276330281); (0.1053224169, 0.9909929427);
+                (0.2372540707, 6.112844572); (1.124168016, 6.598078652);
+                (0.1087448998, 0.01658923627); (0.2635107797, -0.1762355472);
+                (0.1007580986, 0.930990863); (0.1207067007, 1.148202475);
+                (0.6813763089, 6.154813386); (0.2165025154, 0.8372242502);
+                (0.5278203242, 0.2050764499); (0.0, 0.0); (0.6044132199, 0.2977169821)
+            |]
+
+        let private yeastNorm = 
+            [|
+                (0.07205449942, 0.05870317244); (0.03292579913, 0.01172451697);
+                (0.0738234841, 0.06475416441); (0.08378288208, 0.07431514303);
+                (0.06013549634, 0.04385188562); (0.06678701753, 0.05148753581);
+                (0.04301839169, 0.02170466904); (0.07128574467, 0.06705647111);
+                (0.05220592539, 0.05707495112); (0.08706581448, 0.09882663785);
+                (0.0413525213, 0.02031959783); (0.07346084846, 0.06278570344);
+                (0.06218915668, 0.04630561542); (0.06262204346, 0.04122256361);
+                (0.04660452507, 0.03070757211); (0.08959641681, 0.08842291913);
+                (0.06891261589, 0.05839174474); (0.06871686103, 0.05922046758);
+                (0.02926166995, 0.009963582353); (0.05267493232, 0.03316121013);
+                (10.36244966, 15.40208044); (1114.854432, 1712.257911);
+                (2.860379011, 4.239515625); (0.7426156118, 1.409460591);
+                (2.640116869, 2.830055033); (0.2325694909, -0.07824047436);
+                (0.1423364521, 0.3273767806); (0.1441495189, 0.4457713024);
+                (0.1219682278, 0.183954965); (0.8638290629, 17.83917358);
+                (1.97944068, 6.432096404); (0.07668969239, 0.005624749244);
+                (0.2632108286, 0.09569136325); (0.0912584431, 1.000864484);
+                (0.2694954289, 6.070490988); (1.027388398, 5.972532616);
+                (0.08758176723, 0.01262545237); (0.2132952054, -0.1766576158);
+                (0.07707357534, 0.9373354032); (0.1087780497, 1.173638978);
+                (0.567622227, 5.768608549); (0.229437042, 0.8949232498);
+                (0.4736182826, 0.1529837898); (0.0, 0.0); (0.5517373002, 0.2606125317)
+            |]
+
+        ///returns the z normalized version of the input feature vector by using the normalization vector dppop uses for its plant model
+        let zNormalizePlantFeatureVector (features:PredictionInput) =
+            zNormalizePeptideFeaturesBy chlamyNorm features
+
+        ///returns the z normalized version of the input feature vector by using the normalization vector dppop uses for its non-plant model
+        let zNormalizeNonPlantFeatureVector (features:PredictionInput) =
+            zNormalizePeptideFeaturesBy yeastNorm features
+
+    ///Contains functions to execute peptide observability prediction using the deep neural network dppop.
+    module Prediction =
+        open System.IO
+
+        ///Options concerning the model used for prediction.
+        type Model =
+            ///dppops original plant model used in the web API.
+            | Plant
+            ///dppops original non-plant model used in the web API.
+            | NonPlant
+            ///Option to use a custom model at the given path
+            | Custom of string
+
+            ///returns a byte array from the resource stream. Either reads the original models from the manifest resources or reads the custom model from the given path
+            static member getModelBuffer =
+                let assembly = Assembly.GetExecutingAssembly()
+                let resnames = assembly.GetManifestResourceNames();
+                function
+                | Plant         ->  match Array.tryFind (fun (r:string) -> r.Contains("Chlamy5Times128.model")) resnames with
+                                    | Some path -> 
+                                        use stream = assembly.GetManifestResourceStream(path)
+                                        let length = int stream.Length
+                                        use bReader = new BinaryReader(stream)
+                                        bReader.ReadBytes(length)
+
+                                    | _ -> failwithf "could not load plant model from embedded resources, check package integrity"
+
+                | NonPlant      ->  match Array.tryFind (fun (r:string) -> r.Contains("Yeast5Times128.model.model")) resnames with
+                                    | Some path ->                                         
+                                        use stream = assembly.GetManifestResourceStream(path)
+                                        let length = int stream.Length
+                                        use bReader = new BinaryReader(stream)
+                                        bReader.ReadBytes(length)
+                                    | _ -> failwithf "could not load non-plant model from embedded resources, check package integrity"
+                | Custom path   ->  use stream = new FileStream(path,FileMode.Open)
+                                    let length = int stream.Length
+                                    use bReader = new BinaryReader(stream)
+                                    bReader.ReadBytes(length)
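+
+        // Usage sketch (illustrative; the path is a placeholder): Plant and NonPlant read the models
+        // embedded in the assembly, while Custom reads a CNTK model file from disk:
+        //   let modelBuffer = Model.getModelBuffer (Custom @"C:\models\myModel.model")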
+
+        /// For expert use.
+        /// Returns the observability prediction for the input peptides.
+        /// Loads a trained CNTK model (either dppops plant/nonPlant models or a custom model) and evaluates the scores for the given collection of features (PredictionInput)
+        /// No feature normalization is done.
+        let scoreBy (model:Model) (distinctPeptideSet: Set<string>) (data:PredictionInput []) = 
+            let device = DeviceDescriptor.CPUDevice
+
+            let PeptidePredictor : Function = 
+                Function.Load(Model.getModelBuffer model,device)
+
+            ///////////Input 
+            let inputVar: Variable = PeptidePredictor.Arguments.Item 0
+
+            let inputShape = inputVar.Shape
+            /// Gets Size of one Feature Vector
+            let featureVectorLength = inputShape.[0] 
+
+            /// Extracts all Features and appends them, stores Values in a List
+            let featureData = 
+                let tmp = new System.Collections.Generic.List<float32>()
+                data |> Array.iter(fun x -> 
+                                    let data' = x.Data |> Array.map (fun x -> float32 (x))
+                                    tmp.AddRange(data')
+                                   )
+                tmp
+
+            /// Creates an input Batch
+            let inputValues = Value.CreateBatch(inputShape,featureData,device)
+
+            let inputMap = new Dictionary<Variable, Value>()
+            inputMap.Add(inputVar,inputValues)
+
+            ///////////Output
+            let outputVar : Variable = PeptidePredictor.Output
+
+            let outputMap = new Dictionary<Variable, Value>()
+            outputMap.Add(outputVar,null)
+
+            PeptidePredictor.Evaluate(inputMap,outputMap,device)
+
+            let outputValues = outputMap.[outputVar]
+
+            let preds = 
+                outputValues.GetDenseData(outputVar)
+                |> Seq.concat
+                |> Array.ofSeq
+
+            let res = 
+                Array.map2 (fun (data:PredictionInput) preds -> createPredictionOutput data.ProtId data.Sequence (float preds) (distinctPeptideSet.Contains(data.Sequence))) data preds
+            res
+
+        ///Returns relative observability scores for uniquely mapping peptides of proteins of interest given a model, normalization procedure for features, and the proteome of the organism.
+        let scoreProteinsAgainstProteome (model:Model) (featureNormalization: PredictionInput -> PredictionInput) (proteome: seq<FastA.FastaItem<BioArray.BioArray<AminoAcids.AminoAcid>>>) (proteinsOfInterest: seq<FastA.FastaItem<BioArray.BioArray<AminoAcids.AminoAcid>>>)  =
+            printfn "Determining distinct peptides..."
+            //only uniquely mapping peptides in the given proteome will be considered candidate peptides.
+            let distinctPeptides = Classification.getDistinctTrypticPeptidesFromFasta proteome
+            let digestionEfficiencyMap = Classification.createDigestionEfficiencyMapFromFasta proteinsOfInterest
+            printfn "determining peptide features and predicting observability..."
+            proteinsOfInterest
+            |> Seq.map (fun protein ->
+                                        let protId = protein.Header
+                                        //uniquely mapping digested peptides
+                                        let digested =
+                                            Classification.digestTryptic protein.Sequence
+                                            |> Seq.map (fun x -> BioArray.toString x)
+                                            |> List.ofSeq
+                                        let candidatePeptides = 
+                                            digested
+                                            |> Seq.map (fun p -> Classification.getPeptideFeatures digestionEfficiencyMap protId (BioArray.ofAminoAcidSymbolString p))
+                                            |> Array.ofSeq
+                                            |> Array.choose id
+
+                                        candidatePeptides
+                                            |> Array.map (featureNormalization)
+                                            |> scoreBy model distinctPeptides
+                                            |> Array.sortByDescending (fun (x) -> x.PredictionScore)
+                                            |> fun x -> let max = (Array.maxBy (fun (x) -> x.PredictionScore) x).PredictionScore 
+                                                        x |> Array.map (fun (x) -> if x.PredictionScore >= 0. then {x with PredictionScore = (x.PredictionScore/max)} else {x with PredictionScore = 0.0})
+
+                )
+
+        ///Returns relative observability scores for uniquely mapping peptides of proteins of interest using dppop's plant model and feature normalization procedure, given the proteome of the organism.
+        let scoreDppopPlant (proteome: seq<FastA.FastaItem<BioArray.BioArray<AminoAcids.AminoAcid>>>) (proteinsOfInterest: seq<FastA.FastaItem<BioArray.BioArray<AminoAcids.AminoAcid>>>) =
+            scoreProteinsAgainstProteome Model.Plant Classification.zNormalizePlantFeatureVector proteome proteinsOfInterest
+
+        ///Returns relative observability scores for uniquely mapping peptides of proteins of interest using dppop's non-plant model and feature normalization procedure, given the proteome of the organism.
+        let scoreDppopNonPlant (proteome: seq<FastA.FastaItem<BioArray.BioArray<AminoAcids.AminoAcid>>>) (proteinsOfInterest: seq<FastA.FastaItem<BioArray.BioArray<AminoAcids.AminoAcid>>>) =
+            scoreProteinsAgainstProteome Model.NonPlant Classification.zNormalizeNonPlantFeatureVector proteome proteinsOfInterest
\ No newline at end of file
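
Note: a minimal usage sketch for the prediction API added above. The module path BioFSharp.ML.DPPOP, the FastA.fromFile/BioArray.ofAminoAcidString reader call and the file path are assumptions for illustration, not part of this patch.

    open BioFSharp
    open BioFSharp.IO
    open BioFSharp.ML.DPPOP

    // Read a (hypothetical) proteome fasta into FastaItems; materialize it because it
    // is consumed twice (distinct peptide lookup and feature generation).
    let proteome =
        FastA.fromFile BioArray.ofAminoAcidString "C:/data/proteome.fasta"
        |> Array.ofSeq

    // Score all uniquely mapping tryptic peptides of the proteins of interest
    // (here: the whole proteome) with dppop's plant model, then flatten and rank.
    // A custom CNTK model could be plugged in via
    // Prediction.scoreProteinsAgainstProteome (Prediction.Model.Custom "C:/models/my.model") ...
    let rankedPeptides =
        Prediction.scoreDppopPlant proteome proteome
        |> Seq.concat
        |> Seq.sortByDescending (fun p -> p.PredictionScore)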
diff --git a/src/BioFSharp.ML/Resources/Chlamy5Times128.model b/src/BioFSharp.ML/Resources/Chlamy5Times128.model
new file mode 100644
index 00000000..7a0e22fe
Binary files /dev/null and b/src/BioFSharp.ML/Resources/Chlamy5Times128.model differ
diff --git a/src/BioFSharp.ML/Resources/Yeast5Times128.model b/src/BioFSharp.ML/Resources/Yeast5Times128.model
new file mode 100644
index 00000000..f403e5c1
Binary files /dev/null and b/src/BioFSharp.ML/Resources/Yeast5Times128.model differ
diff --git a/src/BioFSharp.ML/paket.references b/src/BioFSharp.ML/paket.references
new file mode 100644
index 00000000..5938e277
--- /dev/null
+++ b/src/BioFSharp.ML/paket.references
@@ -0,0 +1,4 @@
+FSharp.Core
+FSharpAux
+FSharpAux.IO
+CNTK.CPUOnly
\ No newline at end of file
diff --git a/src/BioFSharp.ML/paket.template b/src/BioFSharp.ML/paket.template
new file mode 100644
index 00000000..5aa40a45
--- /dev/null
+++ b/src/BioFSharp.ML/paket.template
@@ -0,0 +1,30 @@
+type project
+id BioFSharp.ML
+title
+    BioFSharp.ML
+owners
+    Timo Mühlhaus
+authors 
+    Timo Mühlhaus
+projectUrl
+    https://github.com/CSBiology/BioFSharp
+iconUrl
+    https://raw.githubusercontent.com/CSBiology/BioFSharp/master/docs/files/img/logo.png
+licenseUrl
+    https://github.com/CSBiology/BioFSharp/blob/master/LICENSE.txt
+requireLicenseAcceptance
+    false
+language
+    F#
+copyright
+    Copyright 2019
+tags
+    bioinformatics F# fsharp machine learning
+summary
+    Lightweight F# wrapper for ML.NET
+description
+    Lightweight F# wrapper for ML.NET
+include-referenced-projects 
+    true
+files
+	../../bin/BioFSharp.ML ==> lib
\ No newline at end of file
diff --git a/src/BioFSharp.Parallel/AssemblyInfo.fs b/src/BioFSharp.Parallel/AssemblyInfo.fs
index 3696cfe1..1e01fd93 100644
--- a/src/BioFSharp.Parallel/AssemblyInfo.fs
+++ b/src/BioFSharp.Parallel/AssemblyInfo.fs
@@ -5,8 +5,8 @@ open System.Reflection
 [<assembly: AssemblyTitle("BioFSharp.Parallel")>]
 [<assembly: AssemblyProduct("BioFSharp")>]
 [<assembly: AssemblyDescription("An open source bioinformatics toolbox written in F#. ")>]
-[<assembly: AssemblyVersion("0.0.12")>]
-[<assembly: AssemblyFileVersion("0.0.12")>]
+[<assembly: AssemblyVersion("0.1.0")>]
+[<assembly: AssemblyFileVersion("0.1.0")>]
 [<assembly: AssemblyConfiguration("Release")>]
 do ()
 
@@ -14,6 +14,6 @@ module internal AssemblyVersionInformation =
     let [<Literal>] AssemblyTitle = "BioFSharp.Parallel"
     let [<Literal>] AssemblyProduct = "BioFSharp"
     let [<Literal>] AssemblyDescription = "An open source bioinformatics toolbox written in F#. "
-    let [<Literal>] AssemblyVersion = "0.0.12"
-    let [<Literal>] AssemblyFileVersion = "0.0.12"
+    let [<Literal>] AssemblyVersion = "0.1.0"
+    let [<Literal>] AssemblyFileVersion = "0.1.0"
     let [<Literal>] AssemblyConfiguration = "Release"
diff --git a/src/BioFSharp.Parallel/paket.template b/src/BioFSharp.Parallel/paket.template
index 441036df..757307d2 100644
--- a/src/BioFSharp.Parallel/paket.template
+++ b/src/BioFSharp.Parallel/paket.template
@@ -1,4 +1,5 @@
 type project
+id BioFSharp.Parallel
 title
     BioFSharp.Parallel
 owners
@@ -8,16 +9,22 @@ authors
 projectUrl
     https://github.com/CSBiology/BioFSharp
 iconUrl
-    https://raw.githubusercontent.com/CSBiology/FSharp.FGL/master/docs/files/img/logo.png
+    https://raw.githubusercontent.com/CSBiology/BioFSharp/master/docs/files/img/logo.png
 licenseUrl
     https://github.com/CSBiology/FSharp.FGL/blob/master/LICENSE.txt
 requireLicenseAcceptance
     false
+language
+    F#
 copyright
-    Copyright 2015
+    Copyright 2019
 tags
-    F# FSharp bioinformatics
+    F# FSharp bioinformatics parallelization
 summary
-    F# FSharp bioinformatics
+    GPU parallelized functions from BioFSharp
 description
-    F# FSharp bioinformatics
\ No newline at end of file
+    GPU parallelized functions from BioFSharp
+include-referenced-projects 
+    true
+files
+    ../../bin/BioFSharp.Parallel ==> lib
\ No newline at end of file
diff --git a/src/BioFSharp.Stats/AssemblyInfo.fs b/src/BioFSharp.Stats/AssemblyInfo.fs
index f9b5b50b..5b99211f 100644
--- a/src/BioFSharp.Stats/AssemblyInfo.fs
+++ b/src/BioFSharp.Stats/AssemblyInfo.fs
@@ -5,8 +5,8 @@ open System.Reflection
 [<assembly: AssemblyTitle("BioFSharp.Stats")>]
 [<assembly: AssemblyProduct("BioFSharp")>]
 [<assembly: AssemblyDescription("An open source bioinformatics toolbox written in F#. ")>]
-[<assembly: AssemblyVersion("0.0.12")>]
-[<assembly: AssemblyFileVersion("0.0.12")>]
+[<assembly: AssemblyVersion("0.1.0")>]
+[<assembly: AssemblyFileVersion("0.1.0")>]
 [<assembly: AssemblyConfiguration("Release")>]
 do ()
 
@@ -14,6 +14,6 @@ module internal AssemblyVersionInformation =
     let [<Literal>] AssemblyTitle = "BioFSharp.Stats"
     let [<Literal>] AssemblyProduct = "BioFSharp"
     let [<Literal>] AssemblyDescription = "An open source bioinformatics toolbox written in F#. "
-    let [<Literal>] AssemblyVersion = "0.0.12"
-    let [<Literal>] AssemblyFileVersion = "0.0.12"
+    let [<Literal>] AssemblyVersion = "0.1.0"
+    let [<Literal>] AssemblyFileVersion = "0.1.0"
     let [<Literal>] AssemblyConfiguration = "Release"
diff --git a/src/BioFSharp.Stats/paket.template b/src/BioFSharp.Stats/paket.template
index f3d3864c..a27894a2 100644
--- a/src/BioFSharp.Stats/paket.template
+++ b/src/BioFSharp.Stats/paket.template
@@ -1,23 +1,30 @@
 type project
+id BioFSharp.Stats
 title
     BioFSharp.Stats
 owners
     Timo Mühlhaus
-authors
+authors 
     Timo Mühlhaus
 projectUrl
     https://github.com/CSBiology/BioFSharp
 iconUrl
-    https://raw.githubusercontent.com/CSBiology/FSharp.FGL/master/docs/files/img/logo.png
+    https://raw.githubusercontent.com/CSBiology/BioFSharp/master/docs/files/img/logo.png
 licenseUrl
-    https://github.com/CSBiology/FSharp.FGL/blob/master/LICENSE.txt
+    https://github.com/CSBiology/BioFSharp/blob/master/LICENSE.txt
 requireLicenseAcceptance
     false
+language
+    F#
 copyright
-    Copyright 2015
+    Copyright 2019
 tags
-    F# FSharp bioinformatics
+    bioinformatics F# fsharp statistics
 summary
-    F# FSharp bioinformatics
+    Special statistical functions for bioinformatic purposes
 description
-    F# FSharp bioinformatics
\ No newline at end of file
+    Special statistical functions for bioinformatic purposes
+include-referenced-projects 
+    true
+files
+    ../../bin/BioFSharp.Stats ==> lib
\ No newline at end of file
diff --git a/src/BioFSharp.Vis/AssemblyInfo.fs b/src/BioFSharp.Vis/AssemblyInfo.fs
index da9e47d5..f91abe52 100644
--- a/src/BioFSharp.Vis/AssemblyInfo.fs
+++ b/src/BioFSharp.Vis/AssemblyInfo.fs
@@ -5,8 +5,8 @@ open System.Reflection
 [<assembly: AssemblyTitle("BioFSharp.Vis")>]
 [<assembly: AssemblyProduct("BioFSharp")>]
 [<assembly: AssemblyDescription("An open source bioinformatics toolbox written in F#. ")>]
-[<assembly: AssemblyVersion("0.0.12")>]
-[<assembly: AssemblyFileVersion("0.0.12")>]
+[<assembly: AssemblyVersion("0.1.0")>]
+[<assembly: AssemblyFileVersion("0.1.0")>]
 [<assembly: AssemblyConfiguration("Release")>]
 do ()
 
@@ -14,6 +14,6 @@ module internal AssemblyVersionInformation =
     let [<Literal>] AssemblyTitle = "BioFSharp.Vis"
     let [<Literal>] AssemblyProduct = "BioFSharp"
     let [<Literal>] AssemblyDescription = "An open source bioinformatics toolbox written in F#. "
-    let [<Literal>] AssemblyVersion = "0.0.12"
-    let [<Literal>] AssemblyFileVersion = "0.0.12"
+    let [<Literal>] AssemblyVersion = "0.1.0"
+    let [<Literal>] AssemblyFileVersion = "0.1.0"
     let [<Literal>] AssemblyConfiguration = "Release"
diff --git a/src/BioFSharp.Vis/paket.template b/src/BioFSharp.Vis/paket.template
index cf64cbd2..d261ff1d 100644
--- a/src/BioFSharp.Vis/paket.template
+++ b/src/BioFSharp.Vis/paket.template
@@ -1,23 +1,30 @@
 type project
+id BioFSharp.Vis
 title
     BioFSharp.Vis
 owners
     Timo Mühlhaus
-authors
+authors 
     Timo Mühlhaus
 projectUrl
     https://github.com/CSBiology/BioFSharp
 iconUrl
-    https://raw.githubusercontent.com/CSBiology/FSharp.FGL/master/docs/files/img/logo.png
+    https://raw.githubusercontent.com/CSBiology/BioFSharp/master/docs/files/img/logo.png
 licenseUrl
-    https://github.com/CSBiology/FSharp.FGL/blob/master/LICENSE.txt
+    https://github.com/CSBiology/BioFSharp/blob/master/LICENSE.txt
 requireLicenseAcceptance
     false
+language
+    F#
 copyright
-    Copyright 2015
+    Copyright 2019
 tags
-    F# FSharp bioinformatics
+    bioinformatics F# fsharp image-processing
 summary
-    F# FSharp bioinformatics
+    Image processing tools using continuous wavelet transformations
 description
-    F# FSharp bioinformatics
\ No newline at end of file
+    Image processing tools using continuous wavelet transformations
+include-referenced-projects 
+    true
+files
+    ../../bin/BioFSharp.Vis ==> lib
\ No newline at end of file
diff --git a/src/BioFSharp/AminoProperties.fs b/src/BioFSharp/AminoProperties.fs
index f6c25250..e09fa9a3 100644
--- a/src/BioFSharp/AminoProperties.fs
+++ b/src/BioFSharp/AminoProperties.fs
@@ -10,6 +10,8 @@ module AminoProperties =
 
     ///Union case of amino acid properties, referencing the according included information in this library. Use "initGetAminoProperty" function to obtain a simple mapping function
     type AminoProperty =
+        /// Gravy score (Grand Average of Hydropathy)
+        | GravyScore 
         /// Hydrophobicity index (Argos et al., 1982)
         | HydrophobicityIndex
         /// Hydrophobicity index (Fasman, 1989)
@@ -78,6 +80,8 @@ module AminoProperties =
         | CompositionIntracellular
         /// Composition of amino acids in extracellular proteins (percent) (Cedano et al., 1997)
         | CompositionExtracellular
+        /// Weights for coil at the window position of -3 (Qian-Sejnowski, 1988)
+        | WeightsCoilMinus3
         /// Weights for alpha-helix at the window position of -3 (Qian-Sejnowski, 1988)
         | WeightsHelixMinus3
         /// Helix formation parameters (delta delta G) (O'Neil-DeGrado, 1990)
@@ -89,6 +93,7 @@ module AminoProperties =
 
 
         static member toString = function
+            | GravyScore -> "Gravy score (Grand Average of Hydropathy)"
             | HydrophobicityIndex       -> "Hydrophobicity index (Argos et al., 1982)"
             | HydrophobicityFasman      -> "Hydrophobicity index (Fasman, 1989)"
             | HydrophobicityIndex2      -> "Hydrophobicity index (Wolfenden et al., 1979)"
@@ -123,6 +128,7 @@ module AminoProperties =
             | AverageNumberSurroundingResidues   -> "Average number of surrounding residues (Ponnuswamy et al., 1980)"
             | CompositionIntracellular  -> "Interior composition of amino acids in intracellular proteins of mesophiles (percent) (Fukuchi-Nishikawa, 2001)"
             | CompositionExtracellular  -> "Composition of amino acids in extracellular proteins (percent) (Cedano et al., 1997)"
+            | WeightsCoilMinus3        -> "Weights for coil at the window position of -3 (Qian-Sejnowski, 1988)"
             | WeightsHelixMinus3        -> "Weights for alpha-helix at the window position of -3 (Qian-Sejnowski, 1988)"
             | HelixFormationParameters  -> "Helix formation parameters (delta delta G) (O'Neil-DeGrado, 1990)"
             | FreeEnergyHelicalRegion   -> "Free energy in alpha-helical region (Munoz-Serrano, 1994)"
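
Note: a short, illustrative sketch of how the newly added Gravy score table can be consumed through the initGetAminoProperty mapping function mentioned in the doc comment above; the peptide string is a placeholder and the lookup is assumed to operate on amino acid symbols.

    open BioFSharp
    open BioFSharp.AminoProperties

    // Build a per-residue lookup for the Gravy score ...
    let getGravy = initGetAminoProperty AminoProperty.GravyScore

    // ... and average it over the residues of a peptide.
    let gravyOfPeptide =
        "ATPASE"
        |> BioArray.ofAminoAcidSymbolString
        |> Array.map getGravy
        |> Array.average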
diff --git a/src/BioFSharp/AssemblyInfo.fs b/src/BioFSharp/AssemblyInfo.fs
index 360f9928..0b57be74 100644
--- a/src/BioFSharp/AssemblyInfo.fs
+++ b/src/BioFSharp/AssemblyInfo.fs
@@ -5,8 +5,8 @@ open System.Reflection
 [<assembly: AssemblyTitle("BioFSharp")>]
 [<assembly: AssemblyProduct("BioFSharp")>]
 [<assembly: AssemblyDescription("An open source bioinformatics toolbox written in F#. ")>]
-[<assembly: AssemblyVersion("0.0.12")>]
-[<assembly: AssemblyFileVersion("0.0.12")>]
+[<assembly: AssemblyVersion("0.1.0")>]
+[<assembly: AssemblyFileVersion("0.1.0")>]
 [<assembly: AssemblyConfiguration("Release")>]
 do ()
 
@@ -14,6 +14,6 @@ module internal AssemblyVersionInformation =
     let [<Literal>] AssemblyTitle = "BioFSharp"
     let [<Literal>] AssemblyProduct = "BioFSharp"
     let [<Literal>] AssemblyDescription = "An open source bioinformatics toolbox written in F#. "
-    let [<Literal>] AssemblyVersion = "0.0.12"
-    let [<Literal>] AssemblyFileVersion = "0.0.12"
+    let [<Literal>] AssemblyVersion = "0.1.0"
+    let [<Literal>] AssemblyFileVersion = "0.1.0"
     let [<Literal>] AssemblyConfiguration = "Release"
diff --git a/src/BioFSharp/BioID.fs b/src/BioFSharp/BioID.fs
index 274bcf83..8b1e0d88 100644
--- a/src/BioFSharp/BioID.fs
+++ b/src/BioFSharp/BioID.fs
@@ -125,81 +125,171 @@ module BioID =
 //    let parseMgiId  = Regex.tryEitherParse MgiId "MGI:[0-9]*"
 
 
-//    module FastA =
-                
+    module FastA =
+
+        open System.Text.RegularExpressions
+        
 //        open FSharp.Care.Regex
 //        open FSharp.Care.Monads
 //        open FSharp.Care.Collections
 
-//        type FastaHeader<'IdType> = {
-//            ID          : 'IdType
-//            Description : string
-//            Info     : Map    
-//            }
+        type FastaHeader<'IdType> = {
+            ID          : 'IdType
+            Description : string
+            Info     : Map<string,string>
+            }
 
-//        let createFastaHeader id description info =
-//            {ID=id;Description=description;Info=info}
+        let createFastaHeader id description info =
+            {ID=id;Description=description;Info=info}
     
 
-//        /// Returns DisplayId of FastA header. None if none present.
-//        let displayIdOf (header:FastaHeader<_>) =
-//            header.Info.TryFindDefault "None" "DID"
-//        /// Returns Aliases of FastA header. None if none present.
-//        let aliasesOf (header:FastaHeader<_>) =
-//            header.Info.TryFindDefault "None" "ALS"
-//        /// Returns DataBaseVersion of FastA header. None if none present.
-//        let dataBaseVersionOf (header:FastaHeader<_>) =
-//            header.Info.TryFindDefault "None" "DBV"
-//        /// Returns Type of UniqueIdentifier of FastA header. None if none present.
-//        let touOf (header:FastaHeader<_>) =
-//            header.Info.TryFindDefault "None" "TOU"
-//        /// Returns SequenceVersion of FastA header. None if none present.
-//        let sequenceVersionOf (header:FastaHeader<_>) =
-//            header.Info.TryFindDefault "None" "SV"
-//        /// Returns OrganismName of FastA header. None if none present.
-//        let organismNameOf (header:FastaHeader<_>) =
-//            header.Info.TryFindDefault "None" "OS"
-//        /// Returns ProteinExistence of FastA header. None if none present.
-//        let proteinExistenceOf (header:FastaHeader<_>) =
-//            header.Info.TryFindDefault "None" "PE"
-//        /// Returns GeneName of FastA header. None if none present.
-//        let geneNameOf (header:FastaHeader<_>) =
-//            header.Info.TryFindDefault "None" "GN"
-//        /// Returns ProteinName of FastA header. None if none present.
-//        let proteinNameOf (header:FastaHeader<_>) =
-//            header.Info.TryFindDefault "None" "PN"
-
-
-
-//        /// Sets DisplayId in FastA header.
-//        let setDisplayId displayId (header:FastaHeader<_>) =
-//            {header with Info=header.Info.Add("DID",displayId)}
-//        /// Sets Aliases in FastA header.
-//        let setAliases alias (header:FastaHeader<_>) =
-//            {header with Info=header.Info.Add("ALS",alias)}
-//        /// Sets DataBaseVersion in FastA header.
-//        let setDataBaseVersion dbv (header:FastaHeader<_>) =
-//            {header with Info=header.Info.Add("DBV",dbv)}
-//        /// Sets Type of UniqueIdentifier in FastA header.
-//        let setIdType idType (header:FastaHeader<_>) =
-//            {header with Info=header.Info.Add("TOU",idType)}
-//        /// Sets SequenceVersion in FastA header.
-//        let setSequenceVersion sv (header:FastaHeader<_>) =
-//            {header with Info=header.Info.Add("SV",sv)}
-//        /// Sets OrganismName in FastA header.
-//        let setOrganismName os (header:FastaHeader<_>) =
-//            {header with Info=header.Info.Add("OS",os)}
-//        /// Sets ProteinExistence in FastA header.
-//        let setProteinExistence pe (header:FastaHeader<_>) =
-//            {header with Info=header.Info.Add("PE",pe)}
-//        /// Sets GeneName in FastA header.
-//        let setGeneName gn (header:FastaHeader<_>) =
-//            {header with Info=header.Info.Add("GN",gn)}    
-//        /// Sets ProteinName in FastA header.
-//        let setProteinName pn (header:FastaHeader<_>) =
-//            {header with Info=header.Info.Add("PN",pn)}
-
-
+        /// Returns DisplayId of FastA header. None if none present.
+        let displayIdOf (header:FastaHeader<_>) =
+            header.Info.TryFindDefault "None" "DID"
+        /// Returns Aliases of FastA header. None if none present.
+        let aliasesOf (header:FastaHeader<_>) =
+            header.Info.TryFindDefault "None" "ALS"
+        /// Returns DataBaseVersion of FastA header. None if none present.
+        let dataBaseVersionOf (header:FastaHeader<_>) =
+            header.Info.TryFindDefault "None" "DBV"
+        /// Returns Type of UniqueIdentifier of FastA header. None if none present.
+        let touOf (header:FastaHeader<_>) =
+            header.Info.TryFindDefault "None" "TOU"
+        /// Returns SequenceVersion of FastA header. None if none present.
+        let sequenceVersionOf (header:FastaHeader<_>) =
+            header.Info.TryFindDefault "None" "SV"
+        /// Returns OrganismName of FastA header. None if none present.
+        let organismNameOf (header:FastaHeader<_>) =
+            header.Info.TryFindDefault "None" "OS"
+        /// Returns ProteinExistence of FastA header. None if none present.
+        let proteinExistenceOf (header:FastaHeader<_>) =
+            header.Info.TryFindDefault "None" "PE"
+        /// Returns GeneName of FastA header. None if none present.
+        let geneNameOf (header:FastaHeader<_>) =
+            header.Info.TryFindDefault "None" "GN"
+        /// Returns ProteinName of FastA header. None if none present.
+        let proteinNameOf (header:FastaHeader<_>) =
+            header.Info.TryFindDefault "None" "PN"
+
+
+
+        /// Sets DisplayId in FastA header.
+        let setDisplayId displayId (header:FastaHeader<_>) =
+            {header with Info=header.Info.Add("DID",displayId)}
+        /// Sets Aliases in FastA header.
+        let setAliases alias (header:FastaHeader<_>) =
+            {header with Info=header.Info.Add("ALS",alias)}
+        /// Sets DataBaseVersion in FastA header.
+        let setDataBaseVersion dbv (header:FastaHeader<_>) =
+            {header with Info=header.Info.Add("DBV",dbv)}
+        /// Sets Type of UniqueIdentifier in FastA header.
+        let setIdType idType (header:FastaHeader<_>) =
+            {header with Info=header.Info.Add("TOU",idType)}
+        /// Sets SequenceVersion in FastA header.
+        let setSequenceVersion sv (header:FastaHeader<_>) =
+            {header with Info=header.Info.Add("SV",sv)}
+        /// Sets OrganismName in FastA header.
+        let setOrganismName os (header:FastaHeader<_>) =
+            {header with Info=header.Info.Add("OS",os)}
+        /// Sets ProteinExistence in FastA header.
+        let setProteinExistence pe (header:FastaHeader<_>) =
+            {header with Info=header.Info.Add("PE",pe)}
+        /// Sets GeneName in FastA header.
+        let setGeneName gn (header:FastaHeader<_>) =
+            {header with Info=header.Info.Add("GN",gn)}    
+        /// Sets ProteinName in FastA header.
+        let setProteinName pn (header:FastaHeader<_>) =
+            {header with Info=header.Info.Add("PN",pn)}
+
+
+        /// DisplayId (DID=)
+        let private displayIdRegex              = new Regex(@"(?<=DID=)[^=]*(?= \S*=|$)", options = RegexOptions.Compiled)
+        /// Aliases (ALS=)
+        let private AliasesRegex                = new Regex(@"(?<=ALS=)[^=]*(?= \S*=|$)", options = RegexOptions.Compiled)
+        /// DataBaseVersion (DBV=)
+        let private DataBaseVersionRegex        = new Regex(@"(?<=DBV=)[^=]*(?= \S*=|$)", options = RegexOptions.Compiled)
+        /// Type of UniqueIdentifier (TOU=)
+        let private TypeOfUniqueIdentifierRegex = new Regex(@"(?<=TOU=)[^=]*(?= \S*=|$)", options = RegexOptions.Compiled)
+        /// SequenceVersion (SV=)
+        let private SequenceVersionRegex        = new Regex(@"(?<=SV=)[^=]*(?= \S*=|$)", options = RegexOptions.Compiled) 
+        /// OrganismName (OS=)
+        let private OrganismNameRegex           = new Regex(@"(?<=OS=)[^=]*(?= \S*=|$)", options = RegexOptions.Compiled) 
+        /// OrganismIdentifier (OX=)
+        let private OrganismIdentifierRegex     = new Regex(@"(?<=OX=)[^=]*(?= \S*=|$)", options = RegexOptions.Compiled) 
+        /// ProteinExistence (PE=)
+        let private ProteinExistenceRegex       = new Regex(@"(?<=PE=)[^=]*(?= \S*=|$)", options = RegexOptions.Compiled) 
+        /// GeneName (GN=)
+        let private GeneNameRegex               = new Regex(@"(?<=GN=)[^=]*(?= \S*=|$)", options = RegexOptions.Compiled) 
+        ///  ProteinName (PN=)
+        let private ProteinNameRegex            = new Regex(@"(?<=PN=)[^=]*(?= \S*=|$)", options = RegexOptions.Compiled)    
+
+        /// Parse Description without attributes
+        let private descriptionRegex =  new Regex(@"(?<=|)[^=|]*(?= \S*=|$)", options = RegexOptions.Compiled)
+
+
+        /// Returns FastAHeader Object from UniProt style FastAHeader string
+        ///
+        /// For Reference see: https://www.uniprot.org/help/fasta-headers
+        let fromString (str:string) =    
+            let rec loop (input:(string*(string-> string option)) list) acc (s:string) =
+                match input with
+                | h::t ->   let key,parser = h 
+                            match parser s with
+                            | Some r -> loop t ((key,r)::acc) s
+                            | None -> loop t acc s
+                | [] -> (acc)
+
+            let matchToResultOption (m : Match) = if m.Success then Some m.Value else None
+
+            /// Parse DisplayId (DID=) from string
+            let parseDisplayId str =                displayIdRegex.Match(str) |> matchToResultOption
+            /// Parse Aliases (ALS=)
+            let parseAliases str =                  AliasesRegex.Match(str) |> matchToResultOption
+            /// Parse DataBaseVersion (DBV=)
+            let parseDataBaseVersion str =          DataBaseVersionRegex.Match(str) |> matchToResultOption
+            /// Parse Type of UniqueIdentifier (TOU=)
+            let parseTypeOfUniqueIdentifier str =   TypeOfUniqueIdentifierRegex.Match(str) |> matchToResultOption
+            /// Parse SequenceVersion (SV=)
+            let parseSequenceVersion str =          SequenceVersionRegex.Match(str) |> matchToResultOption
+            /// Parse OrganismName (OS=)
+            let parseOrganismName str =             OrganismNameRegex.Match(str) |> matchToResultOption
+            /// Parse OrganismIdentifier (OX=)
+            let parseOrganismIdentifier str =       OrganismIdentifierRegex.Match(str) |> matchToResultOption
+            /// Parse ProteinExistence (PE=)
+            let parseProteinExistence str =         ProteinExistenceRegex.Match(str) |> matchToResultOption
+            /// Parse GeneName (GN=)
+            let parseGeneName str =                 GeneNameRegex.Match(str) |> matchToResultOption
+            /// Parse ProteinName (PN=)
+            let parseProteinName str =              ProteinNameRegex.Match(str) |> matchToResultOption     
+
+            /// Parse Description without attributes
+            let parseDescription str = descriptionRegex.Match(str).Value
+
+            let splitStr = str.Split([|'|'|],3)
+            
+            let descr,info =
+                //splitStr.[2].Split(' ')
+                //|> Array.takeWhile 
+                if splitStr.Length > 2 then 
+                    let tmp =
+                        [
+                            "DID",parseDisplayId;
+                            "ALS",parseAliases;
+                            "DBV",parseDataBaseVersion;
+                            "TOU",parseTypeOfUniqueIdentifier;
+                            "SV",parseSequenceVersion;
+                            "OS",parseOrganismName;
+                            "OX",parseOrganismIdentifier
+                            "PE",parseProteinExistence;
+                            "GN",parseGeneName;
+                            "PN",parseProteinName;
+
+                        ]
+                    splitStr.[0] + " " + (parseDescription str),loop tmp [] splitStr.[2]
+                else
+                    ("",[])
+   
+            createFastaHeader (if splitStr.Length > 1 then splitStr.[1] else str) (descr.Trim()) (info |> Map.ofList)
 ////        /// Returns FastaHeader from string
 ////        let fromString (str:string) =    
 ////            let rec loop (input:(string*(string->Either<(string*string),string>)) list) acc (s:string) =
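
Note: an illustrative sketch of the re-enabled FastA header helpers; the header string is an example and the module is assumed to be reachable as BioFSharp.BioID.FastA.

    open BioFSharp.BioID

    // Parse a UniProt-style header (cf. https://www.uniprot.org/help/fasta-headers).
    let header =
        FastA.fromString "sp|P69905|HBA_HUMAN Hemoglobin subunit alpha OS=Homo sapiens OX=9606 GN=HBA1 PE=1 SV=2"

    // Accessors fall back to "None" when an attribute is missing.
    let organism = FastA.organismNameOf header   // "Homo sapiens"
    let gene     = FastA.geneNameOf header       // "HBA1"

    // Attributes can also be (re)set explicitly.
    let renamed = header |> FastA.setProteinName "Hemoglobin subunit alpha"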
diff --git a/src/BioFSharp/PeptideClassification.fs b/src/BioFSharp/PeptideClassification.fs
index ea30635d..b68f980a 100644
--- a/src/BioFSharp/PeptideClassification.fs
+++ b/src/BioFSharp/PeptideClassification.fs
@@ -1,16 +1,17 @@
 namespace BioFSharp
 
-///Contains functions to classify peptides by their 
+///Contains functions to classify peptides based on their specificity when mapped to proteins
 module PeptideClassification =
     
     open FSharpAux
     open System.Collections.Generic
 
+    ///Direction of strand
     type StrandDirection =
         | Forward = 0
         | Reverse = 1
 
-    
+    /// Contains information about protein used to deduce its relation to other proteins
     type ProteinModelInfo<'id,'chromosomeId,'geneLocus when 'id: comparison and 'chromosomeId: comparison and 'geneLocus: comparison> = {
         Id              : 'id
         ChromosomeId    : 'chromosomeId
@@ -21,6 +22,7 @@ module PeptideClassification =
         Orthologs       : Set< ProteinModelInfo<'id,'chromosomeId,'geneLocus> >
         }
 
+    /// Helper function to create ProteinModelInfo
     let createProteinModelInfo id chromosomeId strand geneLocus spliceVariantId seqEquivalents orthologs = {
         Id               = id
         ChromosomeId     = chromosomeId
@@ -31,26 +33,29 @@ module PeptideClassification =
         Orthologs        = Set.ofSeq orthologs
         }       
 
+    /// Contains protein sequence and information about protein used to deduce its relation to other proteins
     type ProteinModel<'id,'chromosomeId,'geneLocus,'sequence when 'id: comparison and 'chromosomeId: comparison and 'geneLocus: comparison and 'sequence: comparison> = {
         ProteinModelInfo : ProteinModelInfo<'id,'chromosomeId,'geneLocus>
         Sequence : 'sequence 
         }
 
-    
+    /// Helper function to create ProteinModel
     let createProteinModel proteinModelInfo sequence = 
         {ProteinModelInfo=proteinModelInfo;Sequence=sequence}
     
-
+    /// Marker for how unambiguously a peptide maps to proteins during protein inference
     type PeptideEvidenceClass = 
         | Unknown   = 0
+        ///Maps to exactly one isoform of one protein
         | C1a       = 1
+        ///Maps to different isoforms of one protein
         | C1b       = 2
         | C2a       = 3
         | C2b       = 4
         | C3a       = 5
         | C3b       = 6
 
-    
+    /// Creates a lookup database that assigns peptides to the proteins they are contained in
     let createPeptideProteinRelation digest (protModels:seq<ProteinModel<'id,'chromosomeId,'geneLocus,'sequence> option>) =
         let ppRelation = BidirectionalDictionary<'sequence,ProteinModelInfo<'id,'chromosomeId,'geneLocus>>()
         protModels            
@@ -80,7 +85,7 @@ module PeptideClassification =
             )     
         gLocusToSplVarNr    
 
-
+    ///Assigns a PeptideEvidenceClass to the peptide by using the information given through lookup and proteinInfos
     let classify (lookUp:Dictionary<'geneLocus,int>) (peptide,proteinInfos:seq<ProteinModelInfo<'id,'chromosomeId,'geneLocus>>) =
     
         let isGeneUnambiguous (pmi:seq< ProteinModelInfo<'id,'chromosomeId,'geneLocus> >) =
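
Note: to make the newly documented types concrete, a minimal construction of a protein model; identifiers, the splice variant id (assumed to be an int) and the sequence are placeholder values.

    open BioFSharp.PeptideClassification

    // Describe one protein model: where it is encoded and how it relates to other models.
    let modelInfo =
        createProteinModelInfo
            "Cre01.g000050.t1.1"     // protein id
            "chromosome_1"           // chromosome id
            StrandDirection.Forward  // strand
            "Cre01.g000050"          // gene locus
            1                        // splice variant id
            Seq.empty                // sequence equivalents
            Seq.empty                // orthologs

    // Pair the info with the protein sequence (any comparable sequence type works).
    let proteinModel = createProteinModel modelInfo "MAAVSRR"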
diff --git a/src/BioFSharp/Resources/AminoAcidProperties.txt b/src/BioFSharp/Resources/AminoAcidProperties.txt
index dc820951..d60863bd 100644
--- a/src/BioFSharp/Resources/AminoAcidProperties.txt
+++ b/src/BioFSharp/Resources/AminoAcidProperties.txt
@@ -1,3 +1,5 @@
+Gravy score (Grand Average of Hydropathy)
+A:1.8 R:-4.5 N:-3.5 D:-3.5 C:2.5 Q:-3.5 E:-3.5 G:-0.4 H:-3.2 I:4.5 L:3.8 K:-3.9 M:1.9 F:2.8 P:-1.6 S:-0.8 T:-0.7 W:-0.9 Y:-1.3 V:4.2
 Hydrophobicity index (Argos et al., 1982)
 A:0.61 R:0.6 N:0.06 D:0.46 C:1.07 Q:0 E:0.47 G:0.07 H:0.61 I:2.22 L:1.53 K:1.15 M:1.18 F:2.02 P:1.95 S:0.05 T:0.05 W:2.65 Y:1.88 V:1.32
 Hydrophobicity index (Fasman, 1989)
diff --git a/src/BioFSharp/paket.template b/src/BioFSharp/paket.template
index b6be001e..161d6217 100644
--- a/src/BioFSharp/paket.template
+++ b/src/BioFSharp/paket.template
@@ -1,4 +1,4 @@
-type file
+type project
 id BioFSharp
 title
     BioFSharp
@@ -17,13 +17,14 @@ requireLicenseAcceptance
 language
     F#
 copyright
-    Copyright 2017
+    Copyright 2019
 tags
     bioinformatics F# fsharp
 summary
     BioFSharp aims to be a user-friendly library for Bioinformatics written in F# as the official successor of FSharpBio.
 description
     BioFSharp aims to be a user-friendly library for Bioinformatics written in F# as the official successor of FSharpBio.
-
+include-referenced-projects 
+    true
 files
 	../../bin/BioFSharp ==> lib